
Resolve "Duplicate field with data"

This commit is contained in:
Davide Silvestri 2022-09-05 14:32:28 +00:00
parent 38ab653b91
commit cf7d5e728d
41 changed files with 1347 additions and 513 deletions

View file

@ -36,14 +36,7 @@ class JobSerializer(serializers.ModelSerializer):
@extend_schema_field(OpenApiTypes.STR)
def get_type(self, instance):
# It could be that the field related to the instance is already in the context
# else we can call the specific_class property to find it.
field = self.context.get("instance_type")
if not field:
field = job_type_registry.get_by_model(instance.specific_class)
return field.type
return job_type_registry.get_by_model(instance.specific_class).type
class CreateJobSerializer(serializers.Serializer):

View file

@ -16,7 +16,7 @@ ERROR_CANNOT_CHANGE_FIELD_TYPE = "ERROR_CANNOT_CHANGE_FIELD_TYPE"
ERROR_LINK_ROW_TABLE_NOT_PROVIDED = (
"ERROR_LINK_ROW_TABLE_NOT_PROVIDED",
HTTP_400_BAD_REQUEST,
"The `link_row_table` must be provided.",
"The `link_row_table_id` must be provided.",
)
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE = "ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE"
ERROR_FIELD_NOT_IN_TABLE = (

View file

@ -24,14 +24,7 @@ class FieldSerializer(serializers.ModelSerializer):
@extend_schema_field(OpenApiTypes.STR)
def get_type(self, instance):
# It could be that the field related to the instance is already in the context
# else we can call the specific_class property to find it.
field = self.context.get("instance_type")
if not field:
field = field_type_registry.get_by_model(instance.specific_class)
return field.type
return field_type_registry.get_by_model(instance.specific_class).type
class RelatedFieldsSerializer(serializers.Serializer):
@ -184,3 +177,9 @@ class UniqueRowValueParamsSerializer(serializers.Serializer):
class UniqueRowValuesSerializer(serializers.Serializer):
values = serializers.ListSerializer(child=serializers.CharField())
class DuplicateFieldParamsSerializer(serializers.Serializer):
duplicate_data = serializers.BooleanField(
default=False, help_text="Indicates whether the data should be duplicated."
)

View file

@ -2,7 +2,12 @@ from django.urls import re_path
from baserow.contrib.database.fields.registries import field_type_registry
from .views import FieldsView, FieldView, UniqueRowValueFieldView
from .views import (
AsyncDuplicateFieldView,
FieldsView,
FieldView,
UniqueRowValueFieldView,
)
app_name = "baserow.contrib.database.api.fields"
@ -14,4 +19,9 @@ urlpatterns = field_type_registry.api_urls + [
name="unique_row_values",
),
re_path(r"(?P<field_id>[0-9]+)/$", FieldView.as_view(), name="item"),
re_path(
r"(?P<field_id>[0-9]+)/duplicate/async/$",
AsyncDuplicateFieldView.as_view(),
name="async_duplicate",
),
]

View file

@ -1,19 +1,26 @@
from typing import Any, Dict
from django.conf import settings
from django.db import transaction
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework import status
from rest_framework.decorators import permission_classes as method_permission_classes
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from baserow.api.decorators import (
map_exceptions,
validate_body,
validate_body_custom_fields,
validate_query_parameters,
)
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.jobs.errors import ERROR_MAX_JOB_COUNT_EXCEEDED
from baserow.api.jobs.serializers import JobSerializer
from baserow.api.schemas import (
CLIENT_SESSION_ID_SCHEMA_PARAMETER,
CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
@ -65,6 +72,7 @@ from baserow.contrib.database.fields.exceptions import (
ReservedBaserowFieldNameException,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.job_types import DuplicateFieldJobType
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.exceptions import (
@ -77,10 +85,14 @@ from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.core.action.registries import action_type_registry
from baserow.core.db import specific_iterator
from baserow.core.exceptions import UserNotInGroup
from baserow.core.jobs.exceptions import MaxJobCountExceeded
from baserow.core.jobs.handler import JobHandler
from baserow.core.jobs.registries import job_type_registry
from baserow.core.trash.exceptions import CannotDeleteAlreadyDeletedItem
from .serializers import (
CreateFieldSerializer,
DuplicateFieldParamsSerializer,
FieldSerializer,
FieldSerializerWithRelatedFields,
RelatedFieldsSerializer,
@ -499,3 +511,58 @@ class UniqueRowValueFieldView(APIView):
)
return Response(UniqueRowValuesSerializer({"values": values}).data)
class AsyncDuplicateFieldView(APIView):
permission_classes = (IsAuthenticated,)
@extend_schema(
parameters=[
OpenApiParameter(
name="field_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description="The field to duplicate.",
),
CLIENT_SESSION_ID_SCHEMA_PARAMETER,
CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
],
tags=["Database table fields"],
operation_id="duplicate_table_field",
description=(
"Duplicates the table with the provided `table_id` parameter "
"if the authorized user has access to the database's group."
),
responses={
202: DuplicateFieldJobType().get_serializer_class(),
400: get_error_schema(
[
"ERROR_USER_NOT_IN_GROUP",
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_MAX_JOB_COUNT_EXCEEDED",
]
),
404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
},
)
@transaction.atomic
@map_exceptions(
{
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
MaxJobCountExceeded: ERROR_MAX_JOB_COUNT_EXCEEDED,
}
)
@validate_body(DuplicateFieldParamsSerializer)
def post(self, request: Request, field_id: int, data: Dict[str, Any]) -> Response:
"""Creates a job to duplicate a field in a table."""
job = JobHandler().create_and_start_job(
request.user,
DuplicateFieldJobType.type,
field_id=field_id,
duplicate_data=data["duplicate_data"],
)
serializer = job_type_registry.get_serializer(job, JobSerializer)
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
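
A minimal client-side sketch of calling this new endpoint, assuming the field URLs above are mounted under `/api/database/fields/` and jobs are polled at `/api/jobs/<job_id>/`; the base URL, JWT token and field id are placeholders:

import requests

BASE_URL = "http://localhost:8000"  # assumption: your Baserow backend URL
TOKEN = "<jwt token>"               # assumption: a valid JWT for the user
FIELD_ID = 123                      # assumption: a field the user can access

headers = {"Authorization": f"JWT {TOKEN}"}

# Start the asynchronous duplication job; the view responds with 202 Accepted
# and the serialized job.
job = requests.post(
    f"{BASE_URL}/api/database/fields/{FIELD_ID}/duplicate/async/",
    json={"duplicate_data": True},
    headers=headers,
).json()

# Poll the generic jobs endpoint until the job state is "finished"; the result
# then contains both the original and the duplicated field.
job = requests.get(f"{BASE_URL}/api/jobs/{job['id']}/", headers=headers).json()
print(job["state"], job.get("duplicated_field"))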

View file

@ -304,13 +304,7 @@ class ViewSerializer(serializers.ModelSerializer):
@extend_schema_field(OpenApiTypes.STR)
def get_type(self, instance):
# It could be that the view related to the instance is already in the context
# else we can call the specific_class property to find it.
view = self.context.get("instance_type")
if not view:
view = view_type_registry.get_by_model(instance.specific_class)
return view.type
return view_type_registry.get_by_model(instance.specific_class).type
class CreateViewSerializer(serializers.ModelSerializer):
@ -407,13 +401,7 @@ class PublicViewSerializer(serializers.ModelSerializer):
@extend_schema_field(OpenApiTypes.STR)
def get_type(self, instance):
# It could be that the view related to the instance is already in the context
# else we can call the specific_class property to find it.
view = self.context.get("instance_type")
if not view:
view = view_type_registry.get_by_model(instance.specific_class)
return view.type
return view_type_registry.get_by_model(instance.specific_class).type
class Meta:
model = View

View file

@ -208,12 +208,14 @@ class DatabaseConfig(AppConfig):
from .fields.actions import (
CreateFieldActionType,
DeleteFieldActionType,
DuplicateFieldActionType,
UpdateFieldActionType,
)
action_type_registry.register(CreateFieldActionType())
action_type_registry.register(DeleteFieldActionType())
action_type_registry.register(UpdateFieldActionType())
action_type_registry.register(DuplicateFieldActionType())
from .views.view_types import FormViewType, GalleryViewType, GridViewType
@ -408,15 +410,17 @@ class DatabaseConfig(AppConfig):
group_storage_usage_item_registry.register(FormViewGroupStorageUsageItem())
from baserow.contrib.database.table.job_types import DuplicateTableJobType
from baserow.core.jobs.registries import job_type_registry
from .airtable.job_type import AirtableImportJobType
from .fields.job_types import DuplicateFieldJobType
from .file_import.job_type import FileImportJobType
from .table.job_types import DuplicateTableJobType
job_type_registry.register(AirtableImportJobType())
job_type_registry.register(FileImportJobType())
job_type_registry.register(DuplicateTableJobType())
job_type_registry.register(DuplicateFieldJobType())
# The signals must always be imported last because they use the registries
# which need to be filled first.

View file

@ -119,8 +119,7 @@ def read_repeatable_read_single_table_transaction(
https://www.postgresql.org/docs/current/mvcc-caveats.html for more info.
This manager uses READ COMMITTED and as such has a lower overhead, but does not get
the snapshot-like reading guarantees that REPEATABLE READ does.
This manager uses REPEATABLE READ to guarantee a valid snapshot of the data.
:param table_id: The table to obtain a table and field locks for to ensure
safe reading.

View file

@ -3,270 +3,20 @@ from copy import deepcopy
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from django.contrib.auth.models import AbstractUser
from django.core.management.color import no_style
from django.db import connection
from django.db.models import ManyToManyField
from psycopg2 import sql
from baserow.contrib.database.db.schema import safe_django_schema_editor
from baserow.contrib.database.fields.backup_handler import (
BackupData,
FieldDataBackupHandler,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field, SpecificFieldForUpdate
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.table.scopes import TableActionScopeType
from baserow.core.action.models import Action
from baserow.core.action.registries import ActionScopeStr, ActionType
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed
BackupData = Dict[str, Any]
class FieldDataBackupHandler:
"""
Backs up an arbitrary Baserow field by getting its model fields and deciding how
to back it up based on them. Only the field's data (think cells) is backed up and no
associated field meta-data is backed up by this class.
The backup data is stored in the database and no serialization/deserialization
of the data occurs, so it is fast. It is not suitable for backing up data to
prevent data loss, but instead for backing up data changed by Baserow actions
so that those actions can be undone.
If the model field is a many to many field then we back up by creating a duplicate
m2m table and copying the data into it.
Otherwise the field must be an actual column in the user table, so we duplicate
the column and copy the data into it.
Also knows how to restore from a backup and clean up any backups done by this
class, even if the Field/Table etc. has been permanently deleted from Baserow.
"""
@classmethod
def backup_field_data(
cls,
field_to_backup: Field,
identifier_to_backup_into: str,
) -> BackupData:
"""
Backs up the provided field's data into a new column or table which will be
named using the identifier_to_backup_into param.
:param field_to_backup: A Baserow field that you want to backup the data for.
:param identifier_to_backup_into: The name that will be used when creating
the backup column or table.
:return: A dictionary that can then be passed back into the other class methods
to restore the backed up data or clean it up.
"""
model = field_to_backup.table.get_model(
field_ids=[],
fields=[field_to_backup],
add_dependencies=False,
)
model_field_to_backup = model._meta.get_field(field_to_backup.db_column)
if isinstance(model_field_to_backup, ManyToManyField):
through = model_field_to_backup.remote_field.through
m2m_table_to_backup = through._meta.db_table
cls._create_duplicate_m2m_table(
model,
m2m_model_field_to_duplicate=model_field_to_backup,
new_m2m_table_name=identifier_to_backup_into,
)
cls._copy_m2m_data_between_tables(
source_table=m2m_table_to_backup,
target_table=identifier_to_backup_into,
m2m_model_field=model_field_to_backup,
through_model=through,
)
return {"backed_up_m2m_table_name": identifier_to_backup_into}
else:
table_name = model_field_to_backup.model._meta.db_table
cls._create_duplicate_nullable_column(
model,
model_field_to_duplicate=model_field_to_backup,
new_column_name=identifier_to_backup_into,
)
cls._copy_not_null_column_data(
table_name,
source_column=model_field_to_backup.column,
target_column=identifier_to_backup_into,
)
return {
"table_id_containing_backup_column": field_to_backup.table_id,
"backed_up_column_name": identifier_to_backup_into,
}
@classmethod
def restore_backup_data_into_field(
cls,
field_to_restore_backup_data_into: Field,
backup_data: BackupData,
):
"""
Given a dictionary generated by the backup_field_data this method copies the
backed up data back into an existing Baserow field of the same type.
"""
model = field_to_restore_backup_data_into.table.get_model(
field_ids=[],
fields=[field_to_restore_backup_data_into],
add_dependencies=False,
)
model_field_to_restore_into = model._meta.get_field(
field_to_restore_backup_data_into.db_column
)
if isinstance(model_field_to_restore_into, ManyToManyField):
backed_up_m2m_table_name = backup_data["backed_up_m2m_table_name"]
through = model_field_to_restore_into.remote_field.through
target_m2m_table = through._meta.db_table
cls._truncate_table(target_m2m_table)
cls._copy_m2m_data_between_tables(
source_table=backed_up_m2m_table_name,
target_table=target_m2m_table,
m2m_model_field=model_field_to_restore_into,
through_model=through,
)
cls._drop_table(backed_up_m2m_table_name)
else:
backed_up_column_name = backup_data["backed_up_column_name"]
table_name = model_field_to_restore_into.model._meta.db_table
cls._copy_not_null_column_data(
table_name,
source_column=backed_up_column_name,
target_column=model_field_to_restore_into.column,
)
cls._drop_column(table_name, backed_up_column_name)
@classmethod
def clean_up_backup_data(
cls,
backup_data: BackupData,
):
"""
Given a dictionary generated by the backup_field_data this method deletes any
backup data to reclaim space used.
"""
if "backed_up_m2m_table_name" in backup_data:
cls._drop_table(backup_data["backed_up_m2m_table_name"])
else:
try:
table = Table.objects_and_trash.get(
id=backup_data["table_id_containing_backup_column"]
)
cls._drop_column(
table.get_database_table_name(),
backup_data["backed_up_column_name"],
)
except Table.DoesNotExist:
# The table has already been permanently deleted by the trash system
# so there is nothing for us to do.
pass
@staticmethod
def _create_duplicate_m2m_table(
model: GeneratedTableModel,
m2m_model_field_to_duplicate: ManyToManyField,
new_m2m_table_name: str,
):
with safe_django_schema_editor() as schema_editor:
# Create a duplicate m2m table to backup the data into.
new_backup_table = deepcopy(m2m_model_field_to_duplicate)
new_backup_table.remote_field.through._meta.db_table = new_m2m_table_name
schema_editor.add_field(model, new_backup_table)
@staticmethod
def _truncate_table(target_table):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("TRUNCATE TABLE {target_table}").format(
target_table=sql.Identifier(target_table),
)
)
@staticmethod
def _drop_table(backup_name: str):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("DROP TABLE {backup_table}").format(
backup_table=sql.Identifier(backup_name),
)
)
@staticmethod
def _copy_m2m_data_between_tables(
source_table: str,
target_table: str,
m2m_model_field: ManyToManyField,
through_model: GeneratedTableModel,
):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL(
"""
INSERT INTO {target_table} (id, {m2m_column}, {m2m_reverse_column})
SELECT id, {m2m_column}, {m2m_reverse_column} FROM {source_table}
"""
).format(
source_table=sql.Identifier(source_table),
target_table=sql.Identifier(target_table),
m2m_column=sql.Identifier(m2m_model_field.m2m_column_name()),
m2m_reverse_column=sql.Identifier(
m2m_model_field.m2m_reverse_name()
),
)
)
# When the rows are inserted we provide the old ids, and because of
# that the auto increment sequence is still set at `1`. This needs to be set to the
# maximum value because otherwise creating a new row could later fail.
sequence_sql = connection.ops.sequence_reset_sql(
no_style(), [through_model]
)
cursor.execute(sequence_sql[0])
@staticmethod
def _create_duplicate_nullable_column(
model: GeneratedTableModel, model_field_to_duplicate, new_column_name: str
):
with safe_django_schema_editor() as schema_editor:
# Create a duplicate column to backup the data into.
new_backup_model_field = deepcopy(model_field_to_duplicate)
new_backup_model_field.column = new_column_name
# It must be nullable so INSERTs into the table still work. If we restore
# this backed up column back into a real column we won't copy over any
# NULLs created by INSERTs.
new_backup_model_field.null = True
schema_editor.add_field(model, new_backup_model_field)
@staticmethod
def _copy_not_null_column_data(table_name, source_column, target_column):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL(
"UPDATE {table_name} SET {target_column} = {source_column} "
"WHERE {source_column} IS NOT NULL"
).format(
table_name=sql.Identifier(table_name),
target_column=sql.Identifier(target_column),
source_column=sql.Identifier(source_column),
)
)
@staticmethod
def _drop_column(table_name: str, column_to_drop: str):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("ALTER TABLE {table_name} DROP COLUMN {column_to_drop}").format(
table_name=sql.Identifier(table_name),
column_to_drop=sql.Identifier(column_to_drop),
)
)
from baserow.core.utils import ChildProgressBuilder, extract_allowed
class UpdateFieldActionType(ActionType):
@ -674,3 +424,54 @@ class DeleteFieldActionType(ActionType):
user,
field,
)
class DuplicateFieldActionType(ActionType):
type = "duplicate_field"
@dataclasses.dataclass
class Params:
field_id: int
@classmethod
def do(
cls,
user: AbstractUser,
field: Field,
duplicate_data: bool = False,
progress_builder: Optional[ChildProgressBuilder] = None,
) -> Tuple[Field, List[Field]]:
"""
Duplicate a field. Undoing this action trashes the duplicated field and
redoing restores it.
:param user: The user on whose behalf the duplicated field will be
created.
:param field: The field instance to duplicate.
:param duplicate_data: Whether the data of the field should be duplicated as well.
:param progress_builder: A progress builder instance that can be used to
track the progress of the duplication.
:return: A tuple with the duplicated field instance and a list of the fields
that have been updated.
"""
new_field_clone, updated_fields = FieldHandler().duplicate_field(
user, field, duplicate_data, progress_builder=progress_builder
)
cls.register_action(
user, cls.Params(new_field_clone.id), cls.scope(field.table_id)
)
return new_field_clone, updated_fields
@classmethod
def scope(cls, table_id) -> ActionScopeStr:
return TableActionScopeType.value(table_id)
@classmethod
def undo(cls, user: AbstractUser, params: Params, action_being_undone: Action):
FieldHandler().delete_field(user, FieldHandler().get_field(params.field_id))
@classmethod
def redo(cls, user: AbstractUser, params: Params, action_being_redone: Action):
TrashHandler.restore_item(
user, "field", params.field_id, parent_trash_item_id=None
)
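
A short usage sketch of the new action type, assuming `user` and `field` are an existing user and a non-primary field the user may access (both hypothetical here); `do()` registers the action so it can be undone and redone as described above:

from baserow.contrib.database.fields.actions import DuplicateFieldActionType

# Duplicate the field and, optionally, its cell data. The duplicated field and
# any other fields updated as a side effect are returned.
new_field, updated_fields = DuplicateFieldActionType.do(
    user, field, duplicate_data=True
)

# Undoing the registered action trashes `new_field`; redoing restores it from
# the trash (see undo()/redo() above).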

View file

@ -0,0 +1,305 @@
from copy import deepcopy
from typing import Any, Dict, Optional
from django.core.management.color import no_style
from django.db import connection
from django.db.models import ManyToManyField
from psycopg2 import sql
from baserow.contrib.database.db.schema import safe_django_schema_editor
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.table.models import GeneratedTableModel, Table
BackupData = Dict[str, Any]
class FieldDataBackupHandler:
"""
Backs up an arbitrary Baserow field by getting its model fields and deciding how
to back it up based on them. Only the field's data (think cells) is backed up and no
associated field meta-data is backed up by this class.
The backup data is stored in the database and no serialization/deserialization
of the data occurs, so it is fast. It is not suitable for backing up data to
prevent data loss, but instead for backing up data changed by Baserow actions
so that those actions can be undone.
If the model field is a many to many field then we back up by creating a duplicate
m2m table and copying the data into it.
Otherwise the field must be an actual column in the user table, so we duplicate
the column and copy the data into it.
Also knows how to restore from a backup and clean up any backups done by this
class, even if the Field/Table etc. has been permanently deleted from Baserow.
"""
@classmethod
def duplicate_field_data(
cls, original_field: Field, duplicated_field: Field
) -> None:
"""
Duplicates the data of the original field into the duplicated field.
:param original_field: The original field to duplicate the data from.
:param duplicated_field: The duplicated field to duplicate the data to.
"""
model = original_field.table.get_model(
field_ids=[],
fields=[original_field, duplicated_field],
add_dependencies=False,
)
original_model_field = model._meta.get_field(original_field.db_column)
duplicated_model_field = model._meta.get_field(duplicated_field.db_column)
if isinstance(original_model_field, ManyToManyField):
through = original_model_field.remote_field.through
m2m_table_to_duplicate = through._meta.db_table
new_m2m_table_name = (
duplicated_model_field.remote_field.through._meta.db_table
)
cls._copy_m2m_data_between_tables(
source_table=m2m_table_to_duplicate,
target_table=new_m2m_table_name,
m2m_model_field=original_model_field,
m2m_target_model_field=duplicated_model_field,
through_model=through,
)
else:
table_name = original_model_field.model._meta.db_table
cls._copy_not_null_column_data(
table_name,
source_column=original_model_field.column,
target_column=duplicated_model_field.db_column,
)
@classmethod
def backup_field_data(
cls,
field_to_backup: Field,
identifier_to_backup_into: str,
) -> BackupData:
"""
Backs up the provided field's data into a new column or table which will be
named using the identifier_to_backup_into param.
:param field_to_backup: A Baserow field that you want to backup the data for.
:param identifier_to_backup_into: The name that will be used when creating
the backup column or table.
:return: A dictionary that can then be passed back into the other class methods
to restore the backed up data or clean it up.
"""
model = field_to_backup.table.get_model(
field_ids=[],
fields=[field_to_backup],
add_dependencies=False,
)
model_field_to_backup = model._meta.get_field(field_to_backup.db_column)
if isinstance(model_field_to_backup, ManyToManyField):
through = model_field_to_backup.remote_field.through
m2m_table_to_backup = through._meta.db_table
cls._create_duplicate_m2m_table(
model,
m2m_model_field_to_duplicate=model_field_to_backup,
new_m2m_table_name=identifier_to_backup_into,
)
cls._copy_m2m_data_between_tables(
source_table=m2m_table_to_backup,
target_table=identifier_to_backup_into,
m2m_model_field=model_field_to_backup,
through_model=through,
)
return {"backed_up_m2m_table_name": identifier_to_backup_into}
else:
table_name = model_field_to_backup.model._meta.db_table
cls._create_duplicate_nullable_column(
model,
model_field_to_duplicate=model_field_to_backup,
new_column_name=identifier_to_backup_into,
)
cls._copy_not_null_column_data(
table_name,
source_column=model_field_to_backup.column,
target_column=identifier_to_backup_into,
)
return {
"table_id_containing_backup_column": field_to_backup.table_id,
"backed_up_column_name": identifier_to_backup_into,
}
@classmethod
def restore_backup_data_into_field(
cls,
field_to_restore_backup_data_into: Field,
backup_data: BackupData,
):
"""
Given a dictionary generated by the backup_field_data this method copies the
backed up data back into an existing Baserow field of the same type.
"""
model = field_to_restore_backup_data_into.table.get_model(
field_ids=[],
fields=[field_to_restore_backup_data_into],
add_dependencies=False,
)
model_field_to_restore_into = model._meta.get_field(
field_to_restore_backup_data_into.db_column
)
if isinstance(model_field_to_restore_into, ManyToManyField):
backed_up_m2m_table_name = backup_data["backed_up_m2m_table_name"]
through = model_field_to_restore_into.remote_field.through
target_m2m_table = through._meta.db_table
cls._truncate_table(target_m2m_table)
cls._copy_m2m_data_between_tables(
source_table=backed_up_m2m_table_name,
target_table=target_m2m_table,
m2m_model_field=model_field_to_restore_into,
through_model=through,
)
cls._drop_table(backed_up_m2m_table_name)
else:
backed_up_column_name = backup_data["backed_up_column_name"]
table_name = model_field_to_restore_into.model._meta.db_table
cls._copy_not_null_column_data(
table_name,
source_column=backed_up_column_name,
target_column=model_field_to_restore_into.column,
)
cls._drop_column(table_name, backed_up_column_name)
@classmethod
def clean_up_backup_data(
cls,
backup_data: BackupData,
):
"""
Given a dictionary generated by the backup_field_data this method deletes any
backup data to reclaim space used.
"""
if "backed_up_m2m_table_name" in backup_data:
cls._drop_table(backup_data["backed_up_m2m_table_name"])
else:
try:
table = Table.objects_and_trash.get(
id=backup_data["table_id_containing_backup_column"]
)
cls._drop_column(
table.get_database_table_name(),
backup_data["backed_up_column_name"],
)
except Table.DoesNotExist:
# The table has already been permanently deleted by the trash system
# so there is nothing for us to do.
pass
@staticmethod
def _create_duplicate_m2m_table(
model: GeneratedTableModel,
m2m_model_field_to_duplicate: ManyToManyField,
new_m2m_table_name: str,
):
with safe_django_schema_editor() as schema_editor:
# Create a duplicate m2m table to backup the data into.
new_backup_table = deepcopy(m2m_model_field_to_duplicate)
new_backup_table.remote_field.through._meta.db_table = new_m2m_table_name
schema_editor.add_field(model, new_backup_table)
@staticmethod
def _truncate_table(target_table):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("TRUNCATE TABLE {target_table}").format(
target_table=sql.Identifier(target_table),
)
)
@staticmethod
def _drop_table(backup_name: str):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("DROP TABLE {backup_table}").format(
backup_table=sql.Identifier(backup_name),
)
)
@staticmethod
def _copy_m2m_data_between_tables(
source_table: str,
target_table: str,
m2m_model_field: ManyToManyField,
through_model: GeneratedTableModel,
m2m_target_model_field: Optional[ManyToManyField] = None,
):
with connection.cursor() as cursor:
m2m_target_model_field = m2m_target_model_field or m2m_model_field
cursor.execute(
sql.SQL(
"""
INSERT INTO {target_table} (id, {m2m_column}, {m2m_reverse_target_column})
SELECT id, {m2m_column}, {m2m_reverse_column} FROM {source_table}
"""
).format(
source_table=sql.Identifier(source_table),
target_table=sql.Identifier(target_table),
m2m_column=sql.Identifier(m2m_model_field.m2m_column_name()),
m2m_reverse_column=sql.Identifier(
m2m_model_field.m2m_reverse_name()
),
m2m_reverse_target_column=sql.Identifier(
m2m_target_model_field.m2m_reverse_name()
),
)
)
# When the rows are inserted we provide the old ids, and because of
# that the auto increment sequence is still set at `1`. This needs to be set to the
# maximum value because otherwise creating a new row could later fail.
sequence_sql = connection.ops.sequence_reset_sql(
no_style(), [through_model]
)
cursor.execute(sequence_sql[0])
@staticmethod
def _create_duplicate_nullable_column(
model: GeneratedTableModel, model_field_to_duplicate, new_column_name: str
):
with safe_django_schema_editor() as schema_editor:
# Create a duplicate column to backup the data into.
new_backup_model_field = deepcopy(model_field_to_duplicate)
new_backup_model_field.column = new_column_name
# It must be nullable so INSERTs into the table still work. If we restore
# this backed up column back into a real column we won't copy over any
# NULLs created by INSERTs.
new_backup_model_field.null = True
schema_editor.add_field(model, new_backup_model_field)
@staticmethod
def _copy_not_null_column_data(table_name, source_column, target_column):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL(
"UPDATE {table_name} SET {target_column} = {source_column} "
"WHERE {source_column} IS NOT NULL"
).format(
table_name=sql.Identifier(table_name),
target_column=sql.Identifier(target_column),
source_column=sql.Identifier(source_column),
)
)
@staticmethod
def _drop_column(table_name: str, column_to_drop: str):
with connection.cursor() as cursor:
cursor.execute(
sql.SQL("ALTER TABLE {table_name} DROP COLUMN {column_to_drop}").format(
table_name=sql.Identifier(table_name),
column_to_drop=sql.Identifier(column_to_drop),
)
)
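
A minimal sketch of the backup lifecycle this handler provides, assuming `field` is an existing `Field` instance and `backup_name` is an unused column/table identifier (both hypothetical):

from baserow.contrib.database.fields.backup_handler import FieldDataBackupHandler

# Copy the field's cell data into a spare column (or a duplicate m2m table for
# link row fields) named `backup_name`.
backup_data = FieldDataBackupHandler.backup_field_data(
    field_to_backup=field,
    identifier_to_backup_into=backup_name,
)

# ... a destructive change to the field's data can now happen ...

# Undo path: copy the backed up data back into the (same typed) field; the
# backup column/table is dropped afterwards.
FieldDataBackupHandler.restore_backup_data_into_field(field, backup_data)

# Alternatively, once undoing is no longer possible, reclaim the space instead:
# FieldDataBackupHandler.clean_up_backup_data(backup_data)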

View file

@ -1,3 +1,4 @@
import logging
from abc import ABC, abstractmethod
from collections import defaultdict
from copy import deepcopy
@ -123,6 +124,8 @@ if TYPE_CHECKING:
)
from baserow.contrib.database.table.models import GeneratedTableModel, Table
logger = logging.getLogger(__name__)
class TextFieldMatchingRegexFieldType(FieldType, ABC):
"""
@ -1020,15 +1023,38 @@ class LinkRowFieldType(FieldType):
type = "link_row"
model_class = LinkRowField
allowed_fields = [
"link_row_table",
"link_row_table_id",
"link_row_related_field",
"link_row_table",
"link_row_relation_id",
]
serializer_field_names = ["link_row_table", "link_row_related_field"]
serializer_field_names = [
"link_row_table_id",
"link_row_related_field_id",
"link_row_table",
"link_row_related_field",
]
serializer_field_overrides = {
"link_row_table_id": serializers.IntegerField(
required=False,
allow_null=True,
source="link_row_table.id",
help_text="The id of the linked table.",
),
"link_row_related_field_id": serializers.PrimaryKeyRelatedField(
read_only=True, required=False, help_text="The id of the related field."
),
"link_row_table": serializers.IntegerField(
required=False,
allow_null=True,
source="link_row_table.id",
help_text="(Deprecated) The id of the linked table.",
),
"link_row_related_field": serializers.PrimaryKeyRelatedField(
read_only=True, required=False
)
read_only=True,
required=False,
help_text="(Deprecated) The id of the related field.",
),
}
api_exceptions_map = {
LinkRowTableNotProvided: ERROR_LINK_ROW_TABLE_NOT_PROVIDED,
@ -1307,12 +1333,30 @@ class LinkRowFieldType(FieldType):
"""
This method checks if the provided link row table is an int because then it
needs to be converted to a table instance.
It also provides compatibility between the old name `link_row_table` and the new
name `link_row_table_id`.
"""
if "link_row_table" in values and isinstance(values["link_row_table"], int):
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.models import Table
table = TableHandler().get_table(values["link_row_table"])
link_row_table_id = values.pop("link_row_table_id", None)
if link_row_table_id is None:
link_row_table = values.pop("link_row_table", None)
if isinstance(link_row_table, Table):
# set in a previous call to prepare_values, so we can use it.
values["link_row_table"] = link_row_table
elif isinstance(link_row_table, int):
logger.warning(
"The 'link_row_table' parameter is deprecated for LinkRow field."
"Please, use 'link_row_table_id' instead."
)
link_row_table_id = link_row_table
if isinstance(link_row_table_id, int):
table = TableHandler().get_table(link_row_table_id)
table.database.group.has_user(user, raise_error=True)
values["link_row_table"] = table
@ -1322,11 +1366,13 @@ class LinkRowFieldType(FieldType):
values = super().export_prepared_values(field)
if field.link_row_table:
values["link_row_table"] = field.link_row_table_id
values.pop("link_row_table", None)
values["link_row_table_id"] = field.link_row_table_id
# We don't want to serialize the related field as the update call will create
# it again.
values.pop("link_row_related_field")
values.pop("link_row_related_field", None)
values.pop("link_row_related_field_id", None)
return values
@ -1337,14 +1383,13 @@ class LinkRowFieldType(FieldType):
raised.
"""
if "link_row_table" not in values or not values["link_row_table"]:
link_row_table = values.get("link_row_table")
if link_row_table is None:
raise LinkRowTableNotProvided(
"The link_row_table argument must be provided when creating a link_row "
"field."
)
link_row_table = values["link_row_table"]
if table.database_id != link_row_table.database_id:
raise LinkRowTableNotInSameDatabase(
f"The link row table {link_row_table.id} is not in the same database "
@ -1357,13 +1402,10 @@ class LinkRowFieldType(FieldType):
link_row_table has changed and if it is within the same database.
"""
if (
"link_row_table" not in to_field_values
or not to_field_values["link_row_table"]
):
link_row_table = to_field_values.get("link_row_table")
if link_row_table is None:
return
link_row_table = to_field_values["link_row_table"]
table = from_field.table
if from_field.table.database_id != link_row_table.database_id:
@ -1660,7 +1702,7 @@ class LinkRowFieldType(FieldType):
self, old_field: LinkRowField, new_field_attrs: Dict[str, Any]
) -> bool:
new_link_row_table_id = new_field_attrs.get(
"link_row_table", old_field.link_row_table_id
"link_row_table_id", old_field.link_row_table_id
)
return old_field.link_row_table_id != new_link_row_table_id

View file

@ -42,8 +42,14 @@ from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.handler import ViewHandler
from baserow.core.trash.exceptions import RelatedTableTrashedException
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed, find_unused_name, set_allowed_attrs
from baserow.core.utils import (
ChildProgressBuilder,
extract_allowed,
find_unused_name,
set_allowed_attrs,
)
from .backup_handler import FieldDataBackupHandler
from .dependencies.handler import FieldDependencyHandler
from .dependencies.update_collector import FieldUpdateCollector
from .exceptions import (
@ -560,6 +566,65 @@ class FieldHandler:
else:
return field
def duplicate_field(
self,
user: AbstractUser,
field: Field,
duplicate_data: bool = False,
progress_builder: Optional[ChildProgressBuilder] = None,
) -> Tuple[Field, List[Field]]:
"""
Duplicates an existing field instance.
:param user: The user on whose behalf the field is duplicated.
:param field: The field instance that needs to be duplicated.
:param duplicate_data: Whether or not the data of the field should be
duplicated as well.
:param progress_builder: A progress builder object that can be used to
report progress.
:raises ValueError: When the provided field is not an instance of Field.
:return: A tuple with the duplicated field instance and a list of the fields
that have been updated.
"""
if not isinstance(field, Field):
raise ValueError("The field is not an instance of Field")
progress = ChildProgressBuilder.build(progress_builder, child_total=3)
database = field.table.database
database.group.has_user(user, raise_error=True)
specific_field = field.specific
field_type = field_type_registry.get_by_model(specific_field)
serialized_field = field_type.export_serialized(specific_field)
progress.increment()
new_name = self.find_next_unused_field_name(
field.table,
[serialized_field.pop("name")],
)
# remove properties that are unique to the field
for key in ["id", "order", "primary"]:
serialized_field.pop(key, None)
new_field, updated_fields = self.create_field(
user,
field.table,
field_type.type,
primary=False,
name=new_name,
return_updated_fields=True,
**serialized_field,
)
progress.increment()
if duplicate_data and not field_type.read_only:
FieldDataBackupHandler.duplicate_field_data(field, new_field)
progress.increment()
return new_field, updated_fields
def delete_field(
self,
user: AbstractUser,

View file

@ -0,0 +1,77 @@
from rest_framework import serializers
from baserow.api.errors import ERROR_GROUP_DOES_NOT_EXIST, ERROR_USER_NOT_IN_GROUP
from baserow.contrib.database.api.fields.serializers import (
FieldSerializer,
FieldSerializerWithRelatedFields,
)
from baserow.contrib.database.db.atomic import (
read_repeatable_read_single_table_transaction,
)
from baserow.contrib.database.fields.actions import DuplicateFieldActionType
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import DuplicateFieldJob
from baserow.core.action.registries import action_type_registry
from baserow.core.exceptions import GroupDoesNotExist, UserNotInGroup
from baserow.core.jobs.registries import JobType
class DuplicateFieldJobType(JobType):
type = "duplicate_field"
model_class = DuplicateFieldJob
max_count = 1
api_exceptions_map = {
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
}
request_serializer_field_names = ["field_id"]
request_serializer_field_overrides = {
"field_id": serializers.IntegerField(
help_text="The ID of the field to duplicate.",
),
"duplicate_data": serializers.BooleanField(
help_text="Whether to duplicate the data of the field.",
default=False,
),
}
serializer_field_names = ["original_field", "duplicated_field"]
serializer_field_overrides = {
"original_field": FieldSerializer(read_only=True),
"duplicated_field": FieldSerializerWithRelatedFields(read_only=True),
}
def transaction_atomic_context(self, job: "DuplicateFieldJob"):
return read_repeatable_read_single_table_transaction(
job.original_field.table.id
)
def prepare_values(self, values, user):
field = FieldHandler().get_field(values["field_id"])
field.table.database.group.has_user(user, raise_error=True)
return {
"original_field": field,
"duplicate_data": values.get("duplicate_data", False),
}
def run(self, job, progress):
duplicate_field_action_type = action_type_registry.get_by_type(
DuplicateFieldActionType
)
new_field_clone, updated_fields = duplicate_field_action_type.do(
job.user,
job.original_field,
job.duplicate_data,
progress.create_child_builder(represents_progress=progress.total),
)
# update the job with the new duplicated field instance
job.duplicated_field = new_field_clone
job.save(update_fields=("duplicated_field",))
return new_field_clone, updated_fields

View file

@ -19,6 +19,8 @@ from baserow.contrib.database.formula import (
)
from baserow.contrib.database.mixins import ParentFieldTrashableModelMixin
from baserow.contrib.database.table.cache import invalidate_table_in_model_cache
from baserow.core.jobs.mixins import JobWithUndoRedoIds, JobWithWebsocketId
from baserow.core.jobs.models import Job
from baserow.core.mixins import (
CreatedAndUpdatedOnMixin,
OrderableMixin,
@ -500,4 +502,26 @@ class LookupField(FormulaField):
)
class DuplicateFieldJob(JobWithWebsocketId, JobWithUndoRedoIds, Job):
original_field = models.ForeignKey(
Field,
null=True,
related_name="duplicated_by_jobs",
on_delete=models.SET_NULL,
help_text="The Baserow field to duplicate.",
)
duplicate_data = models.BooleanField(
default=False,
help_text="Indicates if the data of the field should be duplicated.",
)
duplicated_field = models.OneToOneField(
Field,
null=True,
related_name="duplicated_from_jobs",
on_delete=models.SET_NULL,
help_text="The duplicated Baserow field.",
)
SpecificFieldForUpdate = NewType("SpecificFieldForUpdate", Field)

View file

@ -2,7 +2,7 @@ from django.db import models
from baserow.contrib.database.models import Database
from baserow.contrib.database.table.models import Table
from baserow.core.jobs.mixins import JobWithUserDataMixin
from baserow.core.jobs.mixins import JobWithUndoRedoIds, JobWithWebsocketId
from baserow.core.jobs.models import Job
@ -20,8 +20,7 @@ def default_report():
return {"failing_rows": {}}
class FileImportJob(JobWithUserDataMixin, Job):
user_data_to_save = ["user_websocket_id"]
class FileImportJob(JobWithWebsocketId, JobWithUndoRedoIds, Job):
database = models.ForeignKey(
Database,

View file

@ -0,0 +1,104 @@
# Generated by Django 3.2.13 on 2022-09-02 21:29
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("core", "0031_duplicateapplicationjob_user_action_group_id"),
("database", "0086_formview_mode"),
]
operations = [
migrations.AddField(
model_name="duplicatetablejob",
name="user_action_group_id",
field=models.CharField(
help_text="The user session uuid needed for undo/redo action group functionality.",
max_length=36,
null=True,
),
),
migrations.AddField(
model_name="fileimportjob",
name="user_action_group_id",
field=models.CharField(
help_text="The user session uuid needed for undo/redo action group functionality.",
max_length=36,
null=True,
),
),
migrations.CreateModel(
name="DuplicateFieldJob",
fields=[
(
"job_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="core.job",
),
),
(
"user_websocket_id",
models.CharField(
help_text="The user websocket uuid needed to manage signals sent correctly.",
max_length=36,
null=True,
),
),
(
"user_session_id",
models.CharField(
help_text="The user session uuid needed for undo/redo functionality.",
max_length=36,
null=True,
),
),
(
"user_action_group_id",
models.CharField(
help_text="The user session uuid needed for undo/redo action group functionality.",
max_length=36,
null=True,
),
),
(
"duplicate_data",
models.BooleanField(
default=False,
help_text="Indicates if the data of the field should be duplicated.",
),
),
(
"duplicated_field",
models.OneToOneField(
help_text="The duplicated Baserow field.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="duplicated_from_jobs",
to="database.field",
),
),
(
"original_field",
models.ForeignKey(
help_text="The Baserow field to duplicate.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="duplicated_by_jobs",
to="database.field",
),
),
],
options={
"abstract": False,
},
bases=("core.job", models.Model),
),
]

View file

@ -59,7 +59,7 @@ class CreateTableActionType(ActionType):
)
params = cls.Params(table.id)
cls.register_action(user, params, cls.scope(table.database.id))
cls.register_action(user, params, cls.scope(database.id))
return table, error_report
@ -245,11 +245,11 @@ class DuplicateTableActionType(ActionType):
progress_builder: Optional[ChildProgressBuilder] = None,
) -> Table:
"""
Duplicate the table.
Duplicate a table.
Undoing this action trashes the duplicated table and redoing restores it.
:param user: The user on whose behalf the table is duplicated.
:param table: The name of the table is created.
:param table: The table instance to duplicate.
:param progress_builder: A progress builder instance that can be used to
track the progress of the duplication.
:return: The duplicated table instance.

View file

@ -24,7 +24,7 @@ from baserow.contrib.database.table.cache import (
from baserow.contrib.database.views.exceptions import ViewFilterTypeNotAllowedForField
from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.core.db import specific_iterator
from baserow.core.jobs.mixins import JobWithUserDataMixin
from baserow.core.jobs.mixins import JobWithUndoRedoIds, JobWithWebsocketId
from baserow.core.jobs.models import Job
from baserow.core.mixins import (
CreatedAndUpdatedOnMixin,
@ -658,9 +658,7 @@ class Table(
return f"tbl_order_id_{self.id}_idx"
class DuplicateTableJob(JobWithUserDataMixin, Job):
user_data_to_save = ["user_websocket_id"]
class DuplicateTableJob(JobWithWebsocketId, JobWithUndoRedoIds, Job):
original_table = models.ForeignKey(
Table,
@ -669,6 +667,7 @@ class DuplicateTableJob(JobWithUserDataMixin, Job):
on_delete=models.SET_NULL,
help_text="The Baserow table to duplicate.",
)
duplicated_table = models.OneToOneField(
Table,
null=True,

View file

@ -4,25 +4,68 @@ from django.contrib.auth.models import AbstractUser
from django.db import models
from baserow.api.sessions import (
get_client_undo_redo_action_group_id,
get_untrusted_client_session_id,
set_client_undo_redo_action_group_id,
set_untrusted_client_session_id,
)
class JobWithUserDataMixin(models.Model):
"""
This mixin makes it possible to add information about the user session to the job
in order to handle the undo/redo functionality and the signal updates correctly.
NOTE: it only works when used in subclasses of Job.
This mixin makes it possible to add information about the user session to the
job in order to handle the undo/redo functionality and the signal updates
correctly. NOTE: This is an abstract mixin. Extend from JobWithWebsocketId
and JobWithUndoRedoIds in your concrete Job model classes instead.
"""
user_data_to_save = ["user_session_id", "user_websocket_id"]
def save_user_data_if_not_present(self, user: AbstractUser) -> None:
"""
Save the user session data in the job so that it can be restored later.
Call this in a request context and not from a celery job or other contexts.
user_session_id = models.CharField(
max_length=36,
null=True,
help_text="The user session uuid needed for undo/redo functionality.",
)
:param user: The user to save the data for.
"""
# Call _save_user_data_if_not_present for all the base classes that define it.
for cls in self.__class__.__bases__:
if hasattr(cls, "_save_user_data_if_not_present"):
cls._save_user_data_if_not_present(self, self.user)
def restore_user_data_if_present(self, user: AbstractUser) -> None:
"""
Restore the user session data stored in the job onto the given user.
:param user: The user to restore the data for.
"""
# Call _restore_user_data_if_present for all the base classes that define it.
for cls in self.__class__.__bases__:
if hasattr(cls, "_restore_user_data_if_present"):
cls._restore_user_data_if_present(self, user)
def save(self, *args, **kwargs):
self.save_user_data_if_not_present(self.user)
return super().save(*args, **kwargs)
def __getattribute__(self, name: str) -> Any:
value = super().__getattribute__(name)
if name == "user":
user = value
self.restore_user_data_if_present(user)
return value
class Meta:
abstract = True
class JobWithWebsocketId(JobWithUserDataMixin):
"""
This mixin adds the websocket id to the job so that actions and handlers can
use it to send websocket messages to the client accordingly.
"""
user_websocket_id = models.CharField(
max_length=36,
@ -34,35 +77,70 @@ class JobWithUserDataMixin(models.Model):
"""
Save the user session data in the job so that it can be restored later.
Call this in a request context and not from a celery job or other contexts.
:param user: The user to save the data for.
"""
if self.user_session_id is None and "user_session_id" in self.user_data_to_save:
self.user_session_id = get_untrusted_client_session_id(user)
if (
self.user_websocket_id is None
and "user_websocket_id" in self.user_data_to_save
):
if getattr(self, "user_websocket_id") is None:
self.user_websocket_id = getattr(user, "web_socket_id", None)
def _restore_user_data_if_present(self, user: AbstractUser) -> None:
if self.user_session_id and "user_session_id" in self.user_data_to_save:
set_untrusted_client_session_id(user, self.user_session_id)
"""
Restore the user session data stored in the job onto the given user.
if self.user_websocket_id and "user_websocket_id" in self.user_data_to_save:
:param user: The user to restore the data for.
"""
if getattr(self, "user_websocket_id") is not None:
user.web_socket_id = self.user_websocket_id
def save(self, *args, **kwargs):
self._save_user_data_if_not_present(self.user)
return super().save(*args, **kwargs)
def __getattribute__(self, name: str) -> Any:
value = super().__getattribute__(name)
if name == "user":
self._restore_user_data_if_present(value)
return value
class Meta:
abstract = True
class JobWithUndoRedoIds(JobWithUserDataMixin):
"""
This mixin adds the ids needed for the undo/redo functionality to
work with the code called from the job.
"""
user_session_id = models.CharField(
max_length=36,
null=True,
help_text="The user session uuid needed for undo/redo functionality.",
)
user_action_group_id = models.CharField(
max_length=36,
null=True,
help_text="The user session uuid needed for undo/redo action group functionality.",
)
def _save_user_data_if_not_present(self, user: AbstractUser) -> None:
"""
Save the user session data in the job so that it can be restored later.
Call this in a request context and not from a celery job or other contexts.
:param user: The user to save the data for.
"""
if getattr(self, "user_session_id") is None:
self.user_session_id = get_untrusted_client_session_id(user)
if getattr(self, "user_action_group_id") is None:
self.user_action_group_id = get_client_undo_redo_action_group_id(user)
def _restore_user_data_if_present(self, user: AbstractUser) -> None:
"""
Restore the user session data stored in the job onto the given user.
:param user: The user to restore the data for.
"""
if getattr(self, "user_session_id") is not None:
set_untrusted_client_session_id(user, self.user_session_id)
if getattr(self, "user_action_group_id") is not None:
set_client_undo_redo_action_group_id(user, self.user_action_group_id)
class Meta:
abstract = True
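
A minimal sketch of how a concrete job model composes these mixins after this refactor, mirroring DuplicateTableJob and DuplicateFieldJob elsewhere in this commit (the model name and extra field are hypothetical):

from django.db import models

from baserow.core.jobs.mixins import JobWithUndoRedoIds, JobWithWebsocketId
from baserow.core.jobs.models import Job


class ExampleExportJob(JobWithWebsocketId, JobWithUndoRedoIds, Job):
    # JobWithWebsocketId contributes `user_websocket_id`; JobWithUndoRedoIds
    # contributes `user_session_id` and `user_action_group_id`. Each mixin
    # saves its own ids on save() and restores them onto `job.user` on access.
    exported_file_name = models.CharField(max_length=255, null=True)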

View file

@ -0,0 +1,22 @@
# Generated by Django 3.2.13 on 2022-09-02 21:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("core", "0030_snapshots"),
]
operations = [
migrations.AddField(
model_name="duplicateapplicationjob",
name="user_action_group_id",
field=models.CharField(
help_text="The user session uuid needed for undo/redo action group functionality.",
max_length=36,
null=True,
),
),
]

View file

@ -9,7 +9,7 @@ from django.db.models import Q, UniqueConstraint
from rest_framework.exceptions import NotAuthenticated
from baserow.core.jobs.mixins import JobWithUserDataMixin
from baserow.core.jobs.mixins import JobWithUndoRedoIds, JobWithWebsocketId
from baserow.core.jobs.models import Job
from baserow.core.user_files.models import UserFile
@ -420,9 +420,7 @@ class TrashEntry(models.Model):
]
class DuplicateApplicationJob(JobWithUserDataMixin, Job):
user_data_to_save = ["user_websocket_id"]
class DuplicateApplicationJob(JobWithWebsocketId, JobWithUndoRedoIds, Job):
original_application = models.ForeignKey(
Application,

View file

@ -134,9 +134,7 @@ class CustomFieldsInstanceMixin:
base_class=base_class, request_serializer=request
)
return serializer_class(
model_instance, context={"instance_type": self, **context}, **kwargs
)
return serializer_class(model_instance, context=context, **kwargs)
class APIUrlsInstanceMixin:

View file

@ -1,4 +1,3 @@
import contextlib
from contextlib import contextmanager
from decimal import Decimal
from typing import Any, Dict, List, Optional, Type
@ -10,6 +9,7 @@ from django.utils.timezone import make_aware, utc
import psycopg2
from freezegun import freeze_time
from pytest_unordered import unordered
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
@ -392,7 +392,7 @@ def assert_undo_redo_actions_fails_with_error(
assert action.error is not None, "Action has no error, but should have one"
@contextlib.contextmanager
@contextmanager
def independent_test_db_connection():
d = connection.settings_dict
conn = psycopg2.connect(
@ -405,3 +405,38 @@ def independent_test_db_connection():
conn.autocommit = False
yield conn
conn.close()
def assert_serialized_field_values_are_the_same(
value_1, value_2, ordered=False, field_name=None
):
if isinstance(value_1, list) and not ordered:
assert unordered(value_1, value_2)
else:
assert value_1 == value_2, f"{field_name or 'error'}: {value_1} != {value_2}"
def extract_serialized_field_value(field_value):
if not field_value:
return field_value
def extract_value(value):
if isinstance(value, dict):
if "name" in value:
return value["name"]
return value["value"]
return value
if isinstance(field_value, list):
return [extract_value(value) for value in field_value]
return extract_value(field_value)
def assert_serialized_rows_contain_same_values(row_1, row_2):
for field_name, row_field_value in row_1.items():
row_1_value = extract_serialized_field_value(row_field_value)
row_2_value = extract_serialized_field_value(row_2[field_name])
assert_serialized_field_values_are_the_same(
row_1_value, row_2_value, field_name=field_name
)

View file

@ -4,6 +4,7 @@ from django.shortcuts import reverse
import pytest
from rest_framework.status import (
HTTP_200_OK,
HTTP_202_ACCEPTED,
HTTP_204_NO_CONTENT,
HTTP_400_BAD_REQUEST,
HTTP_401_UNAUTHORIZED,
@ -13,7 +14,10 @@ from rest_framework.status import (
from baserow.contrib.database.fields.models import Field, NumberField, TextField
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.test_utils.helpers import independent_test_db_connection
from baserow.test_utils.helpers import (
independent_test_db_connection,
setup_interesting_test_table,
)
@pytest.mark.django_db
@ -757,3 +761,139 @@ def test_create_field_returns_with_error_if_cant_lock_table_if_locked_for_key_sh
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_async_duplicate_field(api_client, data_fixture):
user_1, token_1 = data_fixture.create_user_and_token(
email="test_1@test.nl", password="password", first_name="Test1"
)
group_1 = data_fixture.create_group(user=user_1)
_, token_2 = data_fixture.create_user_and_token(
email="test_2@test.nl", password="password", first_name="Test2"
)
_, token_3 = data_fixture.create_user_and_token(
email="test_3@test.nl",
password="password",
first_name="Test3",
group=group_1,
)
database = data_fixture.create_database_application(group=group_1)
table_1, _, _, _ = setup_interesting_test_table(
data_fixture, database=database, user=user_1
)
field_set = table_1.field_set.all()
original_field_count = field_set.count()
primary_field = field_set.get(primary=True)
# a user cannot duplicate a field if they do not belong to the same group
response = api_client.post(
reverse(
"api:database:fields:async_duplicate", kwargs={"field_id": primary_field.id}
),
format="json",
HTTP_AUTHORIZATION=f"JWT {token_2}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
# user cannot duplicate a non-existent field
response = api_client.post(
reverse("api:database:fields:async_duplicate", kwargs={"field_id": 99999}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token_1}",
)
assert response.status_code == HTTP_404_NOT_FOUND
assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST"
# a user can duplicate a field created by another user in the same group
response = api_client.post(
reverse(
"api:database:fields:async_duplicate", kwargs={"field_id": primary_field.id}
),
{"duplicate_data": False},
format="json",
HTTP_AUTHORIZATION=f"JWT {token_3}",
)
assert response.status_code == HTTP_202_ACCEPTED
job = response.json()
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "duplicate_field"
# check that the job has now ended correctly and the field was duplicated
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
HTTP_AUTHORIZATION=f"JWT {token_3}",
)
assert response.status_code == HTTP_200_OK
job = response.json()
assert job["state"] == "finished"
assert job["type"] == "duplicate_field"
assert job["original_field"]["id"] == primary_field.id
assert job["duplicated_field"]["id"] > primary_field.id
assert job["duplicated_field"]["name"] == f"{primary_field.name} 2"
# check that the table is accessible and has one more column
rows_url = reverse("api:database:rows:list", kwargs={"table_id": table_1.id})
response = api_client.get(
f"{rows_url}?user_field_names=true",
format="json",
HTTP_AUTHORIZATION=f"JWT {token_1}",
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert len(response_json["results"]) > 0
assert field_set.count() == original_field_count + 1
for row in response_json["results"]:
assert row[f"{primary_field.name} 2"] is None
# user can duplicate a field with data
response = api_client.post(
reverse(
"api:database:fields:async_duplicate", kwargs={"field_id": primary_field.id}
),
{"duplicate_data": True},
format="json",
HTTP_AUTHORIZATION=f"JWT {token_3}",
)
assert response.status_code == HTTP_202_ACCEPTED
job = response.json()
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "duplicate_field"
# check that the job has now ended correctly and the field was duplicated
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
HTTP_AUTHORIZATION=f"JWT {token_3}",
)
assert response.status_code == HTTP_200_OK
job = response.json()
assert job["state"] == "finished"
assert job["type"] == "duplicate_field"
assert job["original_field"]["id"] == primary_field.id
assert job["duplicated_field"]["id"] > primary_field.id
assert job["duplicated_field"]["name"] == f"{primary_field.name} 3"
# check that the table is accessible and has one more column
rows_url = reverse("api:database:rows:list", kwargs={"table_id": table_1.id})
response = api_client.get(
f"{rows_url}?user_field_names=true",
format="json",
HTTP_AUTHORIZATION=f"JWT {token_1}",
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert len(response_json["results"]) > 0
assert field_set.count() == original_field_count + 2
for row in response_json["results"]:
assert row[f"{primary_field.name} 3"] == row[primary_field.name]
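For reference (not part of this commit), the same flow can be exercised from any HTTP client. A minimal sketch, assuming a local Baserow instance, placeholder token and field id, and that the job status route used above ("api:jobs:item") is exposed at /api/jobs/<job_id>/:
import time

import requests

BASE_URL = "http://localhost:8000"  # assumption: local Baserow instance
HEADERS = {"Authorization": "JWT <your-token>"}  # placeholder token
FIELD_ID = 42  # placeholder field id

# Start the asynchronous duplication, copying cell values as well.
response = requests.post(
    f"{BASE_URL}/api/database/fields/{FIELD_ID}/duplicate/async/",
    json={"duplicate_data": True},
    headers=HEADERS,
)
job = response.json()  # 202 ACCEPTED, state "pending", type "duplicate_field"

# Poll the job until it reaches a terminal state.
while job["state"] not in ("finished", "failed"):
    time.sleep(1)
    job = requests.get(f"{BASE_URL}/api/jobs/{job['id']}/", headers=HEADERS).json()

if job["state"] == "finished":
    print(job["duplicated_field"]["name"])  # e.g. "<original field name> 2"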

View file

@@ -6,7 +6,6 @@ from django.shortcuts import reverse
from django.test.utils import CaptureQueriesContext, override_settings
import pytest
from pytest_unordered import unordered
from rest_framework.status import (
HTTP_200_OK,
HTTP_202_ACCEPTED,
@@ -18,6 +17,7 @@ from rest_framework.status import (
from baserow.contrib.database.file_import.models import FileImportJob
from baserow.contrib.database.table.models import Table
from baserow.test_utils.helpers import (
assert_serialized_rows_contain_same_values,
independent_test_db_connection,
setup_interesting_test_table,
)
@@ -591,46 +591,5 @@ def test_async_duplicate_table(api_client, data_fixture):
assert len(response_json["results"]) > 0
duplicated_rows = response_json["results"]
def assert_row_field_value(
field_name, duplicated_value, original_value, ordered=True
):
if ordered:
assert (
duplicated_value == original_value
), f"{field_name}: {duplicated_value} != {original_value}"
else:
assert unordered(duplicated_value, original_value)
for original_row, duplicated_row in zip(original_rows, duplicated_rows):
for field_name, original_value in original_row.items():
if not original_value:
assert_row_field_value(
field_name, duplicated_row[field_name], original_value
)
elif field_name in ["single_select", "formula_singleselect"]:
assert_row_field_value(
field_name,
duplicated_row[field_name]["value"],
original_value["value"],
)
elif field_name in ["multiple_select", "lookup"] or field_name.endswith(
"_link_row"
):
assert_row_field_value(
field_name,
[v["value"] for v in duplicated_row[field_name]],
[v["value"] for v in original_value],
ordered=False,
)
elif field_name == "file":
assert_row_field_value(
field_name,
[f["name"] for f in duplicated_row[field_name]],
[f["name"] for f in original_value],
ordered=False,
)
else:
assert_row_field_value(
field_name, duplicated_row[field_name], original_value
)
assert_serialized_rows_contain_same_values(original_row, duplicated_row)
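The inline assertions above were extracted into the shared assert_serialized_rows_contain_same_values helper. A minimal sketch of what such a helper could look like, reconstructed from the removed inline logic; the actual implementation in baserow.test_utils.helpers may differ (for example, it may rely on pytest_unordered instead of the Counter-based comparison used here):
from collections import Counter


def assert_serialized_rows_contain_same_values(row_a, row_b):
    """Sketch only: compare two serialized rows field by field, ignoring the
    ordering of multi-value fields (multiple selects, lookups, link rows, files)."""

    def assert_same_items(field_name, items_a, items_b):
        # Compare as multisets so the ordering of the serialized items is ignored.
        assert Counter(map(str, items_a)) == Counter(
            map(str, items_b)
        ), f"{field_name}: {items_a} != {items_b}"

    for field_name, value_a in row_a.items():
        value_b = row_b[field_name]
        if not value_a:
            assert value_b == value_a, f"{field_name}: {value_b} != {value_a}"
        elif field_name in ["single_select", "formula_singleselect"]:
            assert value_b["value"] == value_a["value"]
        elif field_name in ["multiple_select", "lookup"] or field_name.endswith(
            "_link_row"
        ):
            assert_same_items(
                field_name,
                [v["value"] for v in value_a],
                [v["value"] for v in value_b],
            )
        elif field_name == "file":
            assert_same_items(
                field_name,
                [f["name"] for f in value_a],
                [f["name"] for f in value_b],
            )
        else:
            assert value_b == value_a, f"{field_name}: {value_b} != {value_a}"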

View file

@@ -198,7 +198,7 @@ def test_dependencies_for_link_row_link_row_self_reference(data_fixture):
table=table_a,
type_name="link_row",
name="self",
link_row_table=table_a.id,
link_row_table=table_a,
)
assert when_field_updated(table_a_primary) == causes(
a_field_update_for(field=table_a_self_link, via=[table_a_self_link])

View file

@@ -8,10 +8,15 @@ import pytest
from pytest_unordered import unordered
from rest_framework.status import HTTP_200_OK
from baserow.contrib.database.fields.actions import UpdateFieldActionType
from baserow.contrib.database.fields.actions import (
DuplicateFieldActionType,
UpdateFieldActionType,
)
from baserow.contrib.database.fields.exceptions import FieldDoesNotExist
from baserow.contrib.database.fields.field_types import TextFieldType
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import (
Field,
LinkRowField,
MultipleSelectField,
NumberField,
@@ -26,7 +31,9 @@ from baserow.core.action.registries import action_type_registry
from baserow.core.models import GROUP_USER_PERMISSION_ADMIN
from baserow.core.trash.handler import TrashHandler
from baserow.test_utils.helpers import (
assert_serialized_field_values_are_the_same,
assert_undo_redo_actions_are_valid,
extract_serialized_field_value,
setup_interesting_test_table,
)
@@ -1309,3 +1316,62 @@ def test_can_undo_updating_max_value_of_rating_field(
Decimal("2"),
Decimal("1"),
]
@pytest.mark.django_db
@pytest.mark.undo_redo
def test_can_undo_redo_duplicate_fields_of_interesting_table(api_client, data_fixture):
session_id = "session-id"
user, token = data_fixture.create_user_and_token(session_id=session_id)
database = data_fixture.create_database_application(user=user)
field_handler = FieldHandler()
table, _, _, _ = setup_interesting_test_table(data_fixture, user, database)
original_field_set = list(table.field_set.all())
for field in original_field_set:
duplicated_field, updated_fields = action_type_registry.get_by_type(
DuplicateFieldActionType
).do(user, field, duplicate_data=True)
assert field_handler.get_field(duplicated_field.id).name == f"{field.name} 2"
actions_undone = ActionHandler.undo(
user, [DuplicateFieldActionType.scope(table_id=field.table_id)], session_id
)
assert_undo_redo_actions_are_valid(actions_undone, [DuplicateFieldActionType])
with pytest.raises(FieldDoesNotExist):
field_handler.get_field(duplicated_field.id)
actions_redone = ActionHandler.redo(
user, [DuplicateFieldActionType.scope(table_id=field.table_id)], session_id
)
assert_undo_redo_actions_are_valid(actions_redone, [DuplicateFieldActionType])
assert field_handler.get_field(duplicated_field.id).name == f"{field.name} 2"
# check that the field values have been duplicated correctly
response = api_client.get(
reverse("api:database:rows:list", kwargs={"table_id": table.id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert len(response_json["results"]) > 0
assert table.field_set.count() == len(original_field_set) * 2
for row in response_json["results"]:
for field in original_field_set:
row_1_value = extract_serialized_field_value(row[field.db_column])
duplicated_field = Field.objects.get(
table_id=table.id, name=f"{field.name} 2"
)
row_2_value = extract_serialized_field_value(
row[duplicated_field.db_column]
)
assert_serialized_field_values_are_the_same(
row_1_value,
row_2_value,
field_name=field.name,
)

View file

@@ -449,7 +449,7 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
# Try to make a relation with a table from another database
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": "Link", "type": "link_row", "link_row_table": unrelated_table_1.id},
{"name": "Link", "type": "link_row", "link_row_table_id": unrelated_table_1.id},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
@@ -461,7 +461,7 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
# Try to make a relation with a table that we don't have access to.
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": "Link", "type": "link_row", "link_row_table": unrelated_table_2.id},
{"name": "Link", "type": "link_row", "link_row_table_id": unrelated_table_2.id},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
@@ -482,7 +482,7 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
assert response_json["error"] == "ERROR_LINK_ROW_TABLE_NOT_PROVIDED"
assert LinkRowField.objects.all().count() == 0
# Create new link row field type.
# Create new link row field type using the deprecated `link_row_table` parameter.
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{
@@ -497,18 +497,18 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response.status_code == HTTP_200_OK, response_json
assert response_json["name"] == "Link 1"
assert response_json["type"] == "link_row"
assert response_json["link_row_table"] == customers_table.id
assert response_json["link_row_table_id"] == customers_table.id
assert LinkRowField.objects.all().count() == 2
field_id = response_json["id"]
field = LinkRowField.objects.all().order_by("id").first()
related_field = LinkRowField.objects.all().order_by("id").last()
assert response_json["link_row_related_field"] == related_field.id
assert response_json["link_row_related_field"] != 999999
assert response_json["link_row_related_field_id"] == related_field.id
assert response_json["link_row_related_field_id"] != 999999
# Check that the fields are correctly linked.
assert field.table.id == table.id
@@ -526,8 +526,8 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["name"] == "Link 1"
assert response_json["type"] == "link_row"
assert response_json["link_row_table"] == customers_table.id
assert response_json["link_row_related_field"] == related_field.id
assert response_json["link_row_table_id"] == customers_table.id
assert response_json["link_row_related_field_id"] == related_field.id
# Just fetch the related field and check that it has the correct values.
response = api_client.get(
@@ -537,8 +537,8 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json["name"] == "Example"
assert response_json["link_row_table"] == table.id
assert response_json["link_row_related_field"] == field.id
assert response_json["link_row_table_id"] == table.id
assert response_json["link_row_related_field_id"] == field.id
# Only updating the name of the field without changing anything else
response = api_client.patch(
@@ -551,14 +551,14 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["name"] == "Link new name"
assert response_json["type"] == "link_row"
assert response_json["link_row_table"] == customers_table.id
assert response_json["link_row_related_field"] == related_field.id
assert response_json["link_row_table_id"] == customers_table.id
assert response_json["link_row_related_field_id"] == related_field.id
# Only try to update the link_row_related_field, but this is a read only field so
# nothing should happen.
response = api_client.patch(
reverse("api:database:fields:item", kwargs={"field_id": field_id}),
{"link_row_related_field": 9999},
{"link_row_related_field_id": 9999},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
@@ -566,12 +566,12 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["name"] == "Link new name"
assert response_json["type"] == "link_row"
assert response_json["link_row_table"] == customers_table.id
assert response_json["link_row_related_field"] == related_field.id
assert response_json["link_row_table_id"] == customers_table.id
assert response_json["link_row_related_field_id"] == related_field.id
response = api_client.patch(
reverse("api:database:fields:item", kwargs={"field_id": field_id}),
{"link_row_table": cars_table.id},
{"link_row_table_id": cars_table.id},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
@@ -579,8 +579,8 @@ def test_link_row_field_type_api_views(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["name"] == "Link new name"
assert response_json["type"] == "link_row"
assert response_json["link_row_table"] == cars_table.id
assert response_json["link_row_related_field"] == related_field.id
assert response_json["link_row_table_id"] == cars_table.id
assert response_json["link_row_related_field_id"] == related_field.id
field.refresh_from_db()
related_field.refresh_from_db()

View file

@@ -9,6 +9,7 @@ from baserow.core.action.handler import ActionHandler
from baserow.core.action.scopes import ApplicationActionScopeType
from baserow.core.jobs.constants import JOB_FINISHED
from baserow.core.jobs.handler import JobHandler
from baserow.test_utils.helpers import assert_undo_redo_actions_are_valid
@pytest.mark.django_db(transaction=True)
@@ -46,7 +47,7 @@ def test_can_submit_duplicate_table_job(data_fixture):
@pytest.mark.django_db(transaction=True)
@pytest.mark.undo_redo
def test_cannot_undo_duplicate_table_job(data_fixture):
def test_can_undo_duplicate_table_job(data_fixture):
session_id = "session-id"
user = data_fixture.create_user(session_id=session_id)
database = data_fixture.create_database_application(user=user)
@@ -67,4 +68,4 @@ def test_cannot_undo_duplicate_table_job(data_fixture):
[ApplicationActionScopeType.value(application_id=database.id)],
session_id,
)
assert actions_undone == []
assert_undo_redo_actions_are_valid(actions_undone, [DuplicateTableJobType])

View file

@@ -11,6 +11,7 @@ from baserow.core.jobs.constants import JOB_FINISHED
from baserow.core.jobs.handler import JobHandler
from baserow.core.jobs.tasks import run_async_job
from baserow.core.models import Application
from baserow.test_utils.helpers import assert_undo_redo_actions_are_valid
@pytest.mark.django_db
@@ -102,7 +103,7 @@ def test_can_submit_duplicate_application_job(data_fixture):
@pytest.mark.django_db(transaction=True)
def test_cannot_undo_duplicate_application_job(data_fixture):
def test_can_undo_duplicate_application_job(data_fixture):
session_id = "session-id"
user = data_fixture.create_user(session_id=session_id)
group = data_fixture.create_group(user=user)
@@ -127,4 +128,6 @@ def test_cannot_undo_duplicate_application_job(data_fixture):
user, [GroupActionScopeType.value(group_id=group.id)], session_id
)
assert actions_undone == []
assert_undo_redo_actions_are_valid(
actions_undone, [DuplicateApplicationJobType]
)

View file

@@ -23,6 +23,7 @@ For example:
* Sort fields on row select modal by the order of the first view in the related table. [#1062](https://gitlab.com/bramw/baserow/-/issues/1062)
* New signals `user_updated`, `user_deleted`, `user_restored`, `user_permanently_deleted` were added to track user changes.
* `list_groups` endpoint now also returns the list of all group users for each group.
* Fields can now also be duplicated together with their cell values. [#964](https://gitlab.com/bramw/baserow/-/issues/964)
### Bug Fixes
* Resolve circular dependency in `FieldWithFiltersAndSortsSerializer` [#1113](https://gitlab.com/bramw/baserow/-/issues/1113)

View file

@@ -7,10 +7,14 @@
<form @submit.prevent="duplicateField()">
<div class="control margin-bottom-1">
<div class="control__elements">
<Checkbox v-model="duplicateData" :disabled="true">
{{ $t('duplicateFieldContext.cloneData') }} ({{
$t('duplicateFieldContext.soon')
}})
<Checkbox v-model="duplicateData" :disabled="formFieldTypeIsReadOnly">
{{
$t(
formFieldTypeIsReadOnly
? 'duplicateFieldContext.readOnlyField'
: 'duplicateFieldContext.cloneData'
)
}}
</Checkbox>
</div>
</div>
@@ -29,14 +33,16 @@
<script>
import { mapGetters } from 'vuex'
import { notifyIf } from '@baserow/modules/core/utils/error'
import { createNewUndoRedoActionGroupId } from '@baserow/modules/database/utils/action'
import FieldService from '@baserow/modules/database/services/field'
import jobProgress from '@baserow/modules/core/mixins/jobProgress'
import error from '@baserow/modules/core/mixins/error'
import modal from '@baserow/modules/core/mixins/modal'
import { clone } from '@baserow/modules/core/utils/object'
import { createNewUndoRedoActionGroupId } from '@baserow/modules/database/utils/action'
export default {
name: 'DuplicateFieldModal',
mixins: [modal, error],
mixins: [modal, error, jobProgress],
props: {
table: {
type: Object,
@@ -50,66 +56,101 @@ export default {
data() {
return {
loading: false,
duplicateData: false,
duplicateData: true,
actionGroupId: null,
}
},
computed: {
existingFieldName() {
return this.fields.map((field) => field.name)
},
formFieldTypeIsReadOnly() {
return this.$registry.get('field', this.fromField.type).isReadOnly
},
...mapGetters({
fields: 'field/getAll',
}),
},
methods: {
async duplicateField() {
this.hideError()
this.loading = true
const values = clone(this.fromField)
const type = values.type
delete values.type
delete values.id
values.primary = false
const baseName = values.name
// Prevents name collision
let index = 2
while (this.existingFieldName.includes(`${baseName} ${index}`)) {
index += 1
}
values.name = `${baseName} ${index}`
const actionGroupId = createNewUndoRedoActionGroupId()
onDuplicationEnd() {
this.loading = false
this.actionGroupId = null
},
showError(title, message) {
this.$store.dispatch(
'notification/error',
{ title, message },
{ root: true }
)
},
// eslint-disable-next-line require-await
async onJobFailed() {
this.onDuplicationEnd()
this.showError(
this.$t('clientHandler.notCompletedTitle'),
this.$t('clientHandler.notCompletedDescription')
)
},
// eslint-disable-next-line require-await
async onJobPollingError(error) {
this.onDuplicationEnd()
notifyIf(error, 'table')
},
async onJobDone() {
const newFieldId = this.job.duplicated_field.id
try {
const { data: newField } = await FieldService(this.$client).get(
newFieldId
)
this.onFieldDuplicated(newField)
} catch (error) {
this.onDuplicationEnd()
notifyIf(error, 'table')
}
},
onFieldDuplicated(newField) {
try {
const { forceCreateCallback, fetchNeeded, newField } =
await this.$store.dispatch('field/create', {
type,
values,
table: this.table,
forceCreate: false,
undoRedoActionGroupId: actionGroupId,
})
const callback = async () => {
await forceCreateCallback()
this.loading = false
await this.$store.dispatch('field/forceCreate', {
table: this.table,
values: newField,
relatedFields: newField.related_fields,
})
this.hide()
// GridViewHead will update the order of the fields
this.$emit('move-field', {
newField,
position: 'right',
fromField: this.fromField,
undoRedoActionGroupId: actionGroupId,
undoRedoActionGroupId: this.actionGroupId,
})
this.onDuplicationEnd()
}
this.$emit('field-created', { callback, newField, fetchNeeded })
this.$emit('field-created', { callback, newField, fetchNeeded: true })
} catch (error) {
this.loading = false
this.onDuplicationEnd()
this.handleError(error)
}
},
async duplicateField() {
if (this.loading || this.disabled) {
return
}
this.loading = true
this.hideError()
this.actionGroupId = createNewUndoRedoActionGroupId()
try {
const { data: job } = await FieldService(this.$client).asyncDuplicate(
this.fromField.id,
this.duplicateData,
this.actionGroupId
)
this.startJobPoller(job)
} catch (error) {
this.onDuplicationEnd()
notifyIf(error, 'table')
}
},
},
}
</script>

View file

@@ -7,9 +7,9 @@
</label>
<div class="control__elements">
<Dropdown
v-model="values.link_row_table"
:class="{ 'dropdown--error': $v.values.link_row_table.$error }"
@hide="$v.values.link_row_table.$touch()"
v-model="values.link_row_table_id"
:class="{ 'dropdown--error': $v.values.link_row_table_id.$error }"
@hide="$v.values.link_row_table_id.$touch()"
>
<DropdownItem
v-for="table in tables"
@@ -18,7 +18,7 @@
:value="table.id"
></DropdownItem>
</Dropdown>
<div v-if="$v.values.link_row_table.$error" class="error">
<div v-if="$v.values.link_row_table_id.$error" class="error">
{{ $t('error.requiredField') }}
</div>
</div>
@@ -39,30 +39,28 @@ export default {
mixins: [form, fieldSubForm],
data() {
return {
allowedValues: ['link_row_table'],
allowedValues: ['link_row_table_id'],
values: {
link_row_table: null,
link_row_table_id: null,
},
initialLinkRowTable: null,
initialLinkRowTableId: null,
}
},
computed: {
tables() {
const applications = this.$store.getters['application/getAll']
const databaseType = DatabaseApplicationType.getType()
const tableId = this.table.id
const databaseId = this.table.database_id
// Search for the database of the related table and return all the siblings of
// that table because those are the only ones the user can choose from.
for (let i = 0; i < applications.length; i++) {
const application = applications[i]
if (application.type === databaseType) {
for (let tableI = 0; tableI < application.tables.length; tableI++) {
const table = application.tables[tableI]
if (table.id === tableId) {
return application.tables
}
}
if (
application.type === databaseType &&
application.id === databaseId
) {
return application.tables
}
}
@@ -70,16 +68,16 @@
},
},
mounted() {
this.initialLinkRowTable = this.values.link_row_table
this.initialLinkRowTableId = this.values.link_row_table_id
},
validations: {
values: {
link_row_table: { required },
link_row_table_id: { required },
},
},
methods: {
reset() {
this.initialLinkRowTable = this.values.link_row_table
this.initialLinkRowTableId = this.values.link_row_table_id
return form.methods.reset.call(this)
},
isValid() {

View file

@@ -156,9 +156,9 @@ export default {
const selectedField = this.$store.getters['field/get'](
this.values.through_field_id
)
if (selectedField && selectedField.link_row_table) {
if (selectedField && selectedField.link_row_table_id) {
const { data } = await FieldService(this.$client).fetchAll(
selectedField.link_row_table
selectedField.link_row_table_id
)
this.fieldsInThroughTable = data
.filter((f) => {
@@ -167,7 +167,7 @@
// circular reference.
return (
!this.defaultValues.primary ||
this.defaultValues.table_id !== f.link_row_table
this.defaultValues.table_id !== f.link_row_table_id
)
})
.filter((f) => {

View file

@@ -36,13 +36,13 @@
<SelectRowModal
v-if="!readOnly"
ref="selectModal"
:table-id="field.link_row_table"
:table-id="field.link_row_table_id"
:value="value"
@selected="addValue(value, $event)"
></SelectRowModal>
<ForeignRowEditModal
ref="rowEditModal"
:table-id="field.link_row_table"
:table-id="field.link_row_table_id"
@hidden="modalOpen = false"
></ForeignRowEditModal>
</div>

View file

@@ -30,7 +30,7 @@
<SelectRowModal
v-if="!disabled"
ref="selectModal"
:table-id="field.link_row_table"
:table-id="field.link_row_table_id"
@selected="setValue"
></SelectRowModal>
</a>
@@ -90,7 +90,7 @@ export default {
this.loading = true
try {
this.name = await RowService(this.$client).getName(
this.field.link_row_table,
this.field.link_row_table_id,
value
)
} finally {

View file

@@ -43,14 +43,14 @@
</div>
<SelectRowModal
ref="selectModal"
:table-id="field.link_row_table"
:table-id="field.link_row_table_id"
:value="value"
@selected="addValue(value, $event)"
@hidden="hideModal"
></SelectRowModal>
<ForeignRowEditModal
ref="rowEditModal"
:table-id="field.link_row_table"
:table-id="field.link_row_table_id"
@hidden="hideModal"
></ForeignRowEditModal>
</div>

View file

@@ -790,7 +790,7 @@ export class LinkRowFieldType extends FieldType {
prepareRichValueForCopy(field, value) {
return {
tableId: field.link_row_table,
tableId: field.link_row_table_id,
value,
}
}
@@ -812,7 +812,7 @@ export class LinkRowFieldType extends FieldType {
prepareValueForPaste(field, clipboardData, richClipboardData) {
if (
this.checkRichValueIsCompatible(richClipboardData) &&
field.link_row_table === richClipboardData.tableId
field.link_row_table_id === richClipboardData.tableId
) {
if (richClipboardData === null) {
return []
@@ -837,7 +837,7 @@ export class LinkRowFieldType extends FieldType {
* to be removed from the store without making an API call.
*/
tableDeleted({ dispatch }, field, table, database) {
if (field.link_row_table === table.id) {
if (field.link_row_table_id === table.id) {
dispatch('field/forceDelete', field, { root: true })
}
}
@@ -847,7 +847,9 @@
}
getDocsDescription(field) {
return this.app.i18n.t('fieldDocs.linkRow', { table: field.link_row_table })
return this.app.i18n.t('fieldDocs.linkRow', {
table: field.link_row_table_id,
})
}
getDocsRequestExample(field) {

View file

@@ -1,6 +1,6 @@
{
"table": {
"chooseView": "Choose view"
"chooseView": "Choose view"
},
"webhookModal": {
"title": "{name} webhooks",
@@ -653,7 +653,7 @@
"duplicateFieldContext": {
"duplicate": "Duplicate field",
"cloneData": "Copy data",
"soon": "Available soon"
"readOnlyField": "Cell values will be filled automatically."
},
"snapshotsModal": {
"title": "snapshots",
@@ -689,9 +689,9 @@
"preview": "Preview",
"fields": "Fields"
},
"formViewModePreviewForm": {
"formViewModePreviewForm": {
"addCoverImage": "Add a cover image",
"addLogo": "Add a logo",
"noFields": "This form doesn't have any fields. Click on a field in the left sidebar to add one."
}
}
}

View file

@@ -1,17 +1,21 @@
import { UNDO_REDO_ACTION_GROUP_HEADER } from '@baserow/modules/database/utils/action'
export default (client) => {
const getRequestConfig = ({ undoRedoActionGroupId }) => {
const config = {}
if (undoRedoActionGroupId != null) {
config.headers = {
[UNDO_REDO_ACTION_GROUP_HEADER]: undoRedoActionGroupId,
}
}
return config
}
return {
fetchAll(tableId) {
return client.get(`/database/fields/table/${tableId}/`)
},
create(tableId, values, undoRedoActionGroupId = null) {
const config = {}
if (undoRedoActionGroupId != null) {
config.headers = {
[UNDO_REDO_ACTION_GROUP_HEADER]: undoRedoActionGroupId,
}
}
const config = getRequestConfig({ undoRedoActionGroupId })
return client.post(`/database/fields/table/${tableId}/`, values, config)
},
get(fieldId) {
@@ -39,5 +43,17 @@ export default (client) => {
delete(fieldId) {
return client.delete(`/database/fields/${fieldId}/`)
},
asyncDuplicate(
fieldId,
duplicateData = false,
undoRedoActionGroupId = null
) {
const config = getRequestConfig({ undoRedoActionGroupId })
return client.post(
`/database/fields/${fieldId}/duplicate/async/`,
{ duplicate_data: duplicateData },
config
)
},
}
}

View file

@@ -39,7 +39,7 @@ const mockedFields = {
table_id: 42,
type: 'link_row',
link_row_related_field: 270,
link_row_table: 43,
link_row_table_id: 43,
},
number: {
id: 4,