1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-06 22:08:52 +00:00

Resolve "Duplicate view"

This commit is contained in:
Jrmi 2022-06-29 13:05:38 +00:00
parent 5143374025
commit 745ccca6d6
21 changed files with 628 additions and 66 deletions
backend
changelog.md
premium/backend
src/baserow_premium/views
tests/baserow_premium/views
web-frontend/modules/database
components/view
locales
services
store

View file

@ -5,6 +5,7 @@ from baserow.contrib.database.views.registries import view_type_registry
from .views import (
ViewsView,
ViewView,
DuplicateViewView,
OrderViewsView,
ViewFiltersView,
ViewFilterView,
@ -45,6 +46,11 @@ urlpatterns = view_type_registry.api_urls + [
name="decoration_item",
),
re_path(r"(?P<view_id>[0-9]+)/$", ViewView.as_view(), name="item"),
re_path(
r"(?P<view_id>[0-9]+)/duplicate/$",
DuplicateViewView.as_view(),
name="duplicate",
),
re_path(
r"(?P<view_id>[0-9]+)/filters/$", ViewFiltersView.as_view(), name="list_filters"
),

View file

@ -14,6 +14,7 @@ from baserow.contrib.database.views.actions import (
CreateViewActionType,
DeleteViewActionType,
OrderViewsActionType,
DuplicateViewActionType,
UpdateViewActionType,
CreateViewFilterActionType,
DeleteViewFilterActionType,
@ -519,6 +520,70 @@ class ViewView(APIView):
return Response(status=204)
class DuplicateViewView(APIView):
    """Endpoint that duplicates an existing view including its configuration."""

    permission_classes = (IsAuthenticated,)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="view_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Duplicates the view related to the provided value.",
            ),
            CLIENT_SESSION_ID_SCHEMA_PARAMETER,
        ],
        tags=["Database table views"],
        operation_id="duplicate_database_table_view",
        description=(
            "Duplicates an existing view if the user has access to it. "
            "When a view is duplicated everything is copied except:"
            "\n- The name is appended with the copy number. "
            "Ex: `View Name` -> `View Name (2)` and `View (2)` -> `View (3)`"
            "\n- If the original view is publicly shared, the new view will not be"
            " shared anymore"
        ),
        responses={
            200: DiscriminatorCustomFieldsMappingSerializer(
                view_type_registry, ViewSerializer
            ),
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                ]
            ),
            404: get_error_schema(["ERROR_VIEW_DOES_NOT_EXIST"]),
        },
    )
    @transaction.atomic
    @map_exceptions(
        {
            ViewDoesNotExist: ERROR_VIEW_DOES_NOT_EXIST,
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
        }
    )
    def post(self, request, view_id):
        """Duplicates the requested view and returns the serialized copy."""

        # `.specific` resolves the polymorphic View into its concrete subtype
        # (grid, gallery, ...) so the registry can find the matching view type.
        view = ViewHandler().get_view(view_id).specific
        view_type = view_type_registry.get_by_model(view)

        # Let the view type translate its own exceptions into API errors.
        with view_type.map_api_exceptions():
            duplicate_view = action_type_registry.get_by_type(
                DuplicateViewActionType
            ).do(user=request.user, original_view=view)

        serializer = view_type_registry.get_serializer(
            duplicate_view,
            ViewSerializer,
            filters=True,
            sortings=True,
            decorations=True,
        )
        return Response(serializer.data)
class OrderViewsView(APIView):
permission_classes = (IsAuthenticated,)

View file

@ -95,6 +95,7 @@ class DatabaseConfig(AppConfig):
from baserow.contrib.database.views.actions import (
CreateViewActionType,
DuplicateViewActionType,
DeleteViewActionType,
OrderViewsActionType,
UpdateViewActionType,
@ -112,6 +113,7 @@ class DatabaseConfig(AppConfig):
)
action_type_registry.register(CreateViewActionType())
action_type_registry.register(DuplicateViewActionType())
action_type_registry.register(DeleteViewActionType())
action_type_registry.register(OrderViewsActionType())
action_type_registry.register(UpdateViewActionType())

View file

@ -40,7 +40,7 @@ from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.handler import ViewHandler
from baserow.core.trash.exceptions import RelatedTableTrashedException
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs
from baserow.core.utils import extract_allowed, set_allowed_attrs, find_unused_name
from .dependencies.handler import FieldDependencyHandler
from .dependencies.update_collector import FieldUpdateCollector
from .exceptions import (
@ -755,64 +755,21 @@ class FieldHandler:
max_field_name_length = Field.get_max_name_length()
# If the field_name_to_try is longer than the maximally allowed
# field name length the name needs to be truncated.
field_names_to_try = [
item[0:max_field_name_length] for item in field_names_to_try
]
# Check if any of the names to try are available by finding any existing field
# names with the same name.
taken_field_names = set(
Field.objects.exclude(id__in=field_ids_to_ignore)
.filter(table=table, name__in=field_names_to_try)
.values("name")
.distinct()
.values_list("name", flat=True)
)
# If there are more names to try than the ones used in the table then there must
# be one which isn't used.
if len(set(field_names_to_try)) > len(taken_field_names):
# Loop over to ensure we maintain the ordering provided by
# field_names_to_try, so we always return the first available name and
# not any.
for field_name in field_names_to_try:
if field_name not in taken_field_names:
return field_name
# None of the names in the param list are available, now using the last one lets
# append a number to the name until we find a free one.
original_field_name = field_names_to_try[-1]
# Lookup any existing field names. This way we can skip these and ensure our
# new field has a unique name.
existing_field_name_collisions = set(
existing_field_name_collisions = (
Field.objects.exclude(id__in=field_ids_to_ignore)
.filter(table=table)
.order_by("name")
.distinct()
.values_list("name", flat=True)
)
i = 2
while True:
suffix_to_append = f" {i}"
suffix_length = len(suffix_to_append)
length_of_original_field_name_plus_suffix = (
len(original_field_name) + suffix_length
)
# At this point we know, that the original_field_name can only
# be maximally the length of max_field_name_length. Therefore
# if the length_of_original_field_name_plus_suffix is longer
# we can further truncate the field_name by the length of the
# suffix.
if length_of_original_field_name_plus_suffix > max_field_name_length:
field_name = f"{original_field_name[:-suffix_length]}{suffix_to_append}"
else:
field_name = f"{original_field_name}{suffix_to_append}"
i += 1
if field_name not in existing_field_name_collisions:
return field_name
return find_unused_name(
field_names_to_try,
existing_field_name_collisions,
max_length=max_field_name_length,
)
def restore_field(
self,

View file

@ -754,6 +754,51 @@ class CreateViewActionType(ActionType):
TrashHandler.restore_item(user, "view", params.view_id)
class DuplicateViewActionType(ActionType):
    # Unique action type name used for registration and undo/redo lookup.
    type = "duplicate_view"

    @dataclasses.dataclass
    class Params:
        # Id of the view created by the duplication; undo deletes it,
        # redo restores it from the trash.
        view_id: int

    @classmethod
    def do(cls, user: AbstractUser, original_view: View) -> View:
        """
        Duplicate an existing view.

        Undoing this action deletes the new view.
        Redoing this action restores the view.

        :param user: The user creating the view.
        :param original_view: The view to duplicate.
        :return: The newly created duplicate view.
        """

        view = ViewHandler().duplicate_view(
            user,
            original_view,
        )

        # Record the action so it can be undone/redone within the scope of
        # the original view's table.
        cls.register_action(
            user=user,
            params=cls.Params(view.id),
            scope=cls.scope(original_view.table.id),
        )
        return view

    @classmethod
    def scope(cls, table_id: int) -> ActionScopeStr:
        # Duplication is undoable per table.
        return TableActionScopeType.value(table_id)

    @classmethod
    def undo(cls, user: AbstractUser, params: Params, action_to_undo: Action):
        # Deleting moves the duplicated view to the trash so redo can restore it.
        ViewHandler().delete_view_by_id(user, params.view_id)

    @classmethod
    def redo(cls, user: AbstractUser, params: Params, action_to_redo: Action):
        TrashHandler.restore_item(user, "view", params.view_id)
class DeleteViewActionType(ActionType):
type = "delete_view"

View file

@ -1,6 +1,8 @@
import re
from collections import defaultdict
from dataclasses import dataclass
from copy import deepcopy
from io import BytesIO
from typing import (
Dict,
Any,
@ -12,6 +14,7 @@ from typing import (
Type,
Union,
)
from zipfile import ZIP_DEFLATED, ZipFile
import jwt
@ -24,6 +27,7 @@ from django.core.cache import cache
from django.db import models as django_models
from django.db.models import F, Count
from django.db.models.query import QuerySet
from django.core.files.storage import default_storage
from baserow.contrib.database.fields.exceptions import FieldNotInTable
from baserow.contrib.database.fields.field_filters import FilterBuilder
@ -38,6 +42,7 @@ from baserow.core.utils import (
extract_allowed,
set_allowed_attrs,
get_model_reference_field_name,
find_unused_name,
)
from .exceptions import (
ViewDoesNotExist,
@ -84,10 +89,12 @@ from .signals import (
)
from .validators import EMPTY_VALUES
FieldOptionsDict = Dict[int, Dict[str, Any]]
ending_number_regex = re.compile(r"(.+) (\d+)$")
class ViewHandler:
PUBLIC_VIEW_TOKEN_ALGORITHM = "HS256" # nosec
@ -203,6 +210,80 @@ class ViewHandler:
return instance
    def duplicate_view(self, user: AbstractUser, original_view: View) -> View:
        """
        Duplicates the given view to create a new one. The name is appended with the
        copy number and if the original view is publicly shared, the created view
        will not be shared anymore. The new view will be created just after the
        original view.

        :param user: The user who asks for the duplication.
        :param original_view: The original view to be duplicated.
        :raises UserNotInGroup: If the user doesn't belong to the view's group.
        :return: The created view instance.
        """

        group = original_view.table.database.group
        group.has_user(user, raise_error=True)

        view_type = view_type_registry.get_by_model(original_view)

        storage = default_storage

        fields = original_view.table.field_set.all()
        files_buffer = BytesIO()

        # Use export/import to duplicate the view easily
        with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
            serialized = view_type.export_serialized(original_view, files_zip, storage)

        existing_view_names = View.objects.filter(
            table_id=original_view.table.id
        ).values_list("name", flat=True)

        # Change the name of the view. If the original name already ends with a
        # number (e.g. "View 2"), strip it so find_unused_name can pick the next
        # free number instead of appending a second one.
        name = serialized["name"]
        match = ending_number_regex.match(name)
        if match:
            name, _ = match.groups()
        serialized["name"] = find_unused_name(
            [name], existing_view_names, max_length=255
        )

        # The new view must not be publicly shared
        if "public" in serialized:
            serialized["public"] = False

        # Fields stay in the same table, so the id mapping is the identity.
        id_mapping = {
            "database_fields": {field.id: field.id for field in fields},
            "database_field_select_options": {},
        }

        with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
            duplicated_view = view_type.import_serialized(
                original_view.table, serialized, id_mapping, files_zip, storage
            )

        queryset = View.objects.filter(table_id=original_view.table.id)
        view_ids = queryset.values_list("id", flat=True)

        # Build the new ordering with the duplicate placed directly after the
        # original view.
        ordered_ids = []
        for view_id in view_ids:
            if view_id != duplicated_view.id:
                ordered_ids.append(view_id)
            if view_id == original_view.id:
                ordered_ids.append(duplicated_view.id)

        View.order_objects(queryset, ordered_ids)
        duplicated_view.refresh_from_db()

        # Notify listeners about the new view and the changed view order.
        view_created.send(
            self, view=duplicated_view, user=user, type_name=view_type.type
        )
        views_reordered.send(
            self, table=original_view.table, order=ordered_ids, user=None
        )

        return duplicated_view
def update_view(
self, user: AbstractUser, view: View, **data: Dict[str, Any]
) -> View:

View file

@ -76,6 +76,7 @@ class GridViewType(ViewType):
"""
serialized = super().export_serialized(grid, files_zip, storage)
serialized["row_identifier_type"] = grid.row_identifier_type
serialized_field_options = []
for field_option in grid.get_field_options():
@ -329,6 +330,10 @@ class GalleryViewType(ViewType):
"""
serialized = super().export_serialized(gallery, files_zip, storage)
if gallery.card_cover_image_field:
serialized["card_cover_image_field_id"] = gallery.card_cover_image_field.id
serialized_field_options = []
for field_option in gallery.get_field_options():
serialized_field_options.append(
@ -351,7 +356,14 @@ class GalleryViewType(ViewType):
"""
serialized_copy = serialized_values.copy()
if serialized_copy.get("card_cover_image_field_id", None):
serialized_copy["card_cover_image_field_id"] = id_mapping[
"database_fields"
][serialized_copy["card_cover_image_field_id"]]
field_options = serialized_copy.pop("field_options")
gallery_view = super().import_serialized(
table, serialized_copy, id_mapping, files_zip, storage
)

View file

@ -335,6 +335,68 @@ def remove_invalid_surrogate_characters(content: bytes) -> str:
return re.sub(r"\\u(d|D)([a-z|A-Z|0-9]{3})", "", content.decode("utf-8", "ignore"))
def find_unused_name(
    variants_to_try: Iterable[str],
    existing_names: Iterable[str],
    max_length: int = None,
    suffix: str = " {0}",
):
    """
    Finds an unused name among the existing names. If no name in the provided
    variants_to_try list is available, then the last name in that list will
    have a number appended which ensures it is an available unique name.

    Respects the maximally allowed name length. In case the variants_to_try
    are longer than that, they will get truncated to the maximally allowed
    length.

    :param variants_to_try: An iterable of name variants we want to try.
    :param existing_names: An iterable of all pre existing names.
    :param max_length: Set this value if you have a length limit for the new
        name.
    :param suffix: The suffix you want to append to the name to avoid a
        duplicate. The string is going to be formatted with a number.
    :return: The first available unused name.
    """

    existing_names_set = set(existing_names)

    if max_length is not None:
        variants_to_try = [item[0:max_length] for item in variants_to_try]

    remaining_names = set(variants_to_try) - existing_names_set

    # Some variants to try remain, let's return the first one
    if remaining_names:
        # Loop over to ensure we maintain the ordering provided by
        # variants_to_try, so we always return the first available name and
        # not any.
        for name in variants_to_try:
            if name in remaining_names:
                return name

    # None of the names in the param list are available, now using the last one
    # let's append a number to the name until we find a free one.
    original_name = variants_to_try[-1]

    i = 2
    while True:
        suffix_to_append = suffix.format(i)
        suffix_length = len(suffix_to_append)

        # If appending the suffix would exceed max_length, truncate the
        # original name to exactly `max_length - suffix_length` so that the
        # name plus suffix fits the limit. (Previously the name was shortened
        # by the suffix length instead, which over-truncated names that were
        # already below the limit.)
        if (
            max_length is not None
            and len(original_name) + suffix_length > max_length
        ):
            name = f"{original_name[: max_length - suffix_length]}{suffix_to_append}"
        else:
            name = f"{original_name}{suffix_to_append}"

        if name not in existing_names_set:
            return name

        i += 1
def grouper(n: int, iterable: Iterable):
"""
Groups the iterable by `n` per chunk and yields it.

View file

@ -269,6 +269,7 @@ def test_to_baserow_database_export():
"type": "grid",
"name": "Grid",
"order": 1,
"row_identifier_type": "id",
"filter_type": "AND",
"filters_disabled": False,
"filters": [],

View file

@ -185,6 +185,70 @@ def test_delete_view(api_client, data_fixture):
assert GridView.objects.all().count() == 1
@pytest.mark.django_db
def test_duplicate_views(api_client, data_fixture):
    # API-level test of the view duplicate endpoint: permission, 404 and
    # success cases.
    user, token = data_fixture.create_user_and_token(
        email="test@test.nl", password="password", first_name="Test1"
    )
    table_1 = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table_1)
    # table_2 belongs to a different user, so its view must not be duplicable.
    table_2 = data_fixture.create_database_table()
    view_1 = data_fixture.create_grid_view(table=table_1, order=1)
    view_2 = data_fixture.create_grid_view(table=table_2, order=2)
    view_3 = data_fixture.create_grid_view(table=table_1, order=3)
    # Give view_1 extra configuration to make sure it is carried over.
    field_option = data_fixture.create_grid_view_field_option(
        grid_view=view_1,
        field=field,
        aggregation_type="whatever",
        aggregation_raw_type="empty",
    )
    view_filter = data_fixture.create_view_filter(
        view=view_1, field=field, value="test", type="equal"
    )
    view_sort = data_fixture.create_view_sort(view=view_1, field=field, order="ASC")

    view_decoration = data_fixture.create_view_decoration(
        view=view_1,
        value_provider_conf={"config": 12},
    )

    # Duplicating a view from another user's group is rejected.
    response = api_client.post(
        reverse("api:database:views:duplicate", kwargs={"view_id": view_2.id}),
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"

    # Duplicating a non-existing view returns a 404.
    response = api_client.post(
        reverse("api:database:views:duplicate", kwargs={"view_id": 999999}),
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST"

    assert View.objects.count() == 3

    # Successful duplication returns the new view placed after the original.
    response = api_client.post(
        reverse("api:database:views:duplicate", kwargs={"view_id": view_1.id}),
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert View.objects.count() == 4
    assert response_json["id"] != view_1.id
    assert response_json["order"] == view_1.order + 1
    # The serializer must include the related configuration sections.
    assert "sortings" in response_json
    assert "filters" in response_json
    assert "decorations" in response_json
@pytest.mark.django_db
def test_order_views(api_client, data_fixture):
user, token = data_fixture.create_user_and_token(

View file

@ -0,0 +1,45 @@
import pytest
from baserow.contrib.database.views.actions import DuplicateViewActionType
from baserow.contrib.database.action.scopes import TableActionScopeType
from baserow.contrib.database.views.models import View
from baserow.core.action.handler import ActionHandler
from baserow.core.action.registries import action_type_registry
@pytest.mark.django_db
def test_can_undo_duplicate_view(data_fixture):
    # Undo/redo is grouped per client session; the undo below targets this one.
    session_id = "session-id"
    user = data_fixture.create_user(session_id=session_id)
    table = data_fixture.create_database_table(user)
    grid_view = data_fixture.create_grid_view(table=table)

    new_view = action_type_registry.get_by_type(DuplicateViewActionType).do(
        user, grid_view
    )

    assert View.objects.count() == 2

    # Undoing the duplication deletes the duplicated view again.
    ActionHandler.undo(user, [TableActionScopeType.value(table.id)], session_id)

    assert View.objects.count() == 1
@pytest.mark.django_db
def test_can_undo_redo_duplicate_view(data_fixture):
    # NOTE(review): renamed from ``test_can_undo_redo_create_view`` — the body
    # exercises DuplicateViewActionType, not view creation.
    session_id = "session-id"
    user = data_fixture.create_user(session_id=session_id)
    table = data_fixture.create_database_table(user)
    grid_view = data_fixture.create_grid_view(table=table)

    action_type_registry.get_by_type(DuplicateViewActionType).do(user, grid_view)

    assert View.objects.count() == 2

    # Undo deletes the duplicated view...
    ActionHandler.undo(user, [TableActionScopeType.value(table.id)], session_id)

    assert View.objects.count() == 1

    # ...and redo restores it from the trash.
    ActionHandler.redo(user, [TableActionScopeType.value(table.id)], session_id)

    assert View.objects.count() == 2

View file

@ -376,6 +376,65 @@ def test_delete_form_view(send_mock, data_fixture):
assert send_mock.call_args[1]["user"].id == user.id
@pytest.mark.django_db
@patch("baserow.contrib.database.views.signals.view_created.send")
@patch("baserow.contrib.database.views.signals.views_reordered.send")
def test_duplicate_views(reordered_mock, created_mock, data_fixture):
    # Handler-level test of ViewHandler.duplicate_view covering permissions,
    # naming, ordering, public flag reset and copied configuration.
    user = data_fixture.create_user()
    user_2 = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    grid = data_fixture.create_public_password_protected_grid_view(table=table, order=1)
    # Add another view to challenge the insertion position of the duplicate
    form = data_fixture.create_form_view(table=table, order=2)
    field_option = data_fixture.create_grid_view_field_option(
        grid_view=grid,
        field=field,
        aggregation_type="whatever",
        aggregation_raw_type="empty",
    )
    view_filter = data_fixture.create_view_filter(
        view=grid, field=field, value="test", type="equal"
    )
    view_sort = data_fixture.create_view_sort(view=grid, field=field, order="ASC")

    view_decoration = data_fixture.create_view_decoration(
        view=grid,
        value_provider_conf={"config": 12},
    )

    handler = ViewHandler()

    # A user outside the table's group must not be able to duplicate the view.
    with pytest.raises(UserNotInGroup):
        handler.duplicate_view(user=user_2, original_view=grid)

    new_view = handler.duplicate_view(user=user, original_view=grid)

    created_mock.assert_called_once()
    assert created_mock.call_args[1]["view"].id == new_view.id
    assert created_mock.call_args[1]["user"].id == user.id

    reordered_mock.assert_called_once()
    # The duplicate is inserted directly after the original view.
    assert reordered_mock.call_args[1]["order"] == [grid.id, new_view.id, form.id]

    grid.refresh_from_db()
    assert new_view.name == grid.name + " 2"
    assert new_view.id != grid.id
    assert new_view.order == grid.order + 1
    # The duplicate must not keep the public sharing of the original.
    assert new_view.public is False
    assert new_view.viewfilter_set.all().first().value == view_filter.value
    assert new_view.viewsort_set.all().first().order == view_sort.order
    assert (
        new_view.viewdecoration_set.all()[0].value_provider_conf
        == view_decoration.value_provider_conf
    )

    # Duplicating the duplicate bumps the trailing number instead of
    # appending a second one.
    new_view2 = handler.duplicate_view(user=user, original_view=new_view)
    assert new_view2.name == grid.name + " 3"
@pytest.mark.django_db
@patch("baserow.contrib.database.views.signals.views_reordered.send")
def test_order_views(send_mock, data_fixture):

View file

@ -19,7 +19,11 @@ from baserow.contrib.database.fields.handler import FieldHandler
@pytest.mark.django_db
def test_import_export_grid_view(data_fixture):
grid_view = data_fixture.create_grid_view(
name="Test", order=1, filter_type="AND", filters_disabled=False
name="Test",
order=1,
filter_type="AND",
filters_disabled=False,
row_identifier_type="count",
)
field = data_fixture.create_text_field(table=grid_view.table)
imported_field = data_fixture.create_text_field(table=grid_view.table)
@ -52,6 +56,7 @@ def test_import_export_grid_view(data_fixture):
assert grid_view.order == imported_grid_view.order
assert grid_view.filter_type == imported_grid_view.filter_type
assert grid_view.filters_disabled == imported_grid_view.filters_disabled
assert grid_view.row_identifier_type == imported_grid_view.row_identifier_type
assert imported_grid_view.viewfilter_set.all().count() == 1
assert imported_grid_view.viewsort_set.all().count() == 1
@ -148,7 +153,10 @@ def test_import_export_gallery_view(data_fixture, tmpdir):
storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
table = data_fixture.create_database_table(user=user)
gallery_view = data_fixture.create_gallery_view(table=table)
file_field = data_fixture.create_file_field(table=table)
gallery_view = data_fixture.create_gallery_view(
table=table, card_cover_image_field=file_field
)
text_field = data_fixture.create_text_field(table=table)
field_option = data_fixture.create_gallery_view_field_option(
gallery_view, text_field, order=1
@ -166,14 +174,21 @@ def test_import_export_gallery_view(data_fixture, tmpdir):
assert serialized["type"] == "gallery"
assert serialized["name"] == gallery_view.name
assert serialized["order"] == 0
assert len(serialized["field_options"]) == 1
assert serialized["card_cover_image_field_id"] == file_field.id
assert len(serialized["field_options"]) == 2
assert serialized["field_options"][0]["id"] == field_option.id
assert serialized["field_options"][0]["field_id"] == field_option.field_id
assert serialized["field_options"][0]["hidden"] is True
assert serialized["field_options"][0]["order"] == 1
imported_single_select_field = data_fixture.create_text_field(table=table)
id_mapping = {"database_fields": {text_field.id: imported_single_select_field.id}}
imported_file_field = data_fixture.create_file_field(table=table)
id_mapping = {
"database_fields": {
text_field.id: imported_single_select_field.id,
file_field.id: imported_file_field.id,
}
}
with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
imported_gallery_view = gallery_view_type.import_serialized(
@ -183,8 +198,9 @@ def test_import_export_gallery_view(data_fixture, tmpdir):
assert gallery_view.id != imported_gallery_view.id
assert gallery_view.name == imported_gallery_view.name
assert gallery_view.order == imported_gallery_view.order
assert imported_gallery_view.card_cover_image_field.id == imported_file_field.id
imported_field_options = imported_gallery_view.get_field_options()
assert len(imported_field_options) == 1
assert len(imported_field_options) == 2
imported_field_option = imported_field_options[0]
assert field_option.id != imported_field_option.id
assert field_option.hidden == imported_field_option.hidden

View file

@ -16,6 +16,7 @@ from baserow.core.utils import (
truncate_middle,
split_comma_separated_string,
remove_invalid_surrogate_characters,
find_unused_name,
grouper,
Progress,
ChildProgressBuilder,
@ -113,6 +114,88 @@ def test_remove_invalid_surrogate_characters():
assert remove_invalid_surrogate_characters(b"test\uD83Dtest") == "testtest"
def test_unused_names():
    """Basic behavior of find_unused_name without a length limit."""

    assert find_unused_name(["test"], ["foo", "bar", "baz"]) == "test"
    assert find_unused_name(["test"], ["test", "field", "field 2"]) == "test 2"
    assert find_unused_name(["test", "other"], ["test", "field", "field 2"]) == "other"
    assert find_unused_name(["field"], ["test", "field", "field 2"]) == "field 3"
    assert find_unused_name(["field"], [1, 2]) == "field"
    # Names containing regex metacharacters are treated literally.
    assert (
        find_unused_name(
            ["regex like field [0-9]"],
            ["regex like field [0-9]", "regex like field [0-9] 2"],
        )
        == "regex like field [0-9] 3"
    )
    # Try another suffix. A missing comma previously concatenated "field 4"
    # and "field (1)" into a single string; fixed here.
    assert (
        find_unused_name(
            ["field"],
            ["field", "field 4", "field (1)", "field (2)"],
            suffix=" ({0})",
        )
        == "field (3)"
    )
def test_unused_names_with_max_length():
    """find_unused_name must never return a name longer than max_length."""

    max_name_length = 255
    exactly_length_field_name = "x" * max_name_length
    too_long_field_name = "x" * (max_name_length + 1)

    # Make sure that the returned string does not exceed the max_name_length
    assert (
        len(
            find_unused_name(
                [exactly_length_field_name], [], max_length=max_name_length
            )
        )
        <= max_name_length
    )
    assert (
        len(
            find_unused_name(
                [f"{exactly_length_field_name} - test"], [], max_length=max_name_length
            )
        )
        <= max_name_length
    )
    assert (
        len(find_unused_name([too_long_field_name], [], max_length=max_name_length))
        <= max_name_length
    )

    # A 255-character name: appending " 2" requires truncating the original to
    # 253 characters first.
    initial_name = (
        "xIyV4w3J4J0Zzd5ZIz4eNPucQOa9tS25ULHw2SCr4RDZ9h2AvxYr5nlGRNQR2ir517B3SkZB"
        "nw2eGnBJQAdX8A6QcSCmcbBAnG3BczFytJkHJK7cE6VsAS6tROTg7GOwSQsdImURRwEarrXo"
        "lv9H4bylyJM0bDPkgB4H6apiugZ19X0C9Fw2ed125MJHoFgTZLbJRc6joNyJSOkGkmGhBuIq"
        "RKipRYGzB4oiFKYPx5Xoc8KHTsLqVDQTWwwzhaR"
    )
    expected_name_1 = (
        "xIyV4w3J4J0Zzd5ZIz4eNPucQOa9tS25ULHw2SCr4RDZ9h2AvxYr5nlGRNQR2ir517B3SkZB"
        "nw2eGnBJQAdX8A6QcSCmcbBAnG3BczFytJkHJK7cE6VsAS6tROTg7GOwSQsdImURRwEarrXo"
        "lv9H4bylyJM0bDPkgB4H6apiugZ19X0C9Fw2ed125MJHoFgTZLbJRc6joNyJSOkGkmGhBuIq"
        "RKipRYGzB4oiFKYPx5Xoc8KHTsLqVDQTWwwzh 2"
    )
    expected_name_2 = (
        "xIyV4w3J4J0Zzd5ZIz4eNPucQOa9tS25ULHw2SCr4RDZ9h2AvxYr5nlGRNQR2ir517B3SkZB"
        "nw2eGnBJQAdX8A6QcSCmcbBAnG3BczFytJkHJK7cE6VsAS6tROTg7GOwSQsdImURRwEarrXo"
        "lv9H4bylyJM0bDPkgB4H6apiugZ19X0C9Fw2ed125MJHoFgTZLbJRc6joNyJSOkGkmGhBuIq"
        "RKipRYGzB4oiFKYPx5Xoc8KHTsLqVDQTWwwzh 3"
    )

    assert (
        find_unused_name([initial_name], [initial_name], max_length=max_name_length)
        == expected_name_1
    )

    assert (
        find_unused_name(
            [initial_name], [initial_name, expected_name_1], max_length=max_name_length
        )
        == expected_name_2
    )
def test_grouper():
assert list(grouper(2, [1, 2, 3, 4, 5])) == [(1, 2), (3, 4), (5,)]

View file

@ -20,6 +20,7 @@ For example:
* Added multi-cell clearing via backspace key (delete on Mac).
* Added API exception registry that allows plugins to provide custom exception mappings for the REST API.
* Added formula round and int functions. [#891](https://gitlab.com/bramw/baserow/-/issues/891)
* Views can be duplicated. [#962](https://gitlab.com/bramw/baserow/-/issues/962)
### Bug Fixes

View file

@ -103,7 +103,11 @@ class KanbanViewType(ViewType):
"""
serialized = super().export_serialized(kanban, files_zip, storage)
serialized["single_select_field_id"] = kanban.single_select_field_id
if kanban.single_select_field_id:
serialized["single_select_field_id"] = kanban.single_select_field_id
if kanban.card_cover_image_field_id:
serialized["card_cover_image_field_id"] = kanban.card_cover_image_field_id
serialized_field_options = []
for field_option in kanban.get_field_options():
@ -132,9 +136,16 @@ class KanbanViewType(ViewType):
"""
serialized_copy = serialized_values.copy()
serialized_copy["single_select_field_id"] = id_mapping["database_fields"][
serialized_copy.pop("single_select_field_id")
]
if "single_select_field_id" in serialized_copy:
serialized_copy["single_select_field_id"] = id_mapping["database_fields"][
serialized_copy.pop("single_select_field_id")
]
if "card_cover_image_field_id" in serialized_copy:
serialized_copy["card_cover_image_field_id"] = id_mapping[
"database_fields"
][serialized_copy.pop("card_cover_image_field_id")]
field_options = serialized_copy.pop("field_options")
kanban_view = super().import_serialized(
table, serialized_copy, id_mapping, files_zip, storage

View file

@ -72,15 +72,16 @@ def test_import_export_kanban_view(premium_data_fixture, tmpdir):
storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
table = premium_data_fixture.create_database_table(user=user)
file_field = premium_data_fixture.create_file_field(table=table)
kanban_view = premium_data_fixture.create_kanban_view(
table=table,
single_select_field=None,
table=table, single_select_field=None, card_cover_image_field=file_field
)
single_select_field = premium_data_fixture.create_single_select_field(table=table)
field_option = premium_data_fixture.create_kanban_view_field_option(
kanban_view=kanban_view, field=single_select_field, hidden=True, order=1
)
kanban_view.single_select_field = single_select_field
kanban_view.save()
files_buffer = BytesIO()
kanban_field_type = view_type_registry.get("kanban")
@ -95,7 +96,8 @@ def test_import_export_kanban_view(premium_data_fixture, tmpdir):
assert serialized["name"] == kanban_view.name
assert serialized["order"] == 0
assert serialized["single_select_field_id"] == single_select_field.id
assert len(serialized["field_options"]) == 1
assert serialized["card_cover_image_field_id"] == file_field.id
assert len(serialized["field_options"]) == 2
assert serialized["field_options"][0]["id"] == field_option.id
assert serialized["field_options"][0]["field_id"] == field_option.field_id
assert serialized["field_options"][0]["hidden"] is True
@ -104,9 +106,13 @@ def test_import_export_kanban_view(premium_data_fixture, tmpdir):
imported_single_select_field = premium_data_fixture.create_single_select_field(
table=table
)
imported_file_field = premium_data_fixture.create_file_field(table=table)
id_mapping = {
"database_fields": {single_select_field.id: imported_single_select_field.id}
"database_fields": {
single_select_field.id: imported_single_select_field.id,
file_field.id: imported_file_field.id,
}
}
with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
@ -120,9 +126,14 @@ def test_import_export_kanban_view(premium_data_fixture, tmpdir):
assert (
kanban_view.single_select_field_id != imported_kanban_view.single_select_field
)
assert (
kanban_view.card_cover_image_field_id
!= imported_kanban_view.card_cover_image_field_id
)
assert imported_kanban_view.card_cover_image_field_id == imported_file_field.id
imported_field_options = imported_kanban_view.get_field_options()
assert len(imported_field_options) == 1
assert len(imported_field_options) == 2
imported_field_option = imported_field_options[0]
assert field_option.id != imported_field_option.id
assert imported_single_select_field.id == imported_field_option.field_id

View file

@ -7,6 +7,12 @@
{{ $t('viewContext.exportView') }}
</a>
</li>
<li>
<a @click="duplicateView()">
<i class="context__menu-icon fas fa-fw fa-clone"></i>
{{ $t('viewContext.duplicateView') }}
</a>
</li>
<li>
<a @click="openWebhookModal()">
<i class="context__menu-icon fas fa-fw fa-globe"></i>
@ -86,6 +92,29 @@ export default {
this.setLoading(this.view, false)
},
async duplicateView() {
this.setLoading(this.view, true)
let newView
try {
newView = await this.$store.dispatch('view/duplicate', this.view)
} catch (error) {
this.handleError(error, 'view')
}
this.$refs.context.hide()
this.setLoading(this.view, false)
// Redirect to the newly created view.
this.$nuxt.$router.push({
name: 'database-table',
params: {
databaseId: this.table.database_id,
tableId: this.table.id,
viewId: newView.id,
},
})
},
exportView() {
this.$refs.context.hide()
this.$refs.exportViewModal.show()

View file

@ -451,6 +451,7 @@
},
"viewContext": {
"exportView": "Export view",
"duplicateView": "Duplicate view",
"renameView": "Rename view",
"webhooks": "Webhooks",
"deleteView": "Delete view"

View file

@ -62,6 +62,9 @@ export default (client) => {
update(viewId, values) {
return client.patch(`/database/views/${viewId}/`, values)
},
duplicate(viewId) {
return client.post(`/database/views/${viewId}/duplicate/`)
},
order(tableId, order) {
return client.post(`/database/views/table/${tableId}/order/`, {
view_ids: order,

View file

@ -85,7 +85,7 @@ export const mutations = {
view._.loading = value
},
ADD_ITEM(state, item) {
state.items.push(item)
state.items = [...state.items, item].sort((a, b) => a.order - b.order)
},
UPDATE_ITEM(state, { id, values }) {
const index = state.items.findIndex((item) => item.id === id)
@ -328,6 +328,14 @@ export const actions = {
forceUpdate({ commit }, { view, values }) {
commit('UPDATE_ITEM', { id: view.id, values })
},
/**
* Duplicates an existing view.
*/
async duplicate({ commit, dispatch }, view) {
const { data } = await ViewService(this.$client).duplicate(view.id)
await dispatch('forceCreate', { data })
return data
},
/**
* Deletes an existing view with the provided id. A request to the server is first
* made and after that it will be deleted from the store.