1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-14 00:59:06 +00:00

Merge branch '508-prevent-fields-with-duplicate-names-in-the-same-table' into 'develop'

Resolve "Prevent fields with duplicate names in the same table"

Closes 

See merge request 
This commit is contained in:
Nigel Gott 2021-07-09 14:28:32 +00:00
commit c3af2a02b9
34 changed files with 1297 additions and 108 deletions

View file

@ -1,5 +1,6 @@
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from baserow.contrib.database.fields.handler import RESERVED_BASEROW_FIELD_NAMES
ERROR_FIELD_DOES_NOT_EXIST = (
"ERROR_FIELD_DOES_NOT_EXIST",
@ -40,5 +41,21 @@ ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE = (
HTTP_400_BAD_REQUEST,
"The field type {e.field_type} is not compatible with the primary field.",
)
# NOTE(review): unlike the error tuples below this is a bare error code string
# with no HTTP status or message — confirm this difference is intentional.
ERROR_MAX_FIELD_COUNT_EXCEEDED = "ERROR_MAX_FIELD_COUNT_EXCEEDED"

# (machine readable code, HTTP status, human readable message) returned when a
# field is created or renamed with a name already used by another field in the
# same table.
ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS = (
    "ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS",
    HTTP_400_BAD_REQUEST,
    "You cannot have two fields with the same name in the same table, please choose a "
    "unique name for each field.",
)
# (machine readable code, HTTP status, human readable message) returned when a
# field is created or renamed with a reserved Baserow field name (see
# RESERVED_BASEROW_FIELD_NAMES, e.g. "id" and "order").
# Fixed: the original message read "... are reserved and cannot and cannot be
# used ... please choose different field name." — duplicated words and a
# missing article.
ERROR_RESERVED_BASEROW_FIELD_NAME = (
    "ERROR_RESERVED_BASEROW_FIELD_NAME",
    HTTP_400_BAD_REQUEST,
    f"The field names {','.join(RESERVED_BASEROW_FIELD_NAMES)} are reserved and "
    f"cannot be used for a user created field, please choose a different field name.",
)
# (machine readable code, HTTP status, human readable message) returned when a
# field name is missing, blank or consists only of whitespace.
ERROR_INVALID_BASEROW_FIELD_NAME = (
    "ERROR_INVALID_BASEROW_FIELD_NAME",
    HTTP_400_BAD_REQUEST,
    "Fields must not be blank or only consist of whitespace.",
)

View file

@ -26,12 +26,18 @@ from baserow.contrib.database.api.fields.errors import (
ERROR_CANNOT_CHANGE_FIELD_TYPE,
ERROR_FIELD_DOES_NOT_EXIST,
ERROR_MAX_FIELD_COUNT_EXCEEDED,
ERROR_RESERVED_BASEROW_FIELD_NAME,
ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS,
ERROR_INVALID_BASEROW_FIELD_NAME,
)
from baserow.contrib.database.fields.exceptions import (
CannotDeletePrimaryField,
CannotChangeFieldType,
FieldDoesNotExist,
MaxFieldLimitExceeded,
ReservedBaserowFieldNameException,
FieldWithSameNameAlreadyExists,
InvalidBaserowFieldName,
)
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.handler import FieldHandler
@ -140,6 +146,9 @@ class FieldsView(APIView):
"ERROR_USER_NOT_IN_GROUP",
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_MAX_FIELD_COUNT_EXCEEDED",
"ERROR_RESERVED_BASEROW_FIELD_NAME",
"ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS",
"ERROR_INVALID_BASEROW_FIELD_NAME",
]
),
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
@ -156,6 +165,9 @@ class FieldsView(APIView):
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
MaxFieldLimitExceeded: ERROR_MAX_FIELD_COUNT_EXCEEDED,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
FieldWithSameNameAlreadyExists: ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS,
ReservedBaserowFieldNameException: ERROR_RESERVED_BASEROW_FIELD_NAME,
InvalidBaserowFieldName: ERROR_INVALID_BASEROW_FIELD_NAME,
}
)
def post(self, request, data, table_id):
@ -253,6 +265,9 @@ class FieldView(APIView):
"ERROR_USER_NOT_IN_GROUP",
"ERROR_CANNOT_CHANGE_FIELD_TYPE",
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_RESERVED_BASEROW_FIELD_NAME",
"ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS",
"ERROR_INVALID_BASEROW_FIELD_NAME",
]
),
404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
@ -264,6 +279,9 @@ class FieldView(APIView):
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotChangeFieldType: ERROR_CANNOT_CHANGE_FIELD_TYPE,
FieldWithSameNameAlreadyExists: ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS,
ReservedBaserowFieldNameException: ERROR_RESERVED_BASEROW_FIELD_NAME,
InvalidBaserowFieldName: ERROR_INVALID_BASEROW_FIELD_NAME,
}
)
def patch(self, request, field_id):

View file

@ -2,7 +2,6 @@ from django.conf import settings
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
ERROR_TABLE_DOES_NOT_EXIST = (
"ERROR_TABLE_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
@ -29,3 +28,9 @@ ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED = (
f"The initial table data limit has been exceeded. You can provide a maximum of "
f"{settings.INITIAL_TABLE_DATA_LIMIT} rows.",
)
# (machine readable code, HTTP status, human readable message) returned when
# the initial data supplied while creating a table contains duplicate field
# names.
ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES = (
    "ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES",
    HTTP_400_BAD_REQUEST,
    "Field names must be unique in Baserow per table however the initial table data "
    "provided contains duplicate field names, please make them unique and try again.",
)

View file

@ -1,41 +1,48 @@
from django.db import transaction
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from drf_spectacular.utils import extend_schema
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
from baserow.api.decorators import validate_body, map_exceptions
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.schemas import get_error_schema
from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
from baserow.core.exceptions import UserNotInGroup, ApplicationDoesNotExist
from baserow.core.handler import CoreHandler
from baserow.contrib.database.api.fields.errors import ERROR_MAX_FIELD_COUNT_EXCEEDED
from baserow.contrib.database.fields.exceptions import MaxFieldLimitExceeded
from baserow.contrib.database.api.fields.errors import (
ERROR_MAX_FIELD_COUNT_EXCEEDED,
ERROR_RESERVED_BASEROW_FIELD_NAME,
ERROR_INVALID_BASEROW_FIELD_NAME,
)
from baserow.contrib.database.fields.exceptions import (
MaxFieldLimitExceeded,
ReservedBaserowFieldNameException,
InvalidBaserowFieldName,
)
from baserow.contrib.database.models import Database
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.exceptions import (
TableDoesNotExist,
TableNotInDatabase,
InvalidInitialTableData,
InitialTableDataLimitExceeded,
InitialTableDataDuplicateName,
)
from .serializers import (
TableSerializer,
TableCreateSerializer,
TableUpdateSerializer,
OrderTablesSerializer,
)
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.models import Table
from baserow.core.exceptions import UserNotInGroup, ApplicationDoesNotExist
from baserow.core.handler import CoreHandler
from .errors import (
ERROR_TABLE_DOES_NOT_EXIST,
ERROR_TABLE_NOT_IN_DATABASE,
ERROR_INVALID_INITIAL_TABLE_DATA,
ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED,
ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES,
)
from .serializers import (
TableSerializer,
TableCreateSerializer,
TableUpdateSerializer,
OrderTablesSerializer,
)
@ -111,6 +118,9 @@ class TablesView(APIView):
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_INVALID_INITIAL_TABLE_DATA",
"ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED",
"ERROR_RESERVED_BASEROW_FIELD_NAME",
"ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES",
"ERROR_INVALID_BASEROW_FIELD_NAME",
]
),
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
@ -124,6 +134,9 @@ class TablesView(APIView):
InvalidInitialTableData: ERROR_INVALID_INITIAL_TABLE_DATA,
InitialTableDataLimitExceeded: ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED,
MaxFieldLimitExceeded: ERROR_MAX_FIELD_COUNT_EXCEEDED,
InitialTableDataDuplicateName: ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES,
ReservedBaserowFieldNameException: ERROR_RESERVED_BASEROW_FIELD_NAME,
InvalidBaserowFieldName: ERROR_INVALID_BASEROW_FIELD_NAME,
}
)
@validate_body(TableCreateSerializer)

View file

@ -80,3 +80,18 @@ class IncompatiblePrimaryFieldTypeError(Exception):
def __init__(self, field_type=None, *args, **kwargs):
self.field_type = field_type
super().__init__(*args, **kwargs)
class FieldWithSameNameAlreadyExists(Exception):
    """
    Raised on field creation or rename when another field in the same table
    already has the requested name.
    """
class ReservedBaserowFieldNameException(Exception):
    """
    Raised on field creation or rename when the requested name is one of the
    reserved Baserow field names.
    """
class InvalidBaserowFieldName(Exception):
    """
    Raised when a field name is missing or is blank / whitespace only.
    """

View file

@ -823,12 +823,22 @@ class LinkRowFieldType(FieldType):
if field.link_row_related_field:
return
field.link_row_related_field = FieldHandler().create_field(
handler = FieldHandler()
# First just try the tables name, so if say the Client table is linking to the
# Address table, the new field in the Address table will just be called 'Client'
# . However say we then add another link from the Client to Address table with
# a field name of "Bank Address", the new field in the Address table will be
# called 'Client - Bank Address'.
related_field_name = handler.find_next_unused_field_name(
field.link_row_table,
[f"{field.table.name}", f"{field.table.name} - " f"{field.name}"],
)
field.link_row_related_field = handler.create_field(
user=user,
table=field.link_row_table,
type_name=self.type,
do_schema_change=False,
name=field.table.name,
name=related_field_name,
link_row_table=field.table,
link_row_related_field=field,
link_row_relation_id=field.link_row_relation_id,

View file

@ -1,5 +1,7 @@
import logging
import re
from copy import deepcopy
from typing import Dict, Any, Optional, List
from django.conf import settings
from django.db import connections
@ -16,13 +18,65 @@ from .exceptions import (
FieldDoesNotExist,
IncompatiblePrimaryFieldTypeError,
MaxFieldLimitExceeded,
FieldWithSameNameAlreadyExists,
ReservedBaserowFieldNameException,
InvalidBaserowFieldName,
)
from .models import Field, SelectOption
from .registries import field_type_registry, field_converter_registry
from .signals import field_created, field_updated, field_deleted
from ..table.models import Table
logger = logging.getLogger(__name__)
# Please keep in sync with the web-frontend version of this constant found in
# web-frontend/modules/database/utils/constants.js
# Field names users may not use — presumably because they clash with the
# generated row model's "id" and "order" columns; TODO confirm.
RESERVED_BASEROW_FIELD_NAMES = {"id", "order"}
def _validate_field_name(
    field_values: Dict[str, Any],
    table: Table,
    existing_field: Optional[Field] = None,
    raise_if_name_missing: bool = True,
):
    """
    Raises an exception if the name provided in field_values is not valid for
    the given table.

    :param field_values: The dictionary which should contain a name key.
    :param table: The table to check that this field name is valid for.
    :param existing_field: If this is a name change for an existing field then
        the existing field instance must be provided here; keeping the field's
        current name is then always allowed.
    :param raise_if_name_missing: When True an InvalidBaserowFieldName is
        raised if the name key is not present in field_values. When False the
        function instead returns immediately when the key is missing.
    :raises InvalidBaserowFieldName: If the name is missing (and
        raise_if_name_missing is True) or is blank / whitespace only.
    :raises FieldWithSameNameAlreadyExists: If another field in the table
        already has this name.
    :raises ReservedBaserowFieldNameException: If the name is one of the
        reserved Baserow field names.
    """
    if "name" not in field_values:
        if raise_if_name_missing:
            raise InvalidBaserowFieldName()
        else:
            return

    name = field_values["name"]
    # A field keeping its own current name is a no-op and always valid.
    if existing_field is not None and existing_field.name == name:
        return
    if name.strip() == "":
        raise InvalidBaserowFieldName()
    if Field.objects.filter(table=table, name=name).exists():
        raise FieldWithSameNameAlreadyExists(
            f"A field already exists for table '{table.name}' with the name '{name}'."
        )
    if name in RESERVED_BASEROW_FIELD_NAMES:
        raise ReservedBaserowFieldNameException(
            f"A field named {name} cannot be created as it already exists as a "
            f"reserved Baserow field name."
        )
class FieldHandler:
def get_field(self, field_id, field_model=None, base_queryset=None):
@ -115,6 +169,8 @@ class FieldHandler:
f"Fields count exceeds the limit of {settings.MAX_FIELD_LIMIT}"
)
_validate_field_name(field_values, table)
field_values = field_type.prepare_values(field_values, user)
before = field_type.before_create(
table, primary, field_values, last_order, user
@ -193,6 +249,10 @@ class FieldHandler:
allowed_fields = ["name"] + field_type.allowed_fields
field_values = extract_allowed(kwargs, allowed_fields)
_validate_field_name(
field_values, field.table, field, raise_if_name_missing=False
)
field_values = field_type.prepare_values(field_values, user)
before = field_type.before_update(old_field, field_values, user)
@ -403,3 +463,65 @@ class FieldHandler:
if len(to_create) > 0:
SelectOption.objects.bulk_create(to_create)
# noinspection PyMethodMayBeStatic
def find_next_unused_field_name(
self,
table,
field_names_to_try: List[str],
field_ids_to_ignore: Optional[List[int]] = None,
):
"""
Finds a unused field name in the provided table. If no names in the provided
field_names_to_try list are available then the last field name in that list will
have a number appended which ensures it is an available unique field name.
:param table: The table whose fields to search.
:param field_names_to_try: The field_names to try in order before starting to
append a number.
:param field_ids_to_ignore: A list of field id's to exclude from checking to see
if the field name clashes with.
:return: An available field name
"""
if field_ids_to_ignore is None:
field_ids_to_ignore = []
# Check if any of the names to try are available by finding any existing field
# names with the same name.
taken_field_names = set(
Field.objects.exclude(id__in=field_ids_to_ignore)
.filter(table=table, name__in=field_names_to_try)
.values("name")
.distinct()
.values_list("name", flat=True)
)
# If there are more names to try than the ones used in the table then there must
# be one which isn't used.
if len(set(field_names_to_try)) > len(taken_field_names):
# Loop over to ensure we maintain the ordering provided by
# field_names_to_try, so we always return the first available name and
# not any.
for field_name in field_names_to_try:
if field_name not in taken_field_names:
return field_name
# None of the names in the param list are available, now using the last one lets
# append a number to the name until we find a free one.
original_field_name = field_names_to_try[-1]
# Lookup any existing fields which could potentially collide with our new
# field name. This way we can skip these and ensure our new field has a
# unique name.
existing_field_name_collisions = set(
Field.objects.exclude(id__in=field_ids_to_ignore)
.filter(table=table, name__regex=fr"^{re.escape(original_field_name)} \d+$")
.order_by("name")
.distinct()
.values_list("name", flat=True)
)
i = 2
while True:
field_name = f"{original_field_name} {i}"
i += 1
if field_name not in existing_field_name_collisions:
return field_name

View file

@ -59,6 +59,7 @@ class Field(
table = models.ForeignKey("database.Table", on_delete=models.CASCADE)
order = models.PositiveIntegerField(help_text="Lowest first.")
name = models.CharField(max_length=255)
old_name = models.CharField(max_length=255, null=True, blank=True)
primary = models.BooleanField(
default=False,
help_text="Indicates if the field is a primary field. If `true` the field "

View file

@ -0,0 +1,150 @@
# Generated by Django 2.2.11 on 2021-06-14 09:08
import re
from django.db import migrations, models
from django.db.models import Count, F, Q
# noinspection PyPep8Naming,PyUnusedLocal
def forward(apps, schema_editor):
    """
    Renames existing fields so every table satisfies the new naming rules:
    first fields with reserved or blank names are fixed, then fields with
    duplicate names within the same table.
    """
    # Use the historical models so the migration is independent of the current
    # model classes.
    Table = apps.get_model("database", "Table")
    Field = apps.get_model("database", "Field")

    fix_fields_with_reserved_names(Field, Table)
    fix_fields_with_duplicate_names(Field, Table)
def fix_fields_with_duplicate_names(Field, Table):
    """
    Renames fields so no two fields in the same table share a name. For each
    name the earliest field (lowest id) keeps the name and any later fields
    with that name get a numbered suffix appended.
    """

    # NOTE(review): Count("field__name") counts all of a table's fields, so
    # this yields every (table id, field name) pair of multi-field tables, not
    # only duplicated names. rename_non_unique_names_in_table is a no-op for
    # names without duplicates, so the result is still correct — confirm the
    # broader scan is acceptable performance-wise.
    tables_with_duplicate_fields = (
        Table.objects.annotate(duplicate_name_count=Count("field__name"))
        .filter(Q(duplicate_name_count__gt=1))
        .distinct()
        .values_list("id", "field__name")
        .order_by()
    )
    for table_id, name_to_fix in tables_with_duplicate_fields.iterator(chunk_size=2000):
        # Rename duplicate fields to Dup, Dup_2, Dup_3, Dup_4 etc.
        # We set the start index here to 1 so the first duplicate is not modified and
        # instead left as Dup.
        rename_non_unique_names_in_table(
            Field, table_id, name_to_fix, 1, 2, name_to_fix
        )
def fix_fields_with_reserved_names(Field, Table):
    """
    Renames every field whose name is reserved ("id" or "order") or blank so
    no field keeps a name that clashes with the newly reserved Baserow field
    names.
    """

    reserved_fields = {"id", "order", ""}
    tables_with_reserved_fields = (
        Table.objects.values("id", "field__name")
        .filter(Q(field__name__in=reserved_fields))
        .distinct()
        .values_list("id", "field__name")
        .order_by()
    )
    for table_id, name_to_fix in tables_with_reserved_fields.iterator(chunk_size=2000):
        if name_to_fix == "":
            # Rename blank fields to Field_1, Field_2, Field_3 etc
            new_name_prefix = "Field"
            next_name_number = 1
        else:
            # Rename other reserved fields to order_2, order_3 etc
            next_name_number = 2
            new_name_prefix = name_to_fix
        # A start index of 0 renames every matching field because none of the
        # reserved names may remain in use.
        rename_non_unique_names_in_table(
            Field, table_id, name_to_fix, 0, next_name_number, new_name_prefix
        )
def rename_non_unique_names_in_table(
    Field, table_id, name_to_fix, start_index, next_name_number, new_name_prefix
):
    """
    Given a table and a field name in that table this function will update all
    fields with that name in the table to be unique. It does this by appending
    an _Number to the field names where Number starts from next_name_number.
    This method will also ensure that new duplicates will not be created if
    say an existing FieldName_Number field exists.

    :param Field: The Field model to use.
    :param table_id: The table id to fix the fields for.
    :param name_to_fix: The name used to find all the fields in the table to fix.
    :param start_index: The starting index to start fixing fields from, so 0 would
        fix all fields matching name_to_fix, 1 would not fix the first field
        (ordered by id) but would fix all following fields etc.
    :param next_name_number: The number to start appending onto the new field names
        from. So the first fixed field will be called XXX_1 if next_name_number=1 (
        and there isn't an existing XXX_1 field).
    :param new_name_prefix: The name before the _Number to rename fixed fields to,
        so usually this should be just the field name, but say you want to rename
        a weird field name like " " to "Field_2" etc then set new_name_prefix to
        "Field".
    """

    # Order by id so start_index deterministically skips the earliest fields.
    fields_to_fix = Field.objects.filter(table_id=table_id, name=name_to_fix).order_by(
        "id"
    )
    # Look up all existing "<prefix>_<number>" names up front so the renames
    # below cannot themselves introduce new duplicates. The prefix is escaped
    # as it may contain regex metacharacters.
    escaped_name = re.escape(new_name_prefix)
    existing_collisions = set(
        Field.objects.filter(table_id=table_id, name__regex=fr"^{escaped_name}_\d+$")
        .order_by("name")
        .distinct()
        .values_list("name", flat=True)
    )
    # Fields before start_index (e.g. the first duplicate when start_index is
    # 1) are intentionally left unchanged; only the following ones are
    # renamed.
    fields = []
    for field in fields_to_fix[start_index:]:
        new_name, next_name_number = find_next_unused_field_name(
            new_name_prefix, next_name_number, existing_collisions
        )
        # Keep the original name so the migration can be reversed.
        field.old_name = field.name
        field.name = new_name
        fields.append(field)
    Field.objects.bulk_update(fields, ["name", "old_name"])
def find_next_unused_field_name(field_name, start_index, existing_collisions):
    """
    Builds the first name of the form "<field_name>_<number>" which does not
    appear in existing_collisions, counting numbers up from start_index.

    :param field_name: The base name to append _<number> to.
    :param start_index: The first number to try as a suffix.
    :param existing_collisions: A set of already taken field names to skip
        over when finding the next free field name.
    :return: A two-tuple of the free name and the next number to use on a
        subsequent call.
    """

    suffix = start_index
    candidate = f"{field_name}_{suffix}"
    while candidate in existing_collisions:
        suffix += 1
        candidate = f"{field_name}_{suffix}"
    return candidate, suffix + 1
# noinspection PyPep8Naming,PyUnusedLocal
def reverse(apps, schema_editor):
    """
    Restores the previous name of every field renamed by forward, using the
    old_name column populated during the rename.
    """
    Field = apps.get_model("database", "Field")
    Field.objects.filter(old_name__isnull=False).update(name=F("old_name"))
class Migration(migrations.Migration):

    dependencies = [
        ("database", "0032_trash"),
    ]

    operations = [
        # old_name temporarily stores each renamed field's previous name so
        # the data migration below can be reversed.
        migrations.AddField(
            model_name="field",
            name="old_name",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.RunPython(forward, reverse),
    ]

View file

@ -27,3 +27,9 @@ class InitialTableDataLimitExceeded(Exception):
Raised when the initial table data limit has been exceeded when creating a new
table.
"""
class InitialTableDataDuplicateName(Exception):
    """
    Raised when the field names found in the initial table data are not
    unique.
    """

View file

@ -1,25 +1,32 @@
from django.db import connections
from django.conf import settings
from django.db import connections
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.view_types import GridViewType
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.exceptions import MaxFieldLimitExceeded
from baserow.contrib.database.fields.exceptions import (
MaxFieldLimitExceeded,
ReservedBaserowFieldNameException,
InvalidBaserowFieldName,
)
from baserow.contrib.database.fields.field_types import (
LongTextFieldType,
BooleanFieldType,
)
from .models import Table
from baserow.contrib.database.fields.handler import (
FieldHandler,
RESERVED_BASEROW_FIELD_NAMES,
)
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.view_types import GridViewType
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs
from .exceptions import (
TableDoesNotExist,
TableNotInDatabase,
InvalidInitialTableData,
InitialTableDataLimitExceeded,
InitialTableDataDuplicateName,
)
from .models import Table
from .signals import table_created, table_updated, table_deleted, tables_reordered
@ -164,6 +171,20 @@ class TableHandler:
for i in range(len(fields), largest_column_count):
fields.append(f"Field {i + 1}")
# Stripping whitespace from field names is already done by
# TableCreateSerializer however we repeat to ensure that non API usages of
# this method is consistent with api usage.
field_name_set = {name.strip() for name in fields}
if len(field_name_set) != len(fields):
raise InitialTableDataDuplicateName()
if len(field_name_set.intersection(RESERVED_BASEROW_FIELD_NAMES)) > 0:
raise ReservedBaserowFieldNameException()
if "" in field_name_set:
raise InvalidBaserowFieldName()
for row in data:
for i in range(len(row), largest_column_count):
row.append("")

View file

@ -3,6 +3,7 @@ from typing import Optional, Any, List
from django.conf import settings
from django.db import connections
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.signals import field_restored
@ -85,6 +86,16 @@ class FieldTrashableItemType(TrashableItemType):
return trashed_item.name
def trashed_item_restored(self, trashed_item: Field, trash_entry: TrashEntry):
trashed_item.name = FieldHandler().find_next_unused_field_name(
trashed_item.table,
[trashed_item.name, f"{trashed_item.name} (Restored)"],
[trashed_item.id], # Ignore the field itself from the check.
)
# We need to set the specific field's name also so when the field_restored
# serializer switches to serializing the specific instance it picks up and uses
# the new name set here rather than the name currently in the DB.
trashed_item.specific.name = trashed_item.name
trashed_item.save()
field_restored.send(
self,
field=trashed_item,

View file

@ -1,3 +1,4 @@
import logging
from typing import Optional, Dict, Any
from django.conf import settings
@ -20,6 +21,7 @@ from baserow.core.trash.exceptions import (
)
from baserow.core.trash.registries import TrashableItemType, trash_item_type_registry
logger = logging.getLogger(__name__)
User = get_user_model()
@ -165,12 +167,15 @@ class TrashHandler:
"""
now = timezone.now()
cutoff = now - timezone.timedelta(
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED
)
TrashEntry.objects.filter(trashed_at__lte=cutoff).update(
hours = settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED
cutoff = now - timezone.timedelta(hours=hours)
updated_count = TrashEntry.objects.filter(trashed_at__lte=cutoff).update(
should_be_permanently_deleted=True
)
logger.info(
f"Successfully marked {updated_count} old trash items for deletion as they "
f"were older than {hours} hours."
)
@staticmethod
def empty(requesting_user: User, group_id: int, application_id: Optional[int]):
@ -193,6 +198,7 @@ class TrashHandler:
"""
trash_item_lookup_cache = {}
deleted_count = 0
for trash_entry in TrashEntry.objects.filter(
should_be_permanently_deleted=True
):
@ -213,6 +219,11 @@ class TrashHandler:
# to delete the entry as the item itself has been correctly deleted.
pass
trash_entry.delete()
deleted_count += 1
logger.info(
f"Successfully deleted {deleted_count} trash entries and their associated "
"trashed items."
)
@staticmethod
def permanently_delete(trashable_item):

View file

@ -1025,7 +1025,7 @@
{
"id": 60,
"type": "link_row",
"name": "Projects",
"name": "Projects - Project lead",
"order": 1,
"primary": false,
"link_row_table_id": 11,
@ -1034,7 +1034,7 @@
{
"id": 62,
"type": "link_row",
"name": "Projects",
"name": "Projects - Project team",
"order": 2,
"primary": false,
"link_row_table_id": 11,

View file

@ -226,6 +226,24 @@ def test_create_field(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
assert response.json()["error"] == "ERROR_NO_PERMISSION_TO_TABLE"
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": text.name, "type": "text"},
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS"
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": "id", "type": "text"},
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_RESERVED_BASEROW_FIELD_NAME"
@pytest.mark.django_db
def test_get_field(api_client, data_fixture):
@ -282,6 +300,7 @@ def test_update_field(api_client, data_fixture):
table_2 = data_fixture.create_database_table(user=user_2)
text = data_fixture.create_text_field(table=table, primary=True)
text_2 = data_fixture.create_text_field(table=table_2)
existing_field = data_fixture.create_text_field(table=table, name="existing_field")
url = reverse("api:database:fields:item", kwargs={"field_id": text_2.id})
response = api_client.patch(
@ -398,6 +417,23 @@ def test_update_field(api_client, data_fixture):
assert "number_decimal_places" not in response_json
assert "number_negative" not in response_json
url = reverse("api:database:fields:item", kwargs={"field_id": text.id})
response = api_client.patch(
url, {"name": "id"}, format="json", HTTP_AUTHORIZATION=f"JWT {token}"
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_RESERVED_BASEROW_FIELD_NAME"
url = reverse("api:database:fields:item", kwargs={"field_id": text.id})
response = api_client.patch(
url,
{"name": existing_field.name},
format="json",
HTTP_AUTHORIZATION=f"JWT" f" {token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_FIELD_WITH_SAME_NAME_ALREADY_EXISTS"
@pytest.mark.django_db
def test_delete_field(api_client, data_fixture):

View file

@ -256,11 +256,7 @@ def test_create_table_with_data(api_client, data_fixture):
'Falsea"""',
'a"a"a"a"a,',
"a",
"a",
"",
"/w. r/awr",
"",
"",
],
],
"first_row_header": True,
@ -278,16 +274,90 @@ def test_create_table_with_data(api_client, data_fixture):
assert text_fields[2].name == 'Falsea"""'
assert text_fields[3].name == 'a"a"a"a"a,'
assert text_fields[4].name == "a"
assert text_fields[5].name == "a"
assert text_fields[6].name == ""
assert text_fields[7].name == "/w. r/awr"
assert text_fields[8].name == ""
assert text_fields[9].name == ""
assert text_fields[5].name == "/w. r/awr"
model = table.get_model()
results = model.objects.all()
assert results.count() == 0
url = reverse("api:database:tables:list", kwargs={"database_id": database.id})
response = api_client.post(
url,
{
"name": "Test 4",
"data": [
[
"id",
],
],
"first_row_header": True,
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_RESERVED_BASEROW_FIELD_NAME"
url = reverse("api:database:tables:list", kwargs={"database_id": database.id})
response = api_client.post(
url,
{
"name": "Test 4",
"data": [
[
"test",
"test",
],
],
"first_row_header": True,
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES"
assert "unique" in response_json["detail"]
url = reverse("api:database:tables:list", kwargs={"database_id": database.id})
response = api_client.post(
url,
{
"name": "Test 4",
"data": [
[
" ",
],
],
"first_row_header": True,
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_INVALID_BASEROW_FIELD_NAME"
assert "blank" in response_json["detail"]
url = reverse("api:database:tables:list", kwargs={"database_id": database.id})
response = api_client.post(
url,
{
"name": "Test 4",
"data": [
[
" test 1",
" test 2",
],
],
"first_row_header": True,
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
@pytest.mark.django_db
def test_get_table(api_client, data_fixture):

View file

@ -15,6 +15,8 @@ from baserow.contrib.database.fields.exceptions import (
IncompatiblePrimaryFieldTypeError,
CannotChangeFieldType,
MaxFieldLimitExceeded,
FieldWithSameNameAlreadyExists,
ReservedBaserowFieldNameException,
)
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
@ -36,6 +38,8 @@ from baserow.core.exceptions import UserNotInGroup
# You must add --runslow to pytest to run this test, you can do this in intellij by
# editing the run config for this test and adding --runslow to additional args.
@pytest.mark.django_db
@pytest.mark.slow
def test_can_convert_between_all_fields(data_fixture):
@ -246,6 +250,17 @@ def test_create_field(send_mock, data_fixture):
assert NumberField.objects.all().count() == 1
assert BooleanField.objects.all().count() == 1
with pytest.raises(FieldWithSameNameAlreadyExists):
handler.create_field(
user=user, table=table, type_name="boolean", name=boolean_field.name
)
with pytest.raises(ReservedBaserowFieldNameException):
handler.create_field(user=user, table=table, type_name="boolean", name="order")
with pytest.raises(ReservedBaserowFieldNameException):
handler.create_field(user=user, table=table, type_name="boolean", name="id")
field_limit = settings.MAX_FIELD_LIMIT
settings.MAX_FIELD_LIMIT = 2
@ -276,20 +291,26 @@ def test_create_primary_field(data_fixture):
with pytest.raises(PrimaryFieldAlreadyExists):
handler = FieldHandler()
handler.create_field(user=user, table=table_1, type_name="text", primary=True)
handler.create_field(
user=user, table=table_1, type_name="text", primary=True, name="test"
)
handler = FieldHandler()
field = handler.create_field(
user=user, table=table_2, type_name="text", primary=True
user=user, table=table_2, type_name="text", primary=True, name="primary"
)
assert field.primary
with pytest.raises(PrimaryFieldAlreadyExists):
handler.create_field(user=user, table=table_2, type_name="text", primary=True)
handler.create_field(
user=user, table=table_2, type_name="text", primary=True, name="new_primary"
)
# Should be able to create a regular field when there is already a primary field.
handler.create_field(user=user, table=table_2, type_name="text", primary=False)
handler.create_field(
user=user, table=table_2, type_name="text", primary=False, name="non_primary"
)
@pytest.mark.django_db
@ -400,6 +421,16 @@ def test_update_field(send_mock, data_fixture):
assert getattr(rows[1], f"field_{field.id}") is False
assert getattr(rows[2], f"field_{field.id}") is False
with pytest.raises(ReservedBaserowFieldNameException):
handler.update_field(user=user, field=field, name="order")
with pytest.raises(ReservedBaserowFieldNameException):
handler.update_field(user=user, field=field, name="id")
field_2 = data_fixture.create_text_field(table=table, order=1)
with pytest.raises(FieldWithSameNameAlreadyExists):
handler.update_field(user=user, field=field_2, name=field.name)
@pytest.mark.django_db
def test_update_field_failing(data_fixture):
@ -875,3 +906,32 @@ def test_update_select_options(data_fixture):
assert SelectOption.objects.all().count() == 2
assert field_2.select_options.all().count() == 0
@pytest.mark.django_db
def test_find_next_free_field_name(data_fixture):
    """
    find_next_unused_field_name must return the first candidate name that is
    not already used in the table, appending a numeric postfix when every
    candidate is taken, and may be told to ignore specific field ids.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    data_fixture.create_text_field(table=table, order=0)
    data_fixture.create_text_field(name="test", table=table, order=1)
    field_to_ignore = data_fixture.create_text_field(
        name="field", table=table, order=1
    )
    data_fixture.create_text_field(name="field 2", table=table, order=1)

    field_handler = FieldHandler()

    # "test" is taken, so a numeric postfix is appended.
    assert field_handler.find_next_unused_field_name(table, ["test"]) == "test 2"
    # The first unused candidate wins over postfixing an earlier taken one.
    assert (
        field_handler.find_next_unused_field_name(table, ["test", "other"]) == "other"
    )
    # "field" and "field 2" both exist, so the next free postfix is 3.
    assert field_handler.find_next_unused_field_name(table, ["field"]) == "field 3"
    # Fields whose ids are passed in the final argument are ignored, which
    # frees up their names.
    assert (
        field_handler.find_next_unused_field_name(
            table, ["field"], [field_to_ignore.id]
        )
        == "field"
    )

    data_fixture.create_text_field(name="regex like field [0-9]", table=table, order=1)
    data_fixture.create_text_field(
        name="regex like field [0-9] 2", table=table, order=1
    )
    # Regex special characters in existing names must not break the search.
    assert (
        field_handler.find_next_unused_field_name(table, ["regex like field [0-9]"])
        == "regex like field [0-9] 3"
    )

View file

@ -108,12 +108,12 @@ def test_link_row_field_type(data_fixture):
user=user,
table=table,
type_name="link_row",
name="Customer",
name="Customer 2",
link_row_table=customers_table,
)
assert link_field_1.link_row_related_field.name == "Example"
assert link_field_2.link_row_related_field.name == "Example"
assert link_field_2.link_row_related_field.name == "Example - Customer 2"
connection = connections["default"]
tables = connection.introspection.table_names()
@ -154,7 +154,9 @@ def test_link_row_field_type(data_fixture):
# Going to change only the name of the field. This should not result in any errors
# of schema changes.
link_field_1 = field_handler.update_field(user, link_field_1, name="Customer 2")
link_field_1 = field_handler.update_field(
user, link_field_1, name="Customer Renamed"
)
with pytest.raises(LinkRowTableNotInSameDatabase):
field_handler.update_field(user, link_field_1, link_row_table=unrelated_table_1)
@ -192,8 +194,7 @@ def test_link_row_field_type(data_fixture):
table_row.save()
assert getattr(table_row, f"field_{link_field_2.id}") == "Text value"
# Delete the existing field. Alter that the related field should be deleted and
# no table named _relation_ should exist.
# Delete the existing field. Alter that the related field should be trashed.
field_handler.delete_field(user, link_field_1)
# Change a the text field back into a link row field.
@ -240,6 +241,7 @@ def test_link_row_field_type_rows(data_fixture):
link_row_field = field_handler.create_field(
user=user,
table=example_table,
name="Link Row",
type_name="link_row",
link_row_table=customers_table,
)
@ -375,6 +377,7 @@ def test_link_row_enhance_queryset(data_fixture, django_assert_num_queries):
link_row_field = field_handler.create_field(
user=user,
table=example_table,
name="Link Row",
type_name="link_row",
link_row_table=customers_table,
)
@ -606,6 +609,7 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):
link_row_field = field_handler.create_field(
user=user,
table=example_table,
name="Link Row",
type_name="link_row",
link_row_table=customers_table,
)
@ -749,7 +753,11 @@ def test_import_export_link_row_field(data_fixture, user_tables_in_separate_db):
field_handler = FieldHandler()
core_handler = CoreHandler()
link_row_field = field_handler.create_field(
user=user, table=table, type_name="link_row", link_row_table=customers_table
user=user,
table=table,
name="Link Row",
type_name="link_row",
link_row_table=customers_table,
)
row_handler = RowHandler()

View file

@ -75,7 +75,11 @@ def test_single_select_field_type(data_fixture):
assert SelectOption.objects.all().count() == 0
field = field_handler.create_field(
user=user, table=table, type_name="single_select", select_options=[]
user=user,
table=table,
type_name="single_select",
select_options=[],
name="Another Single Select",
)
field_handler.update_field(user=user, field=field, new_type_name="text")
@ -93,6 +97,7 @@ def test_single_select_field_type_rows(data_fixture, django_assert_num_queries):
field = field_handler.create_field(
user=user,
table=table,
name="name",
type_name="single_select",
select_options=[
{"value": "Option 1", "color": "red"},
@ -246,7 +251,7 @@ def test_single_select_field_type_api_views(api_client, data_fixture):
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{
"name": "Select 1",
"name": "Select 2",
"type": "single_select",
"select_options": [{"value": "Option 1", "color": "red"}],
},
@ -262,7 +267,7 @@ def test_single_select_field_type_api_views(api_client, data_fixture):
assert select_options[0].value == "Option 1"
assert select_options[0].color == "red"
assert select_options[0].order == 0
assert response_json["name"] == "Select 1"
assert response_json["name"] == "Select 2"
assert response_json["type"] == "single_select"
assert response_json["select_options"] == [
{"id": select_options[0].id, "value": "Option 1", "color": "red"}
@ -354,6 +359,7 @@ def test_single_select_field_type_api_row_views(api_client, data_fixture):
user=user,
table=table,
type_name="single_select",
name="Single select",
select_options=[
{"value": "Option 1", "color": "red"},
{"value": "Option 2", "color": "blue"},
@ -531,6 +537,7 @@ def test_primary_single_select_field_with_link_row_field(
customers_primary = field_handler.create_field(
user=user,
table=customers_table,
name="Single Select",
type_name="single_select",
select_options=[
{"value": "Option 1", "color": "red"},
@ -542,6 +549,7 @@ def test_primary_single_select_field_with_link_row_field(
link_row_field = field_handler.create_field(
user=user,
table=example_table,
name="Link row",
type_name="link_row",
link_row_table=customers_table,
)

View file

@ -0,0 +1,307 @@
import pytest
# noinspection PyPep8Naming
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
migrate_from = [("database", "0032_trash")]
migrate_to = [("database", "0033_unique_field_names")]
# noinspection PyPep8Naming
@pytest.mark.django_db
def test_migration_fixes_duplicate_field_names(
    data_fixture, transactional_db, user_tables_in_separate_db
):
    """
    The 0033_unique_field_names migration must rename duplicated field names so
    that every name is unique within its table, storing the original name in
    old_name. Duplicate names across different tables are allowed and must be
    left untouched.
    """

    old_state = migrate(migrate_from)
    # The models used by the data_fixture below are not touched by this migration so
    # it is safe to use the latest version in the test.
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    # Use the historical models so the created rows match the pre-migration
    # schema.
    Table = old_state.apps.get_model("database", "Table")
    ContentType = old_state.apps.get_model("contenttypes", "ContentType")
    table = Table.objects.create(database_id=database.id, name="test", order=0)
    other_table = Table.objects.create(database_id=database.id, name="test", order=1)
    TextField = old_state.apps.get_model("database", "TextField")
    Field = old_state.apps.get_model("database", "Field")
    content_type_id = ContentType.objects.get_for_model(TextField).id
    table_1_fields = make_fields_with_names(
        [
            "Duplicate",
            "Duplicate",
        ],
        table.id,
        content_type_id,
        Field,
    )
    table_2_fields = make_fields_with_names(
        [
            "Duplicate",
            "Other",
            "Other",
            "Other",
        ],
        other_table.id,
        content_type_id,
        Field,
    )

    new_state = migrate(migrate_to)

    MigratedField = new_state.apps.get_model("database", "Field")
    # The first occurrence keeps its name (old_name stays None), later
    # duplicates get a numeric postfix and remember their original name.
    assert_fields_name_and_old_name_is(
        [
            ("Duplicate", None),
            ("Duplicate_2", "Duplicate"),
        ],
        table_1_fields,
        MigratedField,
    )
    # Names only have to be unique per table, so "Duplicate" may appear again
    # in the other table without being renamed.
    assert_fields_name_and_old_name_is(
        [
            ("Duplicate", None),
            ("Other", None),
            ("Other_2", "Other"),
            ("Other_3", "Other"),
        ],
        table_2_fields,
        MigratedField,
    )
# noinspection PyPep8Naming
@pytest.mark.django_db
def test_migration_handles_existing_fields_with_underscore_number(
    data_fixture, transactional_db, user_tables_in_separate_db
):
    """
    When fields already end with an underscore and a number (e.g. Duplicate_2)
    the migration must pick postfixes that do not collide with those existing
    names, including for names that contain regex special characters.
    """

    old_state = migrate(migrate_from)
    # The models used by the data_fixture below are not touched by this migration so
    # it is safe to use the latest version in the test.
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    Table = old_state.apps.get_model("database", "Table")
    ContentType = old_state.apps.get_model("contenttypes", "ContentType")
    table = Table.objects.create(database_id=database.id, name="test", order=0)
    TextField = old_state.apps.get_model("database", "TextField")
    Field = old_state.apps.get_model("database", "Field")
    content_type_id = ContentType.objects.get_for_model(TextField).id
    table_1_fields = make_fields_with_names(
        [
            "Duplicate",
            "Duplicate",
            "Duplicate",
            "Duplicate_2",
            "Duplicate_2",
            "Duplicate_2",
            "Duplicate_2_2",
            "Duplicate_2_2",
            "Duplicate_3",
            "Duplicate_3",
            "Name Like a Regex [0-9]",
            "Name Like a Regex [0-9]_2",
            "Name Like a Regex [0-9]",
        ],
        table.id,
        content_type_id,
        Field,
    )

    new_state = migrate(migrate_to)

    MigratedField = new_state.apps.get_model("database", "Field")
    # "Duplicate_2" and "Duplicate_3" are already taken by real fields, so the
    # renamed duplicates of "Duplicate" start numbering at _4.
    assert_fields_name_and_old_name_is(
        [
            ("Duplicate", None),
            ("Duplicate_4", "Duplicate"),
            ("Duplicate_5", "Duplicate"),
            ("Duplicate_2", None),
            ("Duplicate_2_3", "Duplicate_2"),
            ("Duplicate_2_4", "Duplicate_2"),
            ("Duplicate_2_2", None),
            ("Duplicate_2_2_2", "Duplicate_2_2"),
            ("Duplicate_3", None),
            ("Duplicate_3_2", "Duplicate_3"),
            ("Name Like a Regex [0-9]", None),
            ("Name Like a Regex [0-9]_2", None),
            ("Name Like a Regex [0-9]_3", "Name Like a Regex [0-9]"),
        ],
        table_1_fields,
        MigratedField,
    )
# noinspection PyPep8Naming
@pytest.mark.django_db
def test_backwards_migration_restores_field_names(
    data_fixture, transactional_db, user_tables_in_separate_db
):
    """
    Migrating backwards must restore every field's original name from the
    old_name column, even when that reintroduces duplicate names in the same
    table.
    """

    old_state = migrate(migrate_to)
    # The models used by the data_fixture below are not touched by this migration so
    # it is safe to use the latest version in the test.
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    Table = old_state.apps.get_model("database", "Table")
    ContentType = old_state.apps.get_model("contenttypes", "ContentType")
    table = Table.objects.create(database_id=database.id, name="test", order=0)
    TextField = old_state.apps.get_model("database", "TextField")
    Field = old_state.apps.get_model("database", "Field")
    content_type_id = ContentType.objects.get_for_model(TextField).id
    # (name, old_name) tuples simulate fields that were renamed by the
    # forwards migration.
    table_1_fields = make_fields_with_names(
        [
            ("Duplicate", None),
            ("Duplicate_2", "Duplicate"),
            ("Duplicate_3", "Duplicate"),
        ],
        table.id,
        content_type_id,
        Field,
    )

    new_state = migrate(migrate_from)

    BackwardsMigratedField = new_state.apps.get_model("database", "Field")
    assert_fields_name_is(
        [
            "Duplicate",
            "Duplicate",
            "Duplicate",
        ],
        table_1_fields,
        BackwardsMigratedField,
    )
# noinspection PyPep8Naming
@pytest.mark.django_db
def test_migration_fixes_duplicate_field_names_and_reserved_names(
    data_fixture, transactional_db, user_tables_in_separate_db
):
    """
    Besides duplicates, the migration must also rename blank field names and
    names reserved by Baserow ("id" and "order"). Only exact matches count:
    differently cased variants like "Id" and "Order" must stay untouched.
    """

    old_state = migrate(migrate_from)
    # The models used by the data_fixture below are not touched by this migration so
    # it is safe to use the latest version in the test.
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    Table = old_state.apps.get_model("database", "Table")
    ContentType = old_state.apps.get_model("contenttypes", "ContentType")
    table = Table.objects.create(database_id=database.id, name="test", order=0)
    other_table = Table.objects.create(database_id=database.id, name="test", order=1)
    TextField = old_state.apps.get_model("database", "TextField")
    Field = old_state.apps.get_model("database", "Field")
    content_type_id = ContentType.objects.get_for_model(TextField).id
    table_1_fields = make_fields_with_names(
        [
            "Duplicate",
            "Duplicate",
            "id",
            "id",
        ],
        table.id,
        content_type_id,
        Field,
    )
    table_2_fields = make_fields_with_names(
        ["", "order", "order", "Order", "Id"],
        other_table.id,
        content_type_id,
        Field,
    )

    # Run the migration to test
    new_state = migrate(migrate_to)

    # After the initial migration is done, we can use the model state:
    MigratedField = new_state.apps.get_model("database", "Field")
    # "id" is reserved, so even its first occurrence is renamed.
    assert_fields_name_and_old_name_is(
        [
            ("Duplicate", None),
            ("Duplicate_2", "Duplicate"),
            ("id_2", "id"),
            ("id_3", "id"),
        ],
        table_1_fields,
        MigratedField,
    )
    # Blank names become "Field_N", reserved "order" is renamed, but the
    # differently cased "Order" and "Id" are valid and kept as-is.
    assert_fields_name_and_old_name_is(
        [
            ("Field_1", ""),
            ("order_2", "order"),
            ("order_3", "order"),
            ("Order", None),
            ("Id", None),
        ],
        table_2_fields,
        MigratedField,
    )
def make_fields_with_names(field_names, table_id, content_type_id, Field):
    """
    Creates one Field row per entry in field_names for the given table.

    Each entry is either a plain name string, or a (name, old_name) tuple when
    the field's old_name column should also be populated. The first created
    field is marked as the table's primary field.

    :param field_names: A list of names or (name, old_name) tuples.
    :param table_id: The id of the table the fields belong to.
    :param content_type_id: The content type id assigned to every field.
    :param Field: The (historical) Field model used to create the rows.
    :return: The created field instances, in input order.
    """

    created_fields = []
    for index, entry in enumerate(field_names):
        kwargs = {
            "table_id": table_id,
            # Only the very first field acts as the table's primary field.
            "primary": index == 0,
            "order": 1,
            "content_type_id": content_type_id,
        }
        if isinstance(entry, tuple):
            kwargs["name"], kwargs["old_name"] = entry
        else:
            kwargs["name"] = entry
        created_fields.append(Field.objects.create(**kwargs))
    return created_fields
# noinspection PyPep8Naming
def assert_fields_name_and_old_name_is(name_old_name_tuples, fields, Field):
    """
    Asserts that each field, after being re-fetched from the database, has the
    expected name and old_name.

    :param name_old_name_tuples: A list of (expected_name, expected_old_name)
        tuples in the same order as fields. expected_old_name may be None when
        the field should not have an old name set.
    :param fields: The field instances to check.
    :param Field: The (historical) Field model used to re-fetch each field so
        the assertions run against the current database state.
    """

    # Fail loudly on mismatched lists instead of silently ignoring extra
    # fields; previously extra entries in `fields` were never checked and the
    # input list was destructively consumed with pop(0).
    assert len(name_old_name_tuples) == len(fields)
    for (expected_name, expected_old_name), field in zip(
        name_old_name_tuples, fields
    ):
        # Re-fetch so we assert against what was actually persisted.
        looked_up_field = Field.objects.get(id=field.id)
        assert looked_up_field.name == expected_name
        if expected_old_name is None:
            assert looked_up_field.old_name is None
        else:
            assert looked_up_field.old_name == expected_old_name
# noinspection PyPep8Naming
def assert_fields_name_is(expected_names, fields, Field):
    """
    Asserts that each field, after being re-fetched from the database, has the
    expected name.

    :param expected_names: The expected names, in the same order as fields.
    :param fields: The field instances to check.
    :param Field: The (historical) Field model used to re-fetch each field.
    """

    # Fail loudly on mismatched lists instead of silently ignoring extra
    # fields; previously extra entries in `fields` were never checked and the
    # input list was destructively consumed with pop(0).
    assert len(expected_names) == len(fields)
    for expected_name, field in zip(expected_names, fields):
        looked_up_field = Field.objects.get(id=field.id)
        assert looked_up_field.name == expected_name
def migrate(target):
    """
    Migrates the default database to the given migration target and returns
    the resulting project state, from which historical models can be fetched.

    :param target: A list of (app_label, migration_name) tuples to migrate to.
    :return: The project state after migrating to the target.
    """

    migration_executor = MigrationExecutor(connection)
    # Reload the migration graph in case an earlier migrate() call in the same
    # test already changed the database state.
    migration_executor.loader.build_graph()
    migration_executor.migrate(target)
    return migration_executor.loader.project_state(target)

View file

@ -150,7 +150,10 @@ def test_fill_table_with_initial_data(data_fixture):
with pytest.raises(MaxFieldLimitExceeded):
table_handler.create_table(
user, database, name="Table 1", data=[["fields"] * 3, ["rows"] * 3]
user,
database,
name="Table 1",
data=[["field1", "field2", "field3"], ["rows"] * 3],
)
settings.MAX_FIELD_LIMIT = field_limit

View file

@ -746,3 +746,87 @@ def test_can_perm_delete_tables_in_another_user_db(
f"database_table_{table.id}"
not in user_tables_in_separate_db.introspection.table_names()
)
@pytest.mark.django_db
def test_a_restored_field_will_have_its_name_changed_to_ensure_it_is_unique(
    data_fixture,
):
    """
    Restoring a trashed field must not reintroduce a duplicate field name: if
    the original name was taken while the field was trashed, the restored
    field (and its related link row field) gets a "(Restored)" style postfix,
    numbered if needed, instead of its old name.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user, name="Placeholder")
    table = data_fixture.create_database_table(database=database, name="Table")
    customers_table = data_fixture.create_database_table(
        name="Customers", database=database
    )
    field_handler = FieldHandler()
    row_handler = RowHandler()

    # Create a primary field and some example data for the customers table.
    customers_primary_field = field_handler.create_field(
        user=user, table=customers_table, type_name="text", name="Name", primary=True
    )
    row_handler.create_row(
        user=user,
        table=customers_table,
        values={f"field_{customers_primary_field.id}": "John"},
    )
    row_handler.create_row(
        user=user,
        table=customers_table,
        values={f"field_{customers_primary_field.id}": "Jane"},
    )
    link_field_1 = field_handler.create_field(
        user=user,
        table=table,
        type_name="link_row",
        name="Customer",
        link_row_table=customers_table,
    )
    TrashHandler.trash(user, database.group, database, link_field_1)
    TrashHandler.trash(user, database.group, database, customers_primary_field)

    assert LinkRowField.trash.count() == 2

    # While the fields are trashed, take over the names they used to have so
    # that restoring them would cause a clash.
    clashing_field = field_handler.create_field(
        user=user, table=customers_table, type_name="text", name="Name"
    )
    another_clashing_field = field_handler.create_field(
        user=user,
        table=customers_table,
        type_name="text",
        name="Name (Restored)",
    )
    link_field_2 = field_handler.create_field(
        user=user,
        table=table,
        type_name="link_row",
        name="Customer",
        link_row_table=customers_table,
    )
    TrashHandler.restore_item(user, "field", link_field_1.id)
    link_field_1.refresh_from_db()

    # Both the restored link field and its automatically created related field
    # are renamed with a "(Restored)" postfix to stay unique.
    assert LinkRowField.objects.count() == 4
    assert link_field_2.name == "Customer"
    assert link_field_1.name == "Customer (Restored)"
    assert link_field_2.link_row_related_field.name == "Table"
    assert link_field_1.link_row_related_field.name == "Table (Restored)"

    TrashHandler.restore_item(user, "field", customers_primary_field.id)
    customers_primary_field.refresh_from_db()

    # "Name" and "Name (Restored)" are both taken, so a number is appended.
    assert TextField.objects.count() == 3
    assert clashing_field.name == "Name"
    assert another_clashing_field.name == "Name (Restored)"
    assert customers_primary_field.name == "Name (Restored) 2"

    # Check that a normal trash and restore when there aren't any naming conflicts will
    # return the old names.
    TrashHandler.trash(user, database.group, database, link_field_1)
    TrashHandler.restore_item(user, "field", link_field_1.id)
    link_field_1.refresh_from_db()
    assert link_field_2.name == "Customer"
    assert link_field_1.name == "Customer (Restored)"

View file

@ -1610,7 +1610,11 @@ def test_empty_filter_type(data_fixture):
tmp_table = data_fixture.create_database_table(database=table.database)
tmp_field = data_fixture.create_text_field(table=tmp_table, primary=True)
link_row_field = FieldHandler().create_field(
user=user, table=table, type_name="link_row", link_row_table=tmp_table
user=user,
table=table,
name="Link row",
type_name="link_row",
link_row_table=tmp_table,
)
tmp_row = tmp_table.get_model().objects.create(**{f"field_{tmp_field.id}": "Test"})
@ -1732,7 +1736,11 @@ def test_not_empty_filter_type(data_fixture):
tmp_table = data_fixture.create_database_table(database=table.database)
tmp_field = data_fixture.create_text_field(table=tmp_table, primary=True)
link_row_field = FieldHandler().create_field(
user=user, table=table, type_name="link_row", link_row_table=tmp_table
user=user,
table=table,
name="Link row",
type_name="link_row",
link_row_table=tmp_table,
)
tmp_row = tmp_table.get_model().objects.create(**{f"field_{tmp_field.id}": "Test"})

View file

@ -1,9 +1,11 @@
from __future__ import print_function
import sys
import psycopg2
import pytest
from django.db import connections
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import sys
@pytest.fixture

View file

@ -12,6 +12,8 @@
* Added before and after date filters.
* Support building Baserow out of the box on Ubuntu by lowering the required docker
version to build Baserow down to 19.03.
* Disallow duplicate field names in the same table, blank field names, and field
names called 'order' or 'id'. Existing invalid field names will be fixed automatically.
## Released (2021-06-02)

View file

@ -11,9 +11,29 @@
placeholder="Name"
@blur="$v.values.name.$touch()"
/>
<div v-if="$v.values.name.$error" class="error">
<div
v-if="$v.values.name.$dirty && !$v.values.name.required"
class="error"
>
This field is required.
</div>
<div
v-else-if="
$v.values.name.$dirty && !$v.values.name.mustHaveUniqueFieldName
"
class="error"
>
A field with this name already exists.
</div>
<div
v-else-if="
$v.values.name.$dirty &&
!$v.values.name.mustNotClashWithReservedName
"
class="error"
>
This field name is not allowed.
</div>
</div>
</div>
<div class="control">
@ -53,6 +73,8 @@
import { required } from 'vuelidate/lib/validators'
import form from '@baserow/modules/core/mixins/form'
import { mapGetters } from 'vuex'
import { RESERVED_BASEROW_FIELD_NAMES } from '@baserow/modules/database/utils/constants'
// @TODO focus form on open
export default {
@ -85,14 +107,36 @@ export default {
hasFormComponent() {
return !!this.values.type && this.getFormComponent(this.values.type)
},
},
validations: {
values: {
name: { required },
type: { required },
existingFieldId() {
return this.defaultValues ? this.defaultValues.id : null
},
...mapGetters({
fields: 'field/getAllWithPrimary',
}),
},
validations() {
return {
values: {
name: {
required,
mustHaveUniqueFieldName: this.mustHaveUniqueFieldName,
mustNotClashWithReservedName: this.mustNotClashWithReservedName,
},
type: { required },
},
}
},
methods: {
// Vuelidate validator: true when no other field in the table already uses
// the trimmed name. When editing an existing field, that field itself is
// excluded so keeping its current name remains valid.
mustHaveUniqueFieldName(param) {
  let fields = this.fields
  if (this.existingFieldId !== null) {
    fields = fields.filter((f) => f.id !== this.existingFieldId)
  }
  return !fields.map((f) => f.name).includes(param.trim())
},
// Vuelidate validator: true when the trimmed name is not one of the field
// names reserved by Baserow (e.g. 'id' and 'order').
mustNotClashWithReservedName(param) {
  return !RESERVED_BASEROW_FIELD_NAMES.includes(param.trim())
},
getFormComponent(type) {
return this.$registry.get('field', type).getFormComponent()
},

View file

@ -192,12 +192,13 @@ export default {
// If parsed successfully and it is not empty then the initial data can be
// prepared for creating the table. We store the data stringified because
// it doesn't need to be reactive.
this.values.data = JSON.stringify(data.data)
this.error = ''
this.preview = this.getPreview(
const dataWithHeader = this.ensureHeaderExistsAndIsValid(
data.data,
this.values.firstRowHeader
)
this.values.data = JSON.stringify(dataWithHeader)
this.error = ''
this.preview = this.getPreview(dataWithHeader)
}
},
error(error) {

View file

@ -190,9 +190,10 @@ export default {
data.unshift(header)
this.values.data = JSON.stringify(data)
const dataWithHeader = this.ensureHeaderExistsAndIsValid(data, true)
this.values.data = JSON.stringify(dataWithHeader)
this.error = ''
this.preview = this.getPreview(data, true)
this.preview = this.getPreview(dataWithHeader)
},
},
}

View file

@ -99,9 +99,13 @@ export default {
// If parsed successfully and it is not empty then the initial data can be
// prepared for creating the table. We store the data stringified because it
// doesn't need to be reactive.
this.values.data = JSON.stringify(data.data)
const dataWithHeader = this.ensureHeaderExistsAndIsValid(
data.data,
this.values.firstRowHeader
)
this.values.data = JSON.stringify(dataWithHeader)
this.error = ''
this.preview = this.getPreview(data.data, this.values.firstRowHeader)
this.preview = this.getPreview(dataWithHeader)
this.$emit('input', this.value)
},
error(error) {

View file

@ -154,9 +154,13 @@ export default {
return
}
this.values.data = JSON.stringify(xmlData)
const dataWithHeader = this.ensureHeaderExistsAndIsValid(
xmlData,
hasHeader
)
this.values.data = JSON.stringify(dataWithHeader)
this.error = ''
this.preview = this.getPreview(xmlData, hasHeader)
this.preview = this.getPreview(dataWithHeader)
},
},
}

View file

@ -1,43 +1,138 @@
/**
* Mixin that introduces helper methods for the importer form component.
*/
import { RESERVED_BASEROW_FIELD_NAMES } from '@baserow/modules/database/utils/constants'
export default {
methods: {
/**
* Adds a header of Field 1, Field 2 etc if the first row is not already a header,
* otherwise checks that the existing header has valid and non duplicate field
* names. If there are invalid or duplicate field names they will be replaced with
* valid unique field names instead.
*
* @param data An array starting with a header row if firstRowHeader is true,
* followed by rows of data.
* @param firstRowHeader Whether or not the first row in the data array is a
* header row or not.
* @return {*} An updated data object with the first row being a valid unique
* header row.
*/
ensureHeaderExistsAndIsValid(data, firstRowHeader) {
let head = data[0]
const columns = Math.max(...data.map((entry) => entry.length))
// If the first row is not the header, a header containing columns named
// 'Field N' needs to be generated.
if (!firstRowHeader) {
head = []
for (let i = 1; i <= columns; i++) {
head.push(`Field ${i}`)
}
data.unshift(head)
} else {
// The header row might not be long enough to cover all columns, ensure it does
// first.
head = this.fill(head, columns)
head = this.makeHeaderUniqueAndValid(head)
data[0] = head
}
return data
},
/**
* Fills the row with a minimum amount of empty columns.
*/
fill(row, columns) {
for (let i = row.length; i < columns; i++) {
row.push('')
}
return row
},
/**
* Generates an object that can used to render a quick preview of the provided
* data. Can be used in combination with the TableImporterPreview component.
*/
getPreview(data, firstRowHeader) {
let head = data[0]
let rows = data.slice(1, 4)
let remaining = data.length - rows.length - 1
getPreview(data) {
const head = data[0]
const rows = data.slice(1, 4)
const remaining = data.length - rows.length - 1
const columns = Math.max(...data.map((entry) => entry.length))
/**
* Fills the row with a minimum amount of empty columns.
*/
const fill = (row, columns) => {
for (let i = row.length; i < columns; i++) {
row.push('')
}
return row
}
// If the first row is not the header, a header containing columns named
// 'Column N' needs to be generated.
if (!firstRowHeader) {
head = []
for (let i = 1; i <= columns; i++) {
head.push(`Column ${i}`)
}
rows = data.slice(0, 3)
remaining = data.length - rows.length
}
head = fill(head, columns)
rows.map((row) => fill(row, columns))
rows.map((row) => this.fill(row, columns))
return { columns, head, rows, remaining }
},
/**
* Find the next un-unused column not present or used yet in the nextFreeIndexMap.
* Will append a number to the returned columnName if it is taken, where that
* number ensures the returned name is unique. Finally this function will update
* the nextFreeIndexMap so future calls will not use any columns returned by
* this function.
* @param originalColumnName The column name to find the next free unique value for.
* @param nextFreeIndexMap A map of column name to next free starting index.
* @param startingIndex The starting index to start from if no index is found in
* the map.
* @return {string} A column name possibly postfixed with a number to ensure it
* is unique.
*/
findNextFreeName(originalColumnName, nextFreeIndexMap, startingIndex) {
let i = nextFreeIndexMap.get(originalColumnName) || startingIndex
while (true) {
const nextColumnNameToCheck = `${originalColumnName} ${i}`
if (!nextFreeIndexMap.has(nextColumnNameToCheck)) {
nextFreeIndexMap.set(originalColumnName, i + 1)
return nextColumnNameToCheck
}
i++
}
},
/**
* Given a column name this function will return a new name which is guarrenteed
* to be unique and valid. If the originally provided name is unique and valid
* then it will be returned untouched.
*
* @param column The column name to check.
* @param nextFreeIndexMap A map of column names to an index number. A value of 0
* indicates that the key is a column name which exists in the table but has not
* yet been returned yet. A number higher than 0 indicates that the column has
* already occurred and the index needs to be appended to the name to generate a
* new unique column name.
* @return {string|*} A valid unique column name.
*/
makeColumnNameUniqueAndValidIfNotAlready(column, nextFreeIndexMap) {
if (column === '') {
return this.findNextFreeName('Field', nextFreeIndexMap, 1)
} else if (RESERVED_BASEROW_FIELD_NAMES.includes(column)) {
return this.findNextFreeName(column, nextFreeIndexMap, 2)
} else if (nextFreeIndexMap.get(column) > 0) {
return this.findNextFreeName(column, nextFreeIndexMap, 2)
} else {
nextFreeIndexMap.set(column, 2)
return column
}
},
/**
* Ensures that the uploaded field names are unique, non blank and don't use any
* reserved Baserow field names.
* @param {*[]} head An array of field names to be checked.
* @return A new array of field names which are guaranteed to be unique and valid.
*/
makeHeaderUniqueAndValid(head) {
const nextFreeIndexMap = new Map()
for (let i = 0; i < head.length; i++) {
nextFreeIndexMap.set(head[i], 0)
}
const uniqueAndValidHeader = []
for (let i = 0; i < head.length; i++) {
const column = head[i]
const trimmedColumn = column.trim()
const uniqueValidName = this.makeColumnNameUniqueAndValidIfNotAlready(
trimmedColumn,
nextFreeIndexMap
)
uniqueAndValidHeader.push(uniqueValidName)
}
return uniqueAndValidHeader
},
},
}

View file

@ -275,6 +275,13 @@ export const getters = {
getAll(state) {
return state.items
},
getAllWithPrimary(state) {
if (state.primary !== null) {
return [state.primary, ...state.items]
} else {
return state.items
}
},
}
export default {

View file

@ -1 +1,3 @@
export const trueString = ['y', 't', 'o', 'yes', 'true', 'on', '1']
// Please keep in sync with src/baserow/contrib/database/fields/handler.py:30
export const RESERVED_BASEROW_FIELD_NAMES = ['id', 'order']

View file

@ -0,0 +1,43 @@
/**
* @jest-environment jsdom
*/
import importer from '@baserow/modules/database/mixins/importer'
describe('test file importer', () => {
  test('test field name id is invalid as is reserved by baserow', () => {
    // 'id' is reserved so it is postfixed, skipping postfixes already taken
    // by other columns in the header.
    expect(importer.methods.makeHeaderUniqueAndValid(['id'])).toEqual(['id 2'])
    expect(importer.methods.makeHeaderUniqueAndValid(['id', 'id 2'])).toEqual([
      'id 3',
      'id 2',
    ])
  })
  test('test field name order is invalid as is reserved by baserow', () => {
    // 'order' is reserved just like 'id'.
    expect(importer.methods.makeHeaderUniqueAndValid(['order'])).toEqual([
      'order 2',
    ])
    expect(
      importer.methods.makeHeaderUniqueAndValid(['order', 'order 2', 'order'])
    ).toEqual(['order 3', 'order 2', 'order 4'])
  })
  test('duplicate names are appended with numbers to make them unique', () => {
    // The first occurrence keeps its name, later duplicates are numbered.
    expect(importer.methods.makeHeaderUniqueAndValid(['a', 'a', 'a'])).toEqual([
      'a',
      'a 2',
      'a 3',
    ])
    expect(
      importer.methods.makeHeaderUniqueAndValid(['a', 'a 2', 'a', 'a'])
    ).toEqual(['a', 'a 2', 'a 3', 'a 4'])
  })
  test('blank names are replaced by unique field names', () => {
    // Blank names become 'Field N', never colliding with existing columns.
    expect(importer.methods.makeHeaderUniqueAndValid(['', '', ''])).toEqual([
      'Field 1',
      'Field 2',
      'Field 3',
    ])
    expect(
      importer.methods.makeHeaderUniqueAndValid(['', 'Field 1', '', ''])
    ).toEqual(['Field 2', 'Field 1', 'Field 3', 'Field 4'])
  })
})