1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-04 13:15:24 +00:00

Implement batch row update api

This commit is contained in:
Petr Stribny 2022-04-19 10:50:08 +00:00
parent 89ff68946b
commit 65c5754985
38 changed files with 1881 additions and 90 deletions

View file

@ -6,3 +6,10 @@ env =
testpaths =
tests
../premium/backend/tests
markers =
field_file: All tests related to file field
field_single_select: All tests related to single select field
field_multiple_select: All tests related to multiple select field
field_link_row: All tests related to link row field
field_formula: All tests related to formula field
api_rows: All tests to manipulate rows via HTTP API

View file

@ -32,5 +32,5 @@ ERROR_INVALID_USER_FILE_NAME_ERROR = (
ERROR_USER_FILE_DOES_NOT_EXIST = (
"ERROR_USER_FILE_DOES_NOT_EXIST",
HTTP_400_BAD_REQUEST,
"The user file {e.name_or_id} does not exist.",
"The user files {e.file_names_or_ids} do not exist.",
)

View file

@ -3,6 +3,7 @@ from typing import Dict, Union, Tuple, Callable, Optional, Type
from django.utils.encoding import force_str
from rest_framework import status
from rest_framework import serializers
from rest_framework.exceptions import APIException
from rest_framework.request import Request
from rest_framework.serializers import ModelSerializer
@ -120,6 +121,8 @@ def validate_data(
data,
partial=False,
exception_to_raise=RequestBodyValidationException,
many=False,
return_validated=False,
):
"""
Validates the provided data via the provided serializer class. If the data doesn't
@ -132,6 +135,10 @@ def validate_data(
:type data: dict
:param partial: Whether the data is a partial update.
:type partial: bool
:param many: Indicates whether the serializer should be constructed as a list.
:type many: bool
:param return_validated: Returns validated_data from DRF serializer
:type return_validated: bool
:return: The data after being validated by the serializer.
:rtype: dict
"""
@ -146,11 +153,14 @@ def validate_data(
else:
return {"error": force_str(error), "code": error.code}
serializer = serializer_class(data=data, partial=partial)
serializer = serializer_class(data=data, partial=partial, many=many)
if not serializer.is_valid():
detail = serialize_errors_recursive(serializer.errors)
raise exception_to_raise(detail)
if return_validated:
return serializer.validated_data
return serializer.data
@ -252,7 +262,12 @@ def type_from_data_or_registry(
def get_serializer_class(
model, field_names, field_overrides=None, base_class=None, meta_ref_name=None
model,
field_names,
field_overrides=None,
base_class=None,
meta_ref_name=None,
required_fields=None,
):
"""
Generates a model serializer based on the provided field names and field overrides.
@ -269,6 +284,9 @@ def get_serializer_class(
:param meta_ref_name: Optionally a custom ref name can be set. If not provided,
then the class name of the model and base class are used.
:type meta_ref_name: str
:param required_fields: List of field names that should be present even when
performing partial validation.
:type required_fields: list[str]
:return: The generated model serializer containing the provided fields.
:rtype: ModelSerializer
"""
@ -303,6 +321,16 @@ def get_serializer_class(
if field_overrides:
attrs.update(field_overrides)
def validate(self, value):
if required_fields:
for field_name in required_fields:
if field_name not in value:
raise serializers.ValidationError(
{f"{field_name}": "This field is required."}
)
return value
attrs["validate"] = validate
return type(str(model_.__name__ + "Serializer"), (base_class,), attrs)

View file

@ -418,8 +418,12 @@ if PRIVATE_BACKEND_HOSTNAME:
FROM_EMAIL = os.getenv("FROM_EMAIL", "no-reply@localhost")
RESET_PASSWORD_TOKEN_MAX_AGE = 60 * 60 * 48 # 48 hours
# How many rows can be requested at once.
ROW_PAGE_SIZE_LIMIT = int(os.getenv("BASEROW_ROW_PAGE_SIZE_LIMIT", 200))
BATCH_ROWS_SIZE_LIMIT = int(
os.getenv("BATCH_ROWS_SIZE_LIMIT", 200)
) # How many rows can be modified at once.
TRASH_PAGE_SIZE_LIMIT = 200 # How many trash entries can be requested at once.
ROW_COMMENT_PAGE_SIZE_LIMIT = 200 # How many row comments can be requested at once.

View file

@ -96,3 +96,8 @@ ERROR_INVALID_LOOKUP_TARGET_FIELD = (
"The provided target field does not exist or is in a different table to the table"
" linked to by the through field.",
)
ERROR_INVALID_SELECT_OPTION_VALUES = (
"ERROR_INVALID_SELECT_OPTION_VALUES",
HTTP_400_BAD_REQUEST,
"The provided select option ids {e.ids} are not valid select options.",
)

View file

@ -1,7 +1,13 @@
from rest_framework.status import HTTP_404_NOT_FOUND
from rest_framework.status import HTTP_404_NOT_FOUND, HTTP_400_BAD_REQUEST
ERROR_ROW_DOES_NOT_EXIST = (
"ERROR_ROW_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
"The requested row does not exist.",
"The rows {e.ids} do not exist.",
)
ERROR_ROW_IDS_NOT_UNIQUE = (
"ERROR_ROW_IDS_NOT_UNIQUE",
HTTP_400_BAD_REQUEST,
"The provided row ids {e.ids} are not unique.",
)

View file

@ -2,6 +2,7 @@ import logging
from copy import deepcopy
from typing import Dict
from django.conf import settings
from rest_framework import serializers
from django.db.models.base import ModelBase
@ -30,6 +31,8 @@ def get_row_serializer_class(
field_names_to_include=None,
user_field_names=False,
field_kwargs=None,
include_id=False,
required_fields=None,
):
"""
Generates a Django rest framework model serializer based on the available fields
@ -59,6 +62,11 @@ def get_row_serializer_class(
:param field_kwargs: A dict containing additional kwargs per field. The key must
be the field name and the value a dict containing the kwargs.
:type field_kwargs: dict
:param include_id: Whether the generated serializer should contain the id field
:type include_id: bool
:param required_fields: List of field names that should be present even when
performing partial validation.
:type required_fields: list[str]
:return: The generated serializer.
:rtype: ModelSerializer
"""
@ -99,17 +107,43 @@ def get_row_serializer_class(
field_overrides[name] = serializer
field_names.append(name)
return get_serializer_class(model, field_names, field_overrides, base_class)
if include_id:
field_names.append("id")
field_overrides["id"] = serializers.IntegerField()
return get_serializer_class(
model, field_names, field_overrides, base_class, required_fields=required_fields
)
def get_example_row_serializer_class(add_id=False, user_field_names=False):
def get_batch_row_serializer_class(row_serializer_class):
    """
    Dynamically builds a serializer class that wraps the provided row
    serializer in an ``items`` list field, capped at the configured batch
    size limit.

    :param row_serializer_class: The serializer class used for each entry of
        the ``items`` list.
    :return: A new ``serializers.Serializer`` subclass exposing ``items``.
    """

    def validate(self, value):
        # Guards the case where validation runs with partial=True and the
        # normal required-field checks are skipped.
        if "items" not in value:
            raise serializers.ValidationError({"items": "This field is required."})
        return value

    items_field = serializers.ListField(
        child=row_serializer_class(),
        min_length=1,
        max_length=settings.BATCH_ROWS_SIZE_LIMIT,
    )
    attrs = {"items": items_field, "validate": validate}
    return type("BatchRowSerializer", (serializers.Serializer,), attrs)
def get_example_row_serializer_class(example_type="get", user_field_names=False):
"""
Generates a serializer containing a field for each field type. It is only used for
example purposes in the openapi documentation.
:param add_id: Indicates whether the id field should be added. This could for
example differ for request or response documentation.
:type add_id: bool
:param example_type: Sets various parameters. Can be get, post, patch.
:type example_type: str
:param user_field_names: Whether this example serializer help text should indicate
the fields names can be switched using the `user_field_names` GET parameter.
:type user_field_names: bool
@ -117,13 +151,42 @@ def get_example_row_serializer_class(add_id=False, user_field_names=False):
:rtype: Serializer
"""
config = {
"get": {
"class_name": "ExampleRowResponseSerializer",
"add_id": True,
"add_order": True,
"read_only_fields": True,
},
"post": {
"class_name": "ExampleRowRequestSerializer",
"add_id": False,
"add_order": False,
"read_only_fields": False,
},
"patch": {
"class_name": "ExampleUpdateRowRequestSerializer",
"add_id": False,
"add_order": False,
"read_only_fields": False,
},
"patch_batch": {
"class_name": "ExampleBatchUpdateRowRequestSerializer",
"add_id": True,
"add_order": False,
"read_only_fields": False,
},
}
class_name = config[example_type]["class_name"]
add_id = config[example_type]["add_id"]
add_order = config[example_type]["add_order"]
add_readonly_fields = config[example_type]["read_only_fields"]
is_response_example = add_readonly_fields
if not hasattr(get_example_row_serializer_class, "cache"):
get_example_row_serializer_class.cache = {}
class_name = (
"ExampleRowResponseSerializer" if add_id else "ExampleRowRequestSerializer"
)
if user_field_names:
class_name += "WithUserFieldNames"
@ -134,8 +197,10 @@ def get_example_row_serializer_class(add_id=False, user_field_names=False):
if add_id:
fields["id"] = serializers.IntegerField(
read_only=True, help_text="The unique identifier of the row in the table."
read_only=False, help_text="The unique identifier of the row in the table."
)
if add_order:
fields["order"] = serializers.DecimalField(
max_digits=40,
decimal_places=20,
@ -160,6 +225,8 @@ def get_example_row_serializer_class(add_id=False, user_field_names=False):
)
for i, field_type in enumerate(field_types):
if field_type.read_only and not add_readonly_fields:
continue
instance = field_type.model_class()
kwargs = {
"help_text": f"This field represents the `{field_type.type}` field. The "
@ -168,7 +235,9 @@ def get_example_row_serializer_class(add_id=False, user_field_names=False):
f"{field_type.get_serializer_help_text(instance)}"
}
get_field_method = (
"get_response_serializer_field" if add_id else "get_serializer_field"
"get_response_serializer_field"
if is_response_example
else "get_serializer_field"
)
serializer_field = getattr(field_type, get_field_method)(instance, **kwargs)
fields[f"field_{i + 1}"] = serializer_field
@ -228,7 +297,7 @@ def remap_serialized_row_to_user_field_names(serialized_row: Dict, model: ModelB
example_pagination_row_serializer_class = get_example_pagination_serializer_class(
get_example_row_serializer_class(True, user_field_names=True)
get_example_row_serializer_class(example_type="get", user_field_names=True)
)
@ -247,3 +316,35 @@ class ListRowsQueryParamsSerializer(serializers.Serializer):
include = serializers.CharField(required=False)
exclude = serializers.CharField(required=False)
filter_type = serializers.CharField(required=False, default="")
class BatchUpdateRowsSerializer(serializers.Serializer):
    """Serializer wrapping a list of row payloads for batch row updates."""

    # Capped by the BATCH_ROWS_SIZE_LIMIT setting so a single request cannot
    # modify an unbounded number of rows.
    items = serializers.ListField(
        child=RowSerializer(),
        min_length=1,
        max_length=settings.BATCH_ROWS_SIZE_LIMIT,
    )
def get_example_batch_rows_serializer_class(example_type="get", user_field_names=False):
    """
    Generates an example serializer that wraps the example row serializer in
    an ``items`` list, used for openapi documentation purposes.

    :param example_type: One of ``get``, ``post`` or ``patch_batch``; selects
        the generated class name and the example row serializer variant.
    :type example_type: str
    :param user_field_names: Whether the example row serializer help text
        should indicate the field names can be switched using the
        `user_field_names` GET parameter.
    :type user_field_names: bool
    :return: The generated serializer class.
    """

    class_names = {
        "get": "ExampleBatchRowsResponseSerializer",
        "post": "ExampleBatchRowsRequestSerializer",
        "patch_batch": "ExampleBatchUpdateRowsRequestSerializer",
    }
    class_name = class_names[example_type]
    row_serializer = get_example_row_serializer_class(
        example_type=example_type, user_field_names=user_field_names
    )()
    attrs = {
        "items": serializers.ListField(
            child=row_serializer,
            min_length=1,
            max_length=settings.BATCH_ROWS_SIZE_LIMIT,
        )
    }
    return type(class_name, (serializers.Serializer,), attrs)

View file

@ -1,6 +1,6 @@
from django.urls import re_path
from .views import RowsView, RowView, RowMoveView
from .views import RowsView, RowView, RowMoveView, BatchRowsView
app_name = "baserow.contrib.database.api.rows"
@ -12,6 +12,11 @@ urlpatterns = [
RowView.as_view(),
name="item",
),
re_path(
r"table/(?P<table_id>[0-9]+)/batch/$",
BatchRowsView.as_view(),
name="batch",
),
re_path(
r"table/(?P<table_id>[0-9]+)/(?P<row_id>[0-9]+)/move/$",
RowMoveView.as_view(),

View file

@ -21,8 +21,12 @@ from baserow.contrib.database.api.fields.errors import (
ERROR_ORDER_BY_FIELD_NOT_FOUND,
ERROR_FILTER_FIELD_NOT_FOUND,
ERROR_FIELD_DOES_NOT_EXIST,
ERROR_INVALID_SELECT_OPTION_VALUES,
)
from baserow.contrib.database.api.rows.errors import (
ERROR_ROW_DOES_NOT_EXIST,
ERROR_ROW_IDS_NOT_UNIQUE,
)
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.serializers import (
example_pagination_row_serializer_class,
)
@ -38,8 +42,9 @@ from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotPossible,
FilterFieldNotFound,
FieldDoesNotExist,
AllProvidedMultipleSelectValuesMustBeSelectOption,
)
from baserow.contrib.database.rows.exceptions import RowDoesNotExist
from baserow.contrib.database.rows.exceptions import RowDoesNotExist, RowIdsNotUnique
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.table.exceptions import TableDoesNotExist
from baserow.contrib.database.table.handler import TableHandler
@ -58,8 +63,10 @@ from .serializers import (
MoveRowQueryParamsSerializer,
CreateRowQueryParamsSerializer,
RowSerializer,
get_batch_row_serializer_class,
get_example_row_serializer_class,
get_row_serializer_class,
get_example_batch_rows_serializer_class,
)
from baserow.contrib.database.fields.field_filters import (
FILTER_TYPE_AND,
@ -336,11 +343,19 @@ class RowsView(APIView):
"purposes, the field_ID must be replaced with the actual id of the field "
"or the name of the field if `user_field_names` is provided."
),
request=get_example_row_serializer_class(False, user_field_names=True),
request=get_example_row_serializer_class(
example_type="post", user_field_names=True
),
responses={
200: get_example_row_serializer_class(True, user_field_names=True),
200: get_example_row_serializer_class(
example_type="get", user_field_names=True
),
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
[
"ERROR_USER_NOT_IN_GROUP",
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_INVALID_SELECT_OPTION_VALUES",
]
),
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
404: get_error_schema(
@ -354,6 +369,7 @@ class RowsView(APIView):
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
AllProvidedMultipleSelectValuesMustBeSelectOption: ERROR_INVALID_SELECT_OPTION_VALUES,
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
}
@ -366,6 +382,7 @@ class RowsView(APIView):
"""
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, "create", table, False)
user_field_names = "user_field_names" in request.GET
model = table.get_model()
@ -446,7 +463,9 @@ class RowView(APIView):
"depends on the fields type."
),
responses={
200: get_example_row_serializer_class(True, user_field_names=True),
200: get_example_row_serializer_class(
example_type="get", user_field_names=True
),
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
),
@ -471,6 +490,7 @@ class RowView(APIView):
"""
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, "read", table, False)
user_field_names = "user_field_names" in request.GET
model = table.get_model()
@ -525,11 +545,19 @@ class RowView(APIView):
"the field_ID must be replaced with the actual id of the field or the name "
"of the field if `user_field_names` is provided."
),
request=get_example_row_serializer_class(False, user_field_names=True),
request=get_example_row_serializer_class(
example_type="patch", user_field_names=True
),
responses={
200: get_example_row_serializer_class(True, user_field_names=True),
200: get_example_row_serializer_class(
example_type="get", user_field_names=True
),
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
[
"ERROR_USER_NOT_IN_GROUP",
"ERROR_REQUEST_BODY_VALIDATION",
"ERROR_INVALID_SELECT_OPTION_VALUES",
]
),
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
404: get_error_schema(
@ -543,6 +571,7 @@ class RowView(APIView):
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
AllProvidedMultipleSelectValuesMustBeSelectOption: ERROR_INVALID_SELECT_OPTION_VALUES,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
}
@ -554,6 +583,7 @@ class RowView(APIView):
"""
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, "update", table, False)
user_field_names = "user_field_names" in request.GET
@ -639,6 +669,7 @@ class RowView(APIView):
"""
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, "delete", table, False)
RowHandler().delete_row(request.user, table, row_id)
@ -691,7 +722,9 @@ class RowMoveView(APIView):
"parameter is not provided, then the row will be moved to the end.",
request=None,
responses={
200: get_example_row_serializer_class(True, user_field_names=True),
200: get_example_row_serializer_class(
example_type="get", user_field_names=True
),
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
404: get_error_schema(
@ -713,6 +746,7 @@ class RowMoveView(APIView):
"""Moves the row to another position."""
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, "update", table, False)
user_field_names = "user_field_names" in request.GET
@ -733,3 +767,118 @@ class RowMoveView(APIView):
)
serializer = serializer_class(row)
return Response(serializer.data)
class BatchRowsView(APIView):
    """Endpoint for updating multiple rows of a table in a single request."""

    authentication_classes = APIView.authentication_classes + [TokenAuthentication]
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        # Excluded from the generated API documentation (exclude=True).
        exclude=True,
        parameters=[
            OpenApiParameter(
                name="table_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Updates the rows in the table.",
            ),
            OpenApiParameter(
                name="user_field_names",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.BOOL,
                description=(
                    "A flag query parameter which if provided this endpoint will "
                    "expect and return the user specified field names instead of "
                    "internal Baserow field names (field_123 etc)."
                ),
            ),
        ],
        tags=["Database table rows"],
        operation_id="batch_update_database_table_rows",
        description=(
            "Updates existing rows in the table if the user has access to the "
            "related table's group. The accepted body fields are depending on the "
            "fields that the table has. For a complete overview of fields use the "
            "**list_database_table_fields** endpoint to list them all. None of the "
            "fields are required, if they are not provided the value is not going to "
            "be updated. "
            "When you want to update a value for the field with id `10`, the key must "
            "be named `field_10`. Or if the GET parameter `user_field_names` is "
            "provided the key of the field to update must be the name of the field. "
            "Multiple different fields to update can be provided for each row. In "
            "the examples below you will find all the different field types, the "
            "numbers/ids in the example are just there for example purposes, "
            "the field_ID must be replaced with the actual id of the field or the name "
            "of the field if `user_field_names` is provided."
        ),
        request=get_example_batch_rows_serializer_class(
            example_type="patch_batch", user_field_names=True
        ),
        responses={
            200: get_example_batch_rows_serializer_class(
                example_type="get", user_field_names=True
            ),
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_REQUEST_BODY_VALIDATION",
                    "ERROR_ROW_IDS_NOT_UNIQUE",
                    "ERROR_INVALID_SELECT_OPTION_VALUES",
                ]
            ),
            401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
            404: get_error_schema(
                ["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_ROW_DOES_NOT_EXIST"]
            ),
        },
    )
    @transaction.atomic
    @map_exceptions(
        {
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
            TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
            RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
            RowIdsNotUnique: ERROR_ROW_IDS_NOT_UNIQUE,
            AllProvidedMultipleSelectValuesMustBeSelectOption: ERROR_INVALID_SELECT_OPTION_VALUES,
            NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
            UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
        }
    )
    def patch(self, request, table_id):
        """
        Updates all provided rows at once for the table with
        the given table_id.

        :param request: The incoming HTTP request; its body must contain an
            ``items`` list of row payloads, each including the row ``id``.
        :param table_id: The id of the table whose rows are updated.
        :return: A response with the serialized updated rows under ``items``.
        """

        table = TableHandler().get_table(table_id)
        # Token-authenticated callers additionally need update permission on
        # this specific table.
        TokenHandler().check_table_permissions(request, "update", table, False)
        model = table.get_model()
        user_field_names = "user_field_names" in request.GET

        # Each row payload must carry its id even though the field updates
        # themselves are partial; `required_fields` enforces that during
        # validation.
        row_validation_serializer = get_row_serializer_class(
            model,
            user_field_names=user_field_names,
            include_id=True,
            required_fields=["id"],
        )
        validation_serializer = get_batch_row_serializer_class(
            row_validation_serializer
        )
        data = validate_data(
            validation_serializer, request.data, partial=True, return_validated=True
        )

        try:
            rows = RowHandler().update_rows(request.user, table, data["items"], model)
        except ValidationError as e:
            # Field-level preparation failures are surfaced to the client as a
            # request body validation error.
            raise RequestBodyValidationException(detail=e.message)

        response_row_serializer_class = get_row_serializer_class(
            model, RowSerializer, is_response=True, user_field_names=user_field_names
        )
        response_serializer_class = get_batch_row_serializer_class(
            response_row_serializer_class
        )
        response_serializer = response_serializer_class({"items": rows})
        return Response(response_serializer.data)

View file

@ -77,7 +77,7 @@ class SubmitFormViewView(APIView):
"on the fields that are in the form and the rules per field. If valid, "
"a new row will be created in the table."
),
request=get_example_row_serializer_class(False),
request=get_example_row_serializer_class(example_type="post"),
responses={
200: FormViewSubmittedSerializer,
404: get_error_schema(["ERROR_FORM_DOES_NOT_EXIST"]),

View file

@ -90,7 +90,9 @@ class GalleryViewView(APIView):
),
responses={
200: get_example_pagination_serializer_class(
get_example_row_serializer_class(add_id=True, user_field_names=False),
get_example_row_serializer_class(
example_type="get", user_field_names=False
),
additional_fields={
"field_options": FieldOptionsField(
serializer_class=GalleryViewFieldOptionsSerializer,

View file

@ -199,7 +199,9 @@ class GridViewView(APIView):
),
responses={
200: get_example_pagination_serializer_class(
get_example_row_serializer_class(add_id=True, user_field_names=False),
get_example_row_serializer_class(
example_type="get", user_field_names=False
),
additional_fields={
"field_options": FieldOptionsField(
serializer_class=GridViewFieldOptionsSerializer, required=False
@ -311,9 +313,9 @@ class GridViewView(APIView):
),
request=GridViewFilterSerializer,
responses={
200: get_example_row_serializer_class(add_id=True, user_field_names=False)(
many=True
),
200: get_example_row_serializer_class(
example_type="get", user_field_names=False
)(many=True),
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
),
@ -699,7 +701,9 @@ class PublicGridViewRowsView(APIView):
),
responses={
200: get_example_pagination_serializer_class(
get_example_row_serializer_class(add_id=True, user_field_names=False),
get_example_row_serializer_class(
example_type="get", user_field_names=False
),
additional_fields={
"field_options": FieldOptionsField(
serializer_class=GridViewFieldOptionsSerializer, required=False

View file

@ -58,6 +58,8 @@ class PathBasedUpdateStatementCollector:
path_to_starting_table_id_column = (
"__".join(path_to_starting_table) + "__id"
)
if isinstance(starting_row_id, list):
path_to_starting_table_id_column += "__in"
qs = qs.filter(**{path_to_starting_table_id_column: starting_row_id})
qs.update(**self.update_statements)
@ -117,7 +119,6 @@ class CachingFieldUpdateCollector(FieldCache):
used if self.starting_row_id is set so only rows which join back to the
starting row via this path are updated.
"""
self._updated_fields_per_table[field.table_id][field.id] = field
self._update_statement_collector.add_update_statement(
field, update_statement, via_path_to_starting_table

View file

@ -125,6 +125,12 @@ class AllProvidedMultipleSelectValuesMustBeSelectOption(Exception):
field.
"""
def __init__(self, ids, *args, **kwargs):
    # Accept a single id as well as a list so callers can raise with either;
    # `self.ids` is interpolated into the ERROR_INVALID_SELECT_OPTION_VALUES
    # API error message.
    if not isinstance(ids, list):
        ids = [ids]
    self.ids = ids
    super().__init__(*args, **kwargs)
class InvalidLookupThroughField(Exception):
"""

View file

@ -1165,7 +1165,7 @@ class LinkRowFieldType(FieldType):
# Trigger the newly created pending operations of all the models related to the
# created ManyToManyField. They need to be called manually because normally
# they are triggered when a new new model is registered. Not triggering them
# they are triggered when a new model is registered. Not triggering them
# can cause a memory leak because everytime a table model is generated, it will
# register new pending operations.
apps = model._meta.apps
@ -1505,16 +1505,7 @@ class FileFieldType(FieldType):
model_class = FileField
can_be_in_form_view = False
def prepare_value_for_db(self, instance, value):
if value is None:
return []
if not isinstance(value, list):
raise ValidationError("The provided value must be a list.")
if len(value) == 0:
return []
def _extract_file_names(self, value):
# Validates the provided object and extract the names from it. We need the name
# to validate if the file actually exists and to get the 'real' properties
# from it.
@ -1530,6 +1521,19 @@ class FileFieldType(FieldType):
raise ValidationError("The provided `visible_name` must be a string.")
provided_files.append(o)
return provided_files
def prepare_value_for_db(self, instance, value):
if value is None:
return []
if not isinstance(value, list):
raise ValidationError("The provided value must be a list.")
if len(value) == 0:
return []
provided_files = self._extract_file_names(value)
# Create a list of the serialized UserFiles in the originally provided order
# because that is also the order we need to store the serialized versions in.
@ -1547,14 +1551,44 @@ class FileFieldType(FieldType):
file.get("visible_name") or user_file.original_name
)
except StopIteration:
raise UserFileDoesNotExist(
file["name"], f"The provided file {file['name']} does not exist."
)
raise UserFileDoesNotExist(file["name"])
user_files.append(serialized)
return user_files
def prepare_value_for_db_in_bulk(self, instance, values_by_row):
    """
    Serializes the provided file values for all rows at once, resolving every
    referenced user file with a single query instead of one query per row.

    :param instance: The file field instance.
    :param values_by_row: Lists of provided file dicts (each with at least a
        ``name`` key), indexed by row.
    :raises UserFileDoesNotExist: When any referenced file name does not
        exist; all missing names are reported together.
    :return: values_by_row with each value replaced by the serialized files
        in the originally provided order.
    """

    provided_names_by_row = defaultdict(list)
    unique_names = set()

    # Validate/extract the provided file objects per row while collecting the
    # distinct names so they can be fetched in one query.
    for row_index, value in values_by_row.items():
        provided_names_by_row[row_index] = self._extract_file_names(value)
        unique_names.update(pn["name"] for pn in provided_names_by_row[row_index])

    if len(unique_names) == 0:
        return values_by_row

    files = UserFile.objects.all().name(*unique_names)
    if len(files) != len(unique_names):
        # Report every missing file name at once instead of failing on the
        # first one.
        invalid_names = sorted(
            list(unique_names - set((file.name) for file in files))
        )
        raise UserFileDoesNotExist(invalid_names)

    user_files_by_name = dict((file.name, file) for file in files)
    for row_index, value in values_by_row.items():
        serialized_files = []
        # NOTE(review): each `file_names` item is a single provided file dict
        # despite the plural-looking loop variable name.
        for file_names in provided_names_by_row[row_index]:
            user_file = user_files_by_name[file_names.get("name")]
            serialized = user_file.serialize()
            # Fall back to the stored original name when no visible_name was
            # provided.
            serialized["visible_name"] = (
                file_names.get("visible_name") or user_file.original_name
            )
            serialized_files.append(serialized)
        values_by_row[row_index] = serialized_files
    return values_by_row
def get_serializer_field(self, instance, **kwargs):
required = kwargs.get("required", False)
return serializers.ListSerializer(
@ -1722,9 +1756,8 @@ class SingleSelectFieldType(SelectOptionBaseFieldType):
def get_serializer_field(self, instance, **kwargs):
required = kwargs.get("required", False)
field_serializer = serializers.PrimaryKeyRelatedField(
field_serializer = serializers.IntegerField(
**{
"queryset": SelectOption.objects.filter(field=instance),
"required": required,
"allow_null": not required,
**kwargs,
@ -1765,6 +1798,19 @@ class SingleSelectFieldType(SelectOptionBaseFieldType):
# then the provided value is invalid and a validation error can be raised.
raise ValidationError(f"The provided value is not a valid option.")
def prepare_value_for_db_in_bulk(self, instance, values_by_row):
    """
    Validates the chosen select option id of every row with a single query
    instead of one query per row.

    :param instance: The single select field instance.
    :param values_by_row: The chosen option id per row, indexed by row.
    :raises AllProvidedMultipleSelectValuesMustBeSelectOption: When any of
        the provided ids is not an option of this field.
    :return: The unchanged values_by_row.
    """

    # NOTE(review): a None value would never match a select option id and so
    # would be reported as invalid here — confirm callers filter out nulls
    # before calling this method.
    unique_values = {value for value in values_by_row.values()}
    selected_ids = SelectOption.objects.filter(
        field=instance, id__in=unique_values
    ).values_list("id", flat=True)
    if len(selected_ids) != len(unique_values):
        invalid_ids = sorted(list(unique_values - set(selected_ids)))
        raise AllProvidedMultipleSelectValuesMustBeSelectOption(invalid_ids)
    return values_by_row
def get_serializer_help_text(self, instance):
return (
"This field accepts an `integer` representing the chosen select option id "
@ -1981,9 +2027,8 @@ class MultipleSelectFieldType(SelectOptionBaseFieldType):
def get_serializer_field(self, instance, **kwargs):
required = kwargs.get("required", False)
field_serializer = serializers.PrimaryKeyRelatedField(
field_serializer = serializers.IntegerField(
**{
"queryset": SelectOption.objects.filter(field=instance),
"required": required,
"allow_null": not required,
**kwargs,
@ -2023,15 +2068,30 @@ class MultipleSelectFieldType(SelectOptionBaseFieldType):
options = SelectOption.objects.filter(field=instance, id__in=value)
if len(options) != len(value):
raise AllProvidedMultipleSelectValuesMustBeSelectOption
raise AllProvidedMultipleSelectValuesMustBeSelectOption(value)
return value
def prepare_value_for_db_in_bulk(self, instance, values_by_row):
    """
    Validates the select option ids of every row with a single query instead
    of one query per row.

    :param instance: The multiple select field instance.
    :param values_by_row: Lists of chosen option ids, indexed by row.
    :raises AllProvidedMultipleSelectValuesMustBeSelectOption: When any of
        the provided ids is not an option of this field; all invalid ids are
        reported together.
    :return: The unchanged values_by_row.
    """

    # Union of all option ids across every row, so a single query can
    # validate them all.
    unique_values = set()
    for row_index, value in values_by_row.items():
        unique_values.update(value)
    selected_ids = SelectOption.objects.filter(
        field=instance, id__in=unique_values
    ).values_list("id", flat=True)
    if len(selected_ids) != len(unique_values):
        invalid_ids = sorted(list(unique_values - set(selected_ids)))
        raise AllProvidedMultipleSelectValuesMustBeSelectOption(invalid_ids)
    return values_by_row
def get_serializer_help_text(self, instance):
return (
"This field accepts a list of `integer` each of which representing the"
"This field accepts a list of `integer` each of which representing the "
"chosen select option id related to the field. Available ids can be found"
"when getting or listing the field. The response represents chosen field,"
"when getting or listing the field. The response represents chosen field, "
"but also the value and color is exposed."
)

View file

@ -133,7 +133,9 @@ class Field(
from baserow.contrib.database.fields.registries import field_type_registry
result = []
for field_dependency in self.dependants.select_related("dependant").all():
for field_dependency in (
self.dependants.select_related("dependant").order_by("id").all()
):
dependant_field = field_cache.lookup_specific(field_dependency.dependant)
if dependant_field is None:
# If somehow the dependant is trashed it will be None. We can't really

View file

@ -37,7 +37,7 @@ class FieldType(
"""
This abstract class represents a custom field type that can be added to the
field type registry. It must be extended so customisation can be done. Each field
type will have his own model that must extend the Field model, this is needed so
type will have its own model that must extend the Field model, this is needed so
that the user can set custom settings per field instance he has created.
Example:
@ -94,6 +94,23 @@ class FieldType(
return value
def prepare_value_for_db_in_bulk(self, instance, values_by_row):
    """
    This method will work for every `prepare_value_for_db` that doesn't
    execute a query. Fields that do should override this method.

    :param instance: The field instance.
    :type instance: Field
    :param values_by_row: The values that need to be inserted or updated,
        indexed by row id as dict(index, values).
    :type values_by_row: dict
    :return: The modified values in the same structure as it was passed in.
    :rtype: dict
    """
    # Default implementation: simply delegate to prepare_value_for_db per
    # row; mutates and returns the passed-in dict.
    for row_index, value in values_by_row.items():
        values_by_row[row_index] = self.prepare_value_for_db(instance, value)
    return values_by_row
def enhance_queryset(self, queryset, field, name):
"""
This hook can be used to enhance a queryset when fetching multiple rows of a

View file

@ -1,2 +1,16 @@
class RowDoesNotExist(Exception):
"""Raised when trying to get a row that doesn't exist."""
"""Raised when trying to get rows that don't exist."""
def __init__(self, ids, *args, **kwargs):
    # Accept a single row id as well as a list so callers can raise with
    # either; `self.ids` is interpolated into the ERROR_ROW_DOES_NOT_EXIST
    # API error message.
    if not isinstance(ids, list):
        ids = [ids]
    self.ids = ids
    super().__init__(*args, **kwargs)
class RowIdsNotUnique(Exception):
    """Raised when trying to update the same rows multiple times"""

    def __init__(self, ids, *args, **kwargs):
        # `self.ids` holds the duplicated row ids; it is interpolated into
        # the ERROR_ROW_IDS_NOT_UNIQUE API error message.
        self.ids = ids
        super().__init__(*args, **kwargs)

View file

@ -1,4 +1,5 @@
import re
from collections import defaultdict
from decimal import Decimal
from math import floor, ceil
@ -7,17 +8,19 @@ from django.db.models import Max, F
from django.db.models.fields.related import ManyToManyField
from baserow.core.trash.handler import TrashHandler
from .exceptions import RowDoesNotExist
from .exceptions import RowDoesNotExist, RowIdsNotUnique
from .signals import (
before_row_update,
before_row_delete,
row_created,
row_updated,
rows_updated,
row_deleted,
)
from baserow.contrib.database.fields.dependencies.update_collector import (
CachingFieldUpdateCollector,
)
from baserow.core.utils import get_non_unique_values
class RowHandler:
@ -45,9 +48,58 @@ class RowHandler:
if field_id in values or field["name"] in values
}
def prepare_rows_in_bulk(self, fields, rows):
    """
    Prepares a set of values in bulk for all rows so that they can be created
    or updated in the database. It will check if the values can actually be
    set and prepares them based on their field type.

    :param fields: The returned fields object from the get_model method.
    :type fields: dict
    :param rows: The rows and their values that need to be prepared.
    :type rows: list
    :return: The prepared values for all rows in the same structure as it was
        passed in (rows are updated in place).
    :rtype: list
    """

    # The name -> id map is a property of `fields` only, so build it once
    # instead of rebuilding it for every row (was inside the row loop).
    field_ids = {field["name"]: field_id for field_id, field in fields.items()}

    # Organize values by field name so each field type can prepare all of
    # its values in a single, potentially query-saving, bulk call.
    prepared_values_by_field = defaultdict(dict)
    for index, row in enumerate(rows):
        for field_name in field_ids:
            if field_name in row:
                prepared_values_by_field[field_name][index] = row[field_name]

    # Bulk-prepare values per field.
    for field_name, batch_values in prepared_values_by_field.items():
        field = fields[field_ids[field_name]]
        field_type = field["type"]
        prepared_values_by_field[
            field_name
        ] = field_type.prepare_value_for_db_in_bulk(
            field["field"],
            batch_values,
        )

    # Write the prepared values back into the rows to keep the original
    # ordering (and any keys that don't map to a field) intact.
    prepared_rows = []
    for index, row in enumerate(rows):
        new_values = row
        for field_name in field_ids:
            if field_name in row:
                new_values[field_name] = prepared_values_by_field[field_name][index]
        prepared_rows.append(new_values)

    return prepared_rows
def extract_field_ids_from_dict(self, values):
"""
Extracts the field ids from a dict containing the values that need to
Extracts the field ids from a dict containing the values that need to be
updated. For example keys like 'field_2', '3', 4 will be seen as field ids.
:param values: The values where to extract the fields ids from.
@ -157,7 +209,7 @@ class RowHandler:
try:
row = model.objects.get(id=row_id)
except model.DoesNotExist:
raise RowDoesNotExist(f"The row with id {row_id} does not exist.")
raise RowDoesNotExist(row_id)
return row
@ -198,7 +250,7 @@ class RowHandler:
row_exists = model.objects.filter(id=row_id).exists()
if not row_exists and raise_error:
raise RowDoesNotExist(f"The row with id {row_id} does not exist.")
raise RowDoesNotExist(row_id)
else:
return row_exists
@ -379,7 +431,7 @@ class RowHandler:
model.objects.select_for_update().enhance_by_fields().get(id=row_id)
)
except model.DoesNotExist:
raise RowDoesNotExist(f"The row with id {row_id} does not exist.")
raise RowDoesNotExist(row_id)
updated_fields = []
updated_field_ids = set()
@ -445,6 +497,130 @@ class RowHandler:
return row
def update_rows(self, user, table, rows, model=None):
    """
    Updates field values in batch based on provided rows with the new values.

    :param user: The user of whose behalf the change is made.
    :type user: User
    :param table: The table for which the row must be updated.
    :type table: Table
    :param rows: The list of rows with new values that should be set. Each
        entry must contain an "id" key; the dicts are mutated in place while
        preparing the values.
    :type rows: list
    :param model: If the correct model has already been generated it can be
        provided so that it does not have to be generated for a second time.
    :type model: Model
    :raises RowIdsNotUnique: When trying to update the same row multiple times.
    :raises RowDoesNotExist: When any of the rows don't exist.
    :return: The updated row instances.
    :rtype: list[Model]
    """

    group = table.database.group
    group.has_user(user, raise_error=True)

    if not model:
        model = table.get_model()

    # Run every provided value through its field type's (bulk) preparation.
    rows = self.prepare_rows_in_bulk(model._field_objects, rows)
    row_ids = [row["id"] for row in rows]

    # Reject the batch when the same row id appears more than once.
    non_unique_ids = get_non_unique_values(row_ids)
    if len(non_unique_ids) > 0:
        raise RowIdsNotUnique(non_unique_ids)

    # Index the incoming values by row id; "id" itself is not a field value.
    rows_by_id = {}
    for row in rows:
        row_id = row.pop("id")
        rows_by_id[row_id] = row

    # Lock the target rows for the duration of the transaction.
    rows_to_update = model.objects.select_for_update().filter(id__in=row_ids)

    # If fewer rows were found than requested, report exactly the missing ids.
    if len(rows_to_update) != len(rows):
        db_rows_ids = [db_row.id for db_row in rows_to_update]
        raise RowDoesNotExist(sorted(list(set(row_ids) - set(db_rows_ids))))

    # Collect the set of field ids touched by any row in the batch; values may
    # be keyed either by field id or by field name.
    updated_field_ids = set()
    for obj in rows_to_update:
        row_values = rows_by_id[obj.id]
        for field_id, field in model._field_objects.items():
            if field_id in row_values or field["name"] in row_values:
                updated_field_ids.add(field_id)

    # NOTE(review): `row` is a list here — receivers must accept a list of
    # row instances for batch updates (the public-view receiver ignores it).
    before_return = before_row_update.send(
        self,
        row=list(rows_to_update),
        user=user,
        table=table,
        model=model,
        updated_field_ids=updated_field_ids,
    )

    # Apply the new values to the in-memory instances. Many-to-many values
    # are written immediately via `.set(...)`; plain columns are deferred to
    # the bulk_update below.
    for obj in rows_to_update:
        row_values = rows_by_id[obj.id]
        values, manytomany_values = self.extract_manytomany_values(
            row_values, model
        )

        for name, value in values.items():
            setattr(obj, name, value)

        for name, value in manytomany_values.items():
            getattr(obj, name).set(value)

        # Re-run pre_save for fields that must refresh on every update
        # (e.g. "last modified"-style fields).
        fields_with_pre_save = model.fields_requiring_refresh_after_update()
        for field_name in fields_with_pre_save:
            setattr(
                obj,
                field_name,
                model._meta.get_field(field_name).pre_save(obj, add=False),
            )

    # For now all fields that don't represent a relationship will be used in
    # the bulk_update() call. This could be optimized in the future if we can
    # select just fields that need to be updated (fields that are passed in +
    # read only fields that need updating too)
    bulk_update_fields = [
        field["name"]
        for field in model._field_objects.values()
        if not isinstance(model._meta.get_field(field["name"]), ManyToManyField)
    ]
    if len(bulk_update_fields) > 0:
        model.objects.bulk_update(rows_to_update, bulk_update_fields)

    # Propagate the change to every field that depends on an updated field.
    updated_fields = [field["field"] for field in model._field_objects.values()]
    update_collector = CachingFieldUpdateCollector(
        table, starting_row_id=row_ids, existing_model=model
    )
    for field in updated_fields:
        for (
            dependant_field,
            dependant_field_type,
            path_to_starting_table,
        ) in field.dependant_fields_with_types(update_collector):
            # NOTE(review): only the first updated row instance is passed to
            # the dependency hook, while the collector carries all row ids —
            # confirm dependants don't rely on the concrete row argument.
            dependant_field_type.row_of_dependency_updated(
                dependant_field,
                rows_to_update[0],
                update_collector,
                path_to_starting_table,
            )
    update_collector.apply_updates_and_get_updated_fields()

    # Re-fetch so the returned instances include any dependency updates.
    rows_to_return = list(
        model.objects.all().enhance_by_fields().filter(id__in=row_ids)
    )
    rows_updated.send(
        self,
        rows=rows_to_return,
        user=user,
        table=table,
        model=model,
        before_return=before_return,
        updated_field_ids=updated_field_ids,
    )

    return rows_to_return
def move_row(self, user, table, row_id, before=None, model=None):
"""
Moves the row related to the row_id before another row or to the end if no
@ -474,7 +650,7 @@ class RowHandler:
try:
row = model.objects.select_for_update().get(id=row_id)
except model.DoesNotExist:
raise RowDoesNotExist(f"The row with id {row_id} does not exist.")
raise RowDoesNotExist(row_id)
before_return = before_row_update.send(
self, row=row, user=user, table=table, model=model, updated_field_ids=[]
@ -540,7 +716,7 @@ class RowHandler:
try:
row = model.objects.get(id=row_id)
except model.DoesNotExist:
raise RowDoesNotExist(f"The row with id {row_id} does not exist.")
raise RowDoesNotExist(row_id)
before_return = before_row_delete.send(
self, row=row, user=user, table=table, model=model

View file

@ -8,4 +8,5 @@ before_row_delete = Signal()
row_created = Signal()
row_updated = Signal()
rows_updated = Signal()
row_deleted = Signal()

View file

@ -311,7 +311,7 @@ class GeneratedTableModel(models.Model):
"""
Mixed into Model classes which have been generated by Baserow.
Can also be used to identify instances of generated baserow models
like `instance(possible_baserow_model, GeneratedTableModel)`.
like `isinstance(possible_baserow_model, GeneratedTableModel)`.
"""
@classmethod

View file

@ -118,6 +118,10 @@ def public_row_deleted(
def public_before_row_update(
sender, row, user, table, model, updated_field_ids, **kwargs
):
# TODO: Batch row updates are not yet supported for public grid.
# For now, this signal call will be ignored.
if isinstance(row, list):
return
# Generate a serialized version of the row before it is updated. The
# `row_updated` receiver needs this serialized version because it can't serialize
# the old row after it has been updated.

View file

@ -1,4 +1,4 @@
from typing import Dict, Any, Optional
from typing import Dict, Any, Optional, List
from django.db import transaction
from django.dispatch import receiver
@ -39,7 +39,9 @@ def before_row_update(sender, row, user, table, model, updated_field_ids, **kwar
# Generate a serialized version of the row before it is updated. The
# `row_updated` receiver needs this serialized version because it can't serialize
# the old row after it has been updated.
return get_row_serializer_class(model, RowSerializer, is_response=True)(row).data
return get_row_serializer_class(model, RowSerializer, is_response=True)(
row, many=isinstance(row, list)
).data
@receiver(row_signals.row_updated)
@ -65,6 +67,29 @@ def row_updated(
)
@receiver(row_signals.rows_updated)
def rows_updated(
    sender, rows, user, table, model, before_return, updated_field_ids, **kwargs
):
    """
    Broadcasts a `rows_updated` real-time message to every client subscribed
    to the table page after a batch row update has been committed.
    """

    table_page_type = page_registry.get("table")
    # Only broadcast once the surrounding transaction commits, so clients
    # never see updates that might still be rolled back.
    transaction.on_commit(
        lambda: table_page_type.broadcast(
            RealtimeRowMessages.rows_updated(
                table_id=table.id,
                # The serialized pre-update rows captured by the
                # `before_row_update` receiver (keyed by receiver function
                # in the signal's response list).
                serialized_rows_before_update=dict(before_return)[before_row_update],
                serialized_rows=get_row_serializer_class(
                    model, RowSerializer, is_response=True
                )(rows, many=True).data,
                metadata=row_metadata_registry.generate_and_merge_metadata_for_rows(
                    table, [row.id for row in rows]
                ),
            ),
            # Exclude the initiating client's own websocket connection.
            getattr(user, "web_socket_id", None),
            table_id=table.id,
        )
    )
@receiver(row_signals.before_row_delete)
def before_row_delete(sender, row, user, table, model, **kwargs):
# Generate a serialized version of the row before it is deleted. The
@ -137,3 +162,21 @@ class RealtimeRowMessages:
"row": serialized_row,
"metadata": metadata,
}
@staticmethod
def rows_updated(
table_id: int,
serialized_rows_before_update: List[Dict[str, Any]],
serialized_rows: List[Dict[str, Any]],
metadata: Dict[int, Dict[str, Any]],
) -> Dict[str, Any]:
return {
"type": "rows_updated",
"table_id": table_id,
# The web-frontend expects a serialized version of the rows before it
# was updated in order to estimate what position the row had in the
# view.
"rows_before_update": serialized_rows_before_update,
"rows": serialized_rows,
"metadata": metadata,
}

View file

@ -2,7 +2,7 @@ from django.conf import settings
from .table.signals import table_created, table_updated, table_deleted
from .views.signals import view_created, views_reordered, view_updated, view_deleted
from .rows.signals import row_created, row_updated, row_deleted
from .rows.signals import row_created, row_updated, rows_updated, row_deleted
from .fields.signals import field_created, field_updated, field_deleted
if settings.DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
@ -57,6 +57,7 @@ __all__ = [
"view_deleted",
"row_created",
"row_updated",
"rows_updated",
"row_deleted",
"field_created",
"field_updated",

View file

@ -35,8 +35,10 @@ class InvalidUserFileNameError(Exception):
class UserFileDoesNotExist(Exception):
    """Raised when a user file with the provided name or id does not exist."""

    def __init__(self, file_names_or_ids, *args, **kwargs):
        # Normalize a single name/id to a list so error formatting can
        # always iterate `self.file_names_or_ids`.
        self.file_names_or_ids = (
            file_names_or_ids
            if isinstance(file_names_or_ids, list)
            else [file_names_or_ids]
        )
        super().__init__(*args, **kwargs)

View file

@ -86,6 +86,19 @@ def set_allowed_attrs(values, allowed_fields, instance):
return instance
def get_non_unique_values(values: List) -> List:
    """
    Assembles all values that are not unique in the provided list.
    """

    seen = set()
    repeated = set()
    for item in values:
        if item in seen:
            repeated.add(item)
        else:
            seen.add(item)
    return list(repeated)
def to_pascal_case(value):
"""
Converts the value string to PascalCase.

View file

@ -21,6 +21,22 @@ def _parse_date(date):
return parse_date(date)
def is_dict_subset(subset: dict, superset: dict) -> bool:
    """
    Recursively checks that `subset` is contained in `superset`: every dict
    key must exist with a matching (sub)value, every list/set item must match
    at least one item of the corresponding superset, and scalars must be
    equal.
    """

    if isinstance(subset, dict):
        for key, val in subset.items():
            if key not in superset or not is_dict_subset(val, superset[key]):
                return False
        return True
    if isinstance(subset, (list, set)):
        for subitem in subset:
            if not any(is_dict_subset(subitem, superitem) for superitem in superset):
                return False
        return True
    return subset == superset
def setup_interesting_test_table(data_fixture, user_kwargs=None):
"""
Constructs a testing table with every field type, their sub types and any other

View file

@ -476,7 +476,9 @@ def test_file_field_type(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_USER_FILE_DOES_NOT_EXIST"
assert response_json["detail"] == "The user file not_existing.jpg does not exist."
assert (
response_json["detail"] == "The user files ['not_existing.jpg'] do not exist."
)
response = api_client.post(
reverse("api:database:rows:list", kwargs={"table_id": table.id}),
@ -1236,9 +1238,10 @@ def test_multiple_select_field_type(api_client, data_fixture):
)
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response_json["error"] == "ERROR_INVALID_SELECT_OPTION_VALUES"
assert (
response_json["detail"][f"field_{field_1_id}"][0][0]["code"] == "does_not_exist"
response_json["detail"]
== "The provided select option ids [999999] are not valid select options."
)
response = api_client.post(

View file

@ -0,0 +1,201 @@
import pytest
from django.shortcuts import reverse
from rest_framework.status import (
HTTP_200_OK,
HTTP_400_BAD_REQUEST,
)
from baserow.test_utils.helpers import is_dict_subset
@pytest.mark.django_db
@pytest.mark.field_file
@pytest.mark.api_rows
def test_batch_update_rows_file_field(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
file_field = data_fixture.create_file_field(table=table)
file1 = data_fixture.create_user_file(
original_name="test.txt",
is_image=True,
)
file2 = data_fixture.create_user_file(
original_name="test2.txt",
is_image=True,
)
file3 = data_fixture.create_user_file(
original_name="test3.txt",
is_image=True,
)
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
row_3 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{file_field.id}": [
{"name": file3.name, "visible_name": "new name"}
],
},
{
f"id": row_2.id,
f"field_{file_field.id}": [
{"name": file3.name, "visible_name": "new name"},
{"name": file2.name, "visible_name": "new name"},
],
},
{
f"id": row_3.id,
f"field_{file_field.id}": [],
},
]
}
expected_response_body = {
"items": [
{
f"id": row_1.id,
f"field_{file_field.id}": [{"name": file3.name, "is_image": True}],
"order": "1.00000000000000000000",
},
{
f"id": row_2.id,
f"field_{file_field.id}": [
{
"name": file2.name,
"is_image": True,
},
{
"name": file3.name,
"is_image": True,
},
],
"order": "1.00000000000000000000",
},
{
f"id": row_3.id,
f"field_{file_field.id}": [],
"order": "1.00000000000000000000",
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_200_OK
assert is_dict_subset(expected_response_body, response.json())
row_1.refresh_from_db()
row_2.refresh_from_db()
row_3.refresh_from_db()
assert len(getattr(row_1, f"field_{file_field.id}")) == 1
assert len(getattr(row_2, f"field_{file_field.id}")) == 2
assert len(getattr(row_3, f"field_{file_field.id}")) == 0
@pytest.mark.django_db
@pytest.mark.field_file
@pytest.mark.api_rows
def test_batch_update_rows_file_field_wrong_file(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
file_field = data_fixture.create_file_field(table=table)
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
row_3 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
invalid_file_names = [
(
"EJzuFBNeEp58rcVg1T48bF58kl01w2pn_EIdGnULvJESuG09x4Z"
"BScablA51hrUP4jPohXi6RL7A0yhgEdgO448gGSVi7502E.txt"
),
(
"XJzuFBNeEp58rcVg1T48bF58kl01w2pn_EIdGnULvJESuG09x4Z"
"BScablA51hrUP4jPohXi6RL7A0yhgEdgO448gGSVi7503E.txt"
),
(
"YJzuFBNeEp58rcVg1T48bF58kl01w2pn_EIdGnULvJESuG09x4Z"
"BScablA51hrUP4jPohXi6RL7A0yhgEdgO448gGSVi7503E.txt"
),
]
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{file_field.id}": [
{"name": invalid_file_names[0], "visible_name": "new name"}
],
},
{
f"id": row_2.id,
f"field_{file_field.id}": [
{"name": invalid_file_names[1], "visible_name": "new name"},
{"name": invalid_file_names[2], "visible_name": "new name"},
],
},
{
f"id": row_3.id,
f"field_{file_field.id}": [],
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_USER_FILE_DOES_NOT_EXIST"
assert response.json()["detail"] == (
f"The user files ['{invalid_file_names[0]}', '{invalid_file_names[1]}',"
f" '{invalid_file_names[2]}'] do not exist."
)
@pytest.mark.django_db
@pytest.mark.field_file
@pytest.mark.api_rows
def test_batch_update_rows_file_field_zero_files(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
file_field = data_fixture.create_file_field(table=table)
model = table.get_model()
row_1 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{file_field.id}": [],
},
]
}
expected_response_body = {
"items": [
{
f"id": row_1.id,
f"field_{file_field.id}": [],
"order": "1.00000000000000000000",
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_200_OK
assert is_dict_subset(expected_response_body, response.json())
assert len(getattr(row_1, f"field_{file_field.id}")) == 0

View file

@ -0,0 +1,85 @@
import pytest
from django.shortcuts import reverse
from rest_framework.status import (
HTTP_200_OK,
)
from baserow.contrib.database.fields.handler import FieldHandler
@pytest.mark.django_db
@pytest.mark.field_link_row
@pytest.mark.api_rows
def test_batch_update_rows_link_row_field(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
linked_table = data_fixture.create_database_table(
user=user, database=table.database
)
linked_field = data_fixture.create_text_field(
primary=True,
name="Primary",
table=linked_table,
)
linked_model = linked_table.get_model()
linked_row_1 = linked_model.objects.create(**{f"field_{linked_field.id}": "Row 1"})
linked_row_2 = linked_model.objects.create(**{f"field_{linked_field.id}": "Row 2"})
linked_row_3 = linked_model.objects.create(**{f"field_{linked_field.id}": "Row 3"})
link_field = FieldHandler().create_field(
user, table, "link_row", link_row_table=linked_table, name="Link"
)
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
row_3 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{link_field.id}": [linked_row_3.id],
},
{
f"id": row_2.id,
f"field_{link_field.id}": [linked_row_3.id, linked_row_2.id],
},
{
f"id": row_3.id,
f"field_{link_field.id}": [],
},
]
}
expected_response_body = {
"items": [
{
f"id": row_1.id,
f"field_{link_field.id}": [{"id": linked_row_3.id, "value": "Row 3"}],
"order": "1.00000000000000000000",
},
{
f"id": row_2.id,
f"field_{link_field.id}": [
{"id": linked_row_2.id, "value": "Row 2"},
{"id": linked_row_3.id, "value": "Row 3"},
],
"order": "1.00000000000000000000",
},
{
f"id": row_3.id,
f"field_{link_field.id}": [],
"order": "1.00000000000000000000",
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == expected_response_body
assert getattr(row_1, f"field_{link_field.id}").count() == 1
assert getattr(row_2, f"field_{link_field.id}").count() == 2
assert getattr(row_3, f"field_{link_field.id}").count() == 0

View file

@ -0,0 +1,142 @@
import pytest
from django.shortcuts import reverse
from rest_framework.status import (
HTTP_200_OK,
HTTP_400_BAD_REQUEST,
)
from baserow.contrib.database.fields.models import SelectOption
@pytest.mark.django_db
@pytest.mark.field_multiple_select
@pytest.mark.api_rows
def test_batch_update_rows_multiple_select_field(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
multiple_select_field = data_fixture.create_multiple_select_field(table=table)
select_option_1 = SelectOption.objects.create(
field=multiple_select_field,
order=1,
value="Option 1",
color="blue",
)
select_option_2 = SelectOption.objects.create(
field=multiple_select_field,
order=1,
value="Option 2",
color="blue",
)
select_option_3 = SelectOption.objects.create(
field=multiple_select_field,
order=1,
value="Option 3",
color="blue",
)
multiple_select_field.select_options.set([select_option_1, select_option_2])
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
row_3 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{multiple_select_field.id}": [select_option_3.id],
},
{
f"id": row_2.id,
f"field_{multiple_select_field.id}": [
select_option_3.id,
select_option_2.id,
],
},
{
f"id": row_3.id,
f"field_{multiple_select_field.id}": [],
},
]
}
expected_response_body = {
"items": [
{
f"id": row_1.id,
f"field_{multiple_select_field.id}": [
{"id": select_option_3.id, "color": "blue", "value": "Option 3"}
],
"order": "1.00000000000000000000",
},
{
f"id": row_2.id,
f"field_{multiple_select_field.id}": [
{"id": select_option_2.id, "color": "blue", "value": "Option 2"},
{"id": select_option_3.id, "color": "blue", "value": "Option 3"},
],
"order": "1.00000000000000000000",
},
{
f"id": row_3.id,
f"field_{multiple_select_field.id}": [],
"order": "1.00000000000000000000",
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == expected_response_body
assert getattr(row_1, f"field_{multiple_select_field.id}").count() == 1
assert getattr(row_2, f"field_{multiple_select_field.id}").count() == 2
assert getattr(row_3, f"field_{multiple_select_field.id}").count() == 0
@pytest.mark.django_db
@pytest.mark.field_multiple_select
@pytest.mark.api_rows
def test_batch_update_rows_multiple_select_field_wrong_option(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
multiple_select_field = data_fixture.create_multiple_select_field(table=table)
select_option_1 = SelectOption.objects.create(
field=multiple_select_field,
order=1,
value="Option 1",
color="blue",
)
multiple_select_field.select_options.set([select_option_1])
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{multiple_select_field.id}": [787],
},
{
f"id": row_2.id,
f"field_{multiple_select_field.id}": [789, select_option_1.id],
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_INVALID_SELECT_OPTION_VALUES"
assert (
response.json()["detail"]
== "The provided select option ids [787, 789] are not valid select options."
)

View file

@ -0,0 +1,52 @@
import pytest
from django.shortcuts import reverse
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
)
from baserow.contrib.database.fields.models import SelectOption
@pytest.mark.django_db
@pytest.mark.field_single_select
@pytest.mark.api_rows
def test_batch_update_rows_single_select_field_wrong_option(api_client, data_fixture):
    """
    Batch-updating a single select field with an option id that does not
    belong to the field must fail with ERROR_INVALID_SELECT_OPTION_VALUES
    and report exactly the invalid ids.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    single_select_field = data_fixture.create_single_select_field(table=table)
    select_option_1 = SelectOption.objects.create(
        field=single_select_field,
        order=1,
        value="Option 1",
        color="blue",
    )
    single_select_field.select_options.set([select_option_1])
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    # 787 is not a valid option id; row_2 uses a valid one, so the whole
    # batch must be rejected atomically because of row_1.
    # NOTE(review): the f-prefix on f"id" is a no-op left from templating.
    request_body = {
        "items": [
            {
                f"id": row_1.id,
                f"field_{single_select_field.id}": 787,
            },
            {
                f"id": row_2.id,
                f"field_{single_select_field.id}": select_option_1.id,
            },
        ]
    }

    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_INVALID_SELECT_OPTION_VALUES"
    assert (
        response.json()["detail"]
        == "The provided select option ids [787] are not valid select options."
    )

View file

@ -0,0 +1,611 @@
import pytest
from django.shortcuts import reverse
from rest_framework.status import (
HTTP_200_OK,
HTTP_400_BAD_REQUEST,
HTTP_401_UNAUTHORIZED,
HTTP_404_NOT_FOUND,
)
from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
from baserow.contrib.database.fields.field_cache import FieldCache
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.test_utils.helpers import is_dict_subset
from django.conf import settings
@pytest.mark.django_db
@pytest.mark.api_rows
@pytest.mark.parametrize("token_header", ["JWT invalid", "Token invalid"])
def test_batch_update_rows_invalid_token(api_client, data_fixture, token_header):
    """
    The batch update endpoint must reject both an invalid JWT and an invalid
    database token with a 401 response.
    """
    table = data_fixture.create_database_table()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})

    response = api_client.patch(
        url,
        {},
        format="json",
        HTTP_AUTHORIZATION=token_header,
    )

    assert response.status_code == HTTP_401_UNAUTHORIZED
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_token_no_update_permission(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
no_update_perm_token = TokenHandler().create_token(
user, table.database.group, "no permissions"
)
TokenHandler().update_token_permissions(
user, no_update_perm_token, True, True, False, True
)
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
response = api_client.patch(
url,
{},
format="json",
HTTP_AUTHORIZATION=f"Token {no_update_perm_token.key}",
)
assert response.status_code == HTTP_401_UNAUTHORIZED
assert response.json()["error"] == "ERROR_NO_PERMISSION_TO_TABLE"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_user_not_in_group(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table()
request_body = {
"items": [
{
f"id": 1,
f"field_11": "green",
},
]
}
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_invalid_table_id(api_client, data_fixture):
    """
    Batch-updating rows of a table id that does not exist must return a 404
    with ERROR_TABLE_DOES_NOT_EXIST.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    # 14343 is an arbitrary id that no fixture has created.
    url = reverse("api:database:rows:batch", kwargs={"table_id": 14343})

    response = api_client.patch(
        url,
        {},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_TABLE_DOES_NOT_EXIST"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_notexisting_row_ids(api_client, data_fixture):
    """
    Batch-updating row ids that don't exist must fail with a 404 and list
    exactly the missing ids in the error detail.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    invalid_row_ids = [32, 3465]
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    # NOTE(review): the comprehension variable `id` shadows the builtin.
    request_body = {"items": [{"id": id} for id in invalid_row_ids]}

    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_ROW_DOES_NOT_EXIST"
    assert response.json()["detail"] == f"The rows {str(invalid_row_ids)} do not exist."
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_batch_size_limit(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
num_rows = settings.BATCH_ROWS_SIZE_LIMIT + 1
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {"items": [{"id": i} for i in range(num_rows)]}
expected_error_detail = {
"items": [
{
"code": "max_length",
"error": f"Ensure this field has no more than {settings.BATCH_ROWS_SIZE_LIMIT} elements.",
},
],
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response.json()["detail"] == expected_error_detail
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_no_payload(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response.json()["detail"]["items"][0]["error"] == "This field is required."
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_field_validation(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
number_field = data_fixture.create_number_field(
table=table, order=1, name="Horsepower"
)
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
request_body = {
"items": [
{
f"id": row_1.id,
f"field_{number_field.id}": 120,
},
{
f"id": row_2.id,
f"field_{number_field.id}": -200,
},
]
}
response = api_client.patch(
url,
request_body,
format="json",
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert (
response.json()["detail"]["items"]["1"][f"field_{number_field.id}"][0]["code"]
== "min_value"
)
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_missing_row_ids(api_client, data_fixture):
    """Every batch item must carry a row `id`; omitting it is a validation error."""
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    number_field = data_fixture.create_number_field(
        table=table, order=1, name="Horsepower"
    )

    response = api_client.patch(
        reverse("api:database:rows:batch", kwargs={"table_id": table.id}),
        {"items": [{f"field_{number_field.id}": 123}]},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
    assert (
        response_json["detail"]["items"]["0"]["id"][0]["error"]
        == "This field is required."
    )
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_repeated_row_ids(api_client, data_fixture):
    """Duplicate row ids within a single batch are rejected as a whole."""
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    repeated_row_id = 32

    response = api_client.patch(
        reverse("api:database:rows:batch", kwargs={"table_id": table.id}),
        {"items": [{"id": repeated_row_id}, {"id": repeated_row_id}]},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json["error"] == "ERROR_ROW_IDS_NOT_UNIQUE"
    assert (
        response_json["detail"]
        == f"The provided row ids {str([repeated_row_id])} are not unique."
    )
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows(api_client, data_fixture):
    """
    Happy path: a batch PATCH with all fields provided for every item updates
    both rows and echoes the new values back (numbers serialized as strings).
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(
        table=table, order=0, name="Color", text_default="white"
    )
    number_field = data_fixture.create_number_field(
        table=table, order=1, name="Horsepower"
    )
    boolean_field = data_fixture.create_boolean_field(
        table=table, order=2, name="For sale"
    )
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    request_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{text_field.id}": "green",
                f"field_{number_field.id}": 120,
                f"field_{boolean_field.id}": True,
            },
            {
                "id": row_2.id,
                f"field_{text_field.id}": "yellow",
                f"field_{number_field.id}": 240,
                f"field_{boolean_field.id}": False,
            },
        ]
    }
    expected_response_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{text_field.id}": "green",
                # Number field values come back as strings.
                f"field_{number_field.id}": "120",
                f"field_{boolean_field.id}": True,
                "order": "1.00000000000000000000",
            },
            {
                "id": row_2.id,
                f"field_{text_field.id}": "yellow",
                f"field_{number_field.id}": "240",
                f"field_{boolean_field.id}": False,
                "order": "1.00000000000000000000",
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_200_OK
    assert response.json() == expected_response_body
    # Verify that the updates were actually persisted.
    row_1.refresh_from_db()
    row_2.refresh_from_db()
    assert getattr(row_1, f"field_{text_field.id}") == "green"
    assert getattr(row_2, f"field_{text_field.id}") == "yellow"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_different_fields_provided(api_client, data_fixture):
    """
    Items in one batch may each provide a different subset of fields; fields
    that are omitted keep (or receive) their default values in the response.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(
        table=table, order=0, name="Color", text_default="white"
    )
    number_field = data_fixture.create_number_field(
        table=table, order=1, name="Horsepower"
    )
    boolean_field = data_fixture.create_boolean_field(
        table=table, order=2, name="For sale"
    )
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    request_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{number_field.id}": 120,
            },
            {
                "id": row_2.id,
                f"field_{text_field.id}": "yellow",
                f"field_{boolean_field.id}": True,
            },
        ]
    }
    expected_response_body = {
        "items": [
            {
                "id": row_1.id,
                # Not provided in the request, so the text default applies.
                f"field_{text_field.id}": "white",
                f"field_{number_field.id}": "120",
                f"field_{boolean_field.id}": False,
                "order": "1.00000000000000000000",
            },
            {
                "id": row_2.id,
                f"field_{text_field.id}": "yellow",
                f"field_{number_field.id}": None,
                f"field_{boolean_field.id}": True,
                "order": "1.00000000000000000000",
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_200_OK
    assert response.json() == expected_response_body
    row_1.refresh_from_db()
    row_2.refresh_from_db()
    assert getattr(row_1, f"field_{text_field.id}") == "white"
    assert getattr(row_2, f"field_{text_field.id}") == "yellow"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_user_field_names(api_client, data_fixture):
    """
    With the `user_field_names` query parameter, both the request payload and
    the response use human readable field names instead of `field_<id>` keys.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field_name = "Color"
    number_field_name = "Horsepower"
    boolean_field_name = "For sale"
    text_field = data_fixture.create_text_field(
        table=table, order=0, name=text_field_name, text_default="white"
    )
    number_field = data_fixture.create_number_field(
        table=table, order=1, name=number_field_name
    )
    boolean_field = data_fixture.create_boolean_field(
        table=table, order=2, name=boolean_field_name
    )
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = (
        reverse("api:database:rows:batch", kwargs={"table_id": table.id})
        + "?user_field_names"
    )
    request_body = {
        "items": [
            {
                "id": row_1.id,
                number_field_name: 120,
            },
            {
                "id": row_2.id,
                text_field_name: "yellow",
                boolean_field_name: True,
            },
        ]
    }
    expected_response_body = {
        "items": [
            {
                "id": row_1.id,
                text_field_name: "white",
                number_field_name: "120",
                boolean_field_name: False,
                "order": "1.00000000000000000000",
            },
            {
                "id": row_2.id,
                text_field_name: "yellow",
                number_field_name: None,
                boolean_field_name: True,
                "order": "1.00000000000000000000",
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_200_OK
    assert response.json() == expected_response_body
    # The database columns are still addressed by field id internally.
    row_1.refresh_from_db()
    row_2.refresh_from_db()
    assert getattr(row_1, f"field_{text_field.id}") == "white"
    assert getattr(row_2, f"field_{text_field.id}") == "yellow"
@pytest.mark.django_db
@pytest.mark.api_rows
def test_batch_update_rows_readonly_fields(api_client, data_fixture):
    """Attempting to set a read only field (created_on) must fail the batch."""
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    created_on_field = data_fixture.create_created_on_field(table=table)
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    request_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{created_on_field.id}": "2019-08-24T14:15:22Z",
            },
            {
                "id": row_2.id,
                f"field_{created_on_field.id}": "2019-08-24T14:15:22Z",
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert (
        response.json()["detail"]
        == "Field of type created_on is read only and should not be set manually."
    )
@pytest.mark.django_db
@pytest.mark.field_formula
@pytest.mark.api_rows
def test_batch_update_rows_dependent_fields(api_client, data_fixture):
    """
    Updating a field that a formula depends on must recalculate the formula
    values, and the recalculated values must appear in the batch response.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    number_field = data_fixture.create_number_field(table=table, order=1, name="Number")
    formula_field = data_fixture.create_formula_field(
        table=table,
        order=2,
        name="Number times two",
        formula="field('Number')*2",
        formula_type="number",
    )
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table.id})
    request_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{number_field.id}": 120,
            },
            {
                "id": row_2.id,
                f"field_{number_field.id}": 240,
            },
        ]
    }
    # Formula results are serialized as strings in the API response.
    expected_response_body = {
        "items": [
            {
                "id": row_1.id,
                f"field_{formula_field.id}": str(120 * 2),
            },
            {
                "id": row_2.id,
                f"field_{formula_field.id}": str(240 * 2),
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_200_OK
    # Only check the formula values; the response contains more keys.
    assert is_dict_subset(expected_response_body, response.json())
@pytest.mark.django_db
@pytest.mark.field_formula
@pytest.mark.api_rows
def test_batch_update_rows_dependent_fields_diff_table(api_client, data_fixture):
    """
    Updating rows in one table must recalculate lookup formula fields in a
    linked table that depend on those rows.
    """
    user, jwt_token = data_fixture.create_user_and_token()
    table, table_b, link_field = data_fixture.create_two_linked_tables(user=user)
    number_field = data_fixture.create_number_field(
        table=table_b, order=1, name="Number"
    )
    formula_field = data_fixture.create_formula_field(
        table=table,
        order=2,
        name="Number times two",
        formula=f"lookup('{link_field.name}', '{number_field.name}')*2",
        formula_type="number",
    )
    # Make sure the cross-table dependency graph is up to date.
    FieldDependencyHandler.rebuild_dependencies(formula_field, FieldCache())
    model_b = table_b.get_model()
    row_b_1 = model_b.objects.create()
    row_b_2 = model_b.objects.create()
    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create()
    # row_1 links to both rows in table_b, row_2 only to the second one.
    getattr(row_1, f"field_{link_field.id}").set([row_b_1.id, row_b_2.id])
    getattr(row_2, f"field_{link_field.id}").set([row_b_2.id])
    row_1.save()
    row_2.save()
    url = reverse("api:database:rows:batch", kwargs={"table_id": table_b.id})
    request_body = {
        "items": [
            {
                "id": row_b_1.id,
                f"field_{number_field.id}": 120,
            },
            {
                "id": row_b_2.id,
                f"field_{number_field.id}": 240,
            },
        ]
    }
    response = api_client.patch(
        url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )
    assert response.status_code == HTTP_200_OK
    # The lookup formula in the other table must reflect the new values.
    row_1.refresh_from_db()
    row_2.refresh_from_db()
    assert getattr(row_1, f"field_{formula_field.id}")[0]["value"] == 120 * 2
    assert getattr(row_1, f"field_{formula_field.id}")[1]["value"] == 240 * 2
    assert getattr(row_2, f"field_{formula_field.id}")[0]["value"] == 240 * 2

View file

@ -183,17 +183,20 @@ def test_get_table_serializer(data_fixture):
@pytest.mark.django_db
def test_get_example_row_serializer_class():
request_serializer = get_example_row_serializer_class()
response_serializer = get_example_row_serializer_class(add_id=True)
request_serializer = get_example_row_serializer_class(example_type="post")
response_serializer = get_example_row_serializer_class(example_type="get")
assert len(request_serializer._declared_fields) == (
len(field_type_registry.registry.values())
num_request_fields = len(request_serializer._declared_fields)
num_response_fields = len(response_serializer._declared_fields)
num_readonly_fields = len(
[ftype for ftype in field_type_registry.registry.values() if ftype.read_only]
)
assert len(response_serializer._declared_fields) == (
len(request_serializer._declared_fields) + 2 # fields + id + order
)
assert len(response_serializer._declared_fields) == (
len(field_type_registry.registry.values()) + 2 # fields + id + order
num_extra_response_fields = 2 # id + order
num_difference = num_readonly_fields + num_extra_response_fields
assert num_request_fields == num_response_fields - num_difference
assert num_response_fields == (
len(field_type_registry.registry.values()) + num_extra_response_fields
)
assert isinstance(

View file

@ -444,7 +444,7 @@ def test_single_select_field_type_api_row_views(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response_json["detail"][f"field_{field.id}"][0]["code"] == "incorrect_type"
assert response_json["detail"][f"field_{field.id}"][0]["code"] == "invalid"
response = api_client.post(
reverse("api:database:rows:list", kwargs={"table_id": table.id}),
@ -455,7 +455,7 @@ def test_single_select_field_type_api_row_views(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response_json["detail"][f"field_{field.id}"][0]["code"] == "does_not_exist"
assert response_json["detail"] == "The provided value is not a valid option."
response = api_client.post(
reverse("api:database:rows:list", kwargs={"table_id": table.id}),
@ -466,7 +466,7 @@ def test_single_select_field_type_api_row_views(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response_json["detail"][f"field_{field.id}"][0]["code"] == "does_not_exist"
assert response_json["detail"] == "The provided value is not a valid option."
response = api_client.post(
reverse("api:database:rows:list", kwargs={"table_id": table.id}),

View file

@ -145,6 +145,7 @@ are subscribed to the page.
* `field_restored`
* `row_created`
* `row_updated`
* `rows_updated`
* `row_deleted`
* `before_row_update`
* `before_row_delete`

View file

@ -20,7 +20,7 @@ class KanbanViewExampleResponseStackSerializer(serializers.Serializer):
results = serializers.ListSerializer(
help_text="All the rows that belong in this group related with the provided "
"`limit` and `offset`.",
child=get_example_row_serializer_class(True, False)(),
child=get_example_row_serializer_class(example_type="get")(),
)

View file

@ -183,6 +183,32 @@ export const registerRealtimeEvents = (realtime) => {
store.dispatch('rowModal/updated', { values: data.row })
})
// Handles the realtime "rows_updated" event pushed after a batch row update.
realtime.registerEvent('rows_updated', async (context, data) => {
// TODO: Rewrite
// This is currently a naive implementation of batch rows updates.
const { app, store } = context
// Notify every registered view type once per updated row so each view can
// refresh/reposition it. data.rows_before_update is index-aligned with
// data.rows, so index i pairs the old and new version of the same row.
for (const viewType of Object.values(app.$registry.getAll('view'))) {
for (let i = 0; i < data.rows.length; i++) {
const row = data.rows[i]
const rowBeforeUpdate = data.rows_before_update[i]
await viewType.rowUpdated(
context,
data.table_id,
store.getters['field/getAll'],
store.getters['field/getPrimary'],
rowBeforeUpdate,
row,
data.metadata,
'page/'
)
}
}
// Keep any open row edit modal in sync with the new row values.
for (let i = 0; i < data.rows.length; i++) {
store.dispatch('rowModal/updated', { values: data.rows[i] })
}
})
realtime.registerEvent('row_deleted', (context, data) => {
const { app, store } = context
for (const viewType of Object.values(app.$registry.getAll('view'))) {