
Fix problems with duplication/snapshots by introducing a more robust locking mechanism and ensuring endpoints don't get blocked by it

This commit is contained in:
Nigel Gott 2022-07-25 08:41:05 +00:00
parent a453b9cf6e
commit c5cc1d3ec5
44 changed files with 770 additions and 125 deletions

View file

@ -82,6 +82,7 @@ DATABASE_NAME=baserow
# BASEROW_GROUP_STORAGE_USAGE_QUEUE=
# BASEROW_COUNT_ROWS_ENABLED=
# DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS=
# BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR=
# BASEROW_FULL_HEALTHCHECKS=
# BASEROW_DISABLE_MODEL_CACHE=
# BASEROW_JOB_SOFT_TIME_LIMIT=

View file

@ -1,5 +1,7 @@
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND,
)
ERROR_GROUP_DOES_NOT_EXIST = (
"ERROR_GROUP_DOES_NOT_EXIST",

View file

@ -1,4 +1,4 @@
from rest_framework.status import HTTP_400_BAD_REQUEST
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_409_CONFLICT
from django.conf import settings
# None of these are passwords
@ -14,3 +14,10 @@ ERROR_CLIENT_SESSION_ID_HEADER_NOT_SET = (
f"The {settings.CLIENT_SESSION_ID_HEADER} must be set when using this endpoint.",
)
ERROR_DISABLED_RESET_PASSWORD = "ERROR_DISABLED_RESET_PASSWORD" # nosec
ERROR_UNDO_REDO_LOCK_CONFLICT = (
"ERROR_UNDO_REDO_LOCK_CONFLICT",
HTTP_409_CONFLICT,
"An operation is running in the background or triggered by another user preventing "
"your undo/redo action. Please wait until the other operation finishes.",
)

View file

@ -34,6 +34,7 @@ from baserow.core.exceptions import (
BaseURLHostnameNotAllowed,
GroupInvitationEmailMismatch,
GroupInvitationDoesNotExist,
LockConflict,
)
from baserow.core.models import GroupInvitation, Template
from baserow.core.user.exceptions import (
@ -55,6 +56,7 @@ from .errors import (
ERROR_DISABLED_SIGNUP,
ERROR_CLIENT_SESSION_ID_HEADER_NOT_SET,
ERROR_DISABLED_RESET_PASSWORD,
ERROR_UNDO_REDO_LOCK_CONFLICT,
)
from .exceptions import ClientSessionIdHeaderNotSetException
from .schemas import create_user_response_schema, authenticate_user_schema
@ -441,6 +443,12 @@ class DashboardView(APIView):
return Response(dashboard_serializer.data)
UNDO_REDO_EXCEPTIONS_MAP = {
ClientSessionIdHeaderNotSetException: ERROR_CLIENT_SESSION_ID_HEADER_NOT_SET,
LockConflict: ERROR_UNDO_REDO_LOCK_CONFLICT,
}
class UndoView(APIView):
permission_classes = (IsAuthenticated,)
@ -472,9 +480,7 @@ class UndoView(APIView):
responses={200: UndoRedoResponseSerializer},
)
@validate_body(UndoRedoRequestSerializer)
@map_exceptions(
{ClientSessionIdHeaderNotSetException: ERROR_CLIENT_SESSION_ID_HEADER_NOT_SET}
)
@map_exceptions(UNDO_REDO_EXCEPTIONS_MAP)
@transaction.atomic
def patch(self, request, data: List[ActionScopeStr]):
session_id = get_untrusted_client_session_id(request.user)
@ -516,9 +522,7 @@ class RedoView(APIView):
responses={200: UndoRedoResponseSerializer},
)
@validate_body(UndoRedoRequestSerializer)
@map_exceptions(
{ClientSessionIdHeaderNotSetException: ERROR_CLIENT_SESSION_ID_HEADER_NOT_SET}
)
@map_exceptions(UNDO_REDO_EXCEPTIONS_MAP)
@transaction.atomic
def patch(self, request, data: List[ActionScopeStr]):
session_id = get_untrusted_client_session_id(request.user)
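For context, `map_exceptions` pairs exception classes with error tuples like `ERROR_UNDO_REDO_LOCK_CONFLICT` above, so a raised `LockConflict` becomes a structured 409 response instead of a 500. A minimal sketch of the idea, assuming three-element `(error_code, http_status, message)` tuples; `map_exceptions_sketch` is a hypothetical stand-in, not Baserow's actual implementation:

from functools import wraps


def map_exceptions_sketch(mapping):
    # Hypothetical decorator: translates mapped exceptions raised by a view
    # method into (body, status) API error responses.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except tuple(mapping) as exc:
                for exc_type, (code, status, message) in mapping.items():
                    if isinstance(exc, exc_type):
                        return {"error": code, "detail": message}, status
                raise

        return wrapper

    return decorator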

View file

@ -685,6 +685,9 @@ BASEROW_SYNC_TEMPLATES_TIME_LIMIT = int(
APPEND_SLASH = False
BASEROW_DISABLE_MODEL_CACHE = bool(os.getenv("BASEROW_DISABLE_MODEL_CACHE", ""))
BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR = bool(
os.getenv("BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR", False)
)
# Indicates whether we are running the tests or not. Set to True in the test.py settings
# file used by pytest.ini

View file

@ -1,4 +1,8 @@
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND,
HTTP_409_CONFLICT,
)
from baserow.contrib.database.fields.constants import RESERVED_BASEROW_FIELD_NAMES
@ -101,3 +105,9 @@ ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES = (
HTTP_400_BAD_REQUEST,
"The requested field type is not compatible with generating unique values.",
)
ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT = (
"ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT",
HTTP_409_CONFLICT,
"The requested field is already being updated or used by another operation, "
"please try again after other concurrent operations have finished.",
)

View file

@ -1,5 +1,5 @@
from django.db import transaction
from django.conf import settings
from django.db import transaction
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import permission_classes as method_permission_classes
@ -21,7 +21,6 @@ from baserow.api.schemas import (
from baserow.api.trash.errors import ERROR_CANNOT_DELETE_ALREADY_DELETED_ITEM
from baserow.api.utils import DiscriminatorCustomFieldsMappingSerializer
from baserow.api.utils import validate_data_custom_fields, type_from_data_or_registry
from baserow.core.db import specific_iterator
from baserow.contrib.database.api.fields.errors import (
ERROR_CANNOT_DELETE_PRIMARY_FIELD,
ERROR_CANNOT_CHANGE_FIELD_TYPE,
@ -33,10 +32,23 @@ from baserow.contrib.database.api.fields.errors import (
ERROR_FIELD_SELF_REFERENCE,
ERROR_FIELD_CIRCULAR_REFERENCE,
ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES,
ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT,
)
from baserow.contrib.database.api.tables.errors import (
ERROR_TABLE_DOES_NOT_EXIST,
ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT,
)
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication
from baserow.contrib.database.api.tokens.errors import ERROR_NO_PERMISSION_TO_TABLE
from baserow.contrib.database.fields.actions import (
UpdateFieldActionType,
CreateFieldActionType,
DeleteFieldActionType,
)
from baserow.contrib.database.fields.dependencies.exceptions import (
SelfReferenceFieldDependencyError,
CircularFieldDependencyError,
)
from baserow.contrib.database.fields.exceptions import (
CannotDeletePrimaryField,
CannotChangeFieldType,
@ -46,15 +58,20 @@ from baserow.contrib.database.fields.exceptions import (
FieldWithSameNameAlreadyExists,
InvalidBaserowFieldName,
IncompatibleFieldTypeForUniqueValues,
FailedToLockFieldDueToConflict,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.exceptions import TableDoesNotExist
from baserow.contrib.database.table.exceptions import (
TableDoesNotExist,
FailedToLockTableDueToConflict,
)
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.tokens.exceptions import NoPermissionToTable
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.core.action.registries import action_type_registry
from baserow.core.db import specific_iterator
from baserow.core.exceptions import UserNotInGroup
from baserow.core.trash.exceptions import CannotDeleteAlreadyDeletedItem
from .serializers import (
@ -66,15 +83,6 @@ from .serializers import (
UniqueRowValueParamsSerializer,
UniqueRowValuesSerializer,
)
from baserow.contrib.database.fields.dependencies.exceptions import (
SelfReferenceFieldDependencyError,
CircularFieldDependencyError,
)
from baserow.contrib.database.fields.actions import (
UpdateFieldActionType,
CreateFieldActionType,
DeleteFieldActionType,
)
class FieldsView(APIView):
@ -213,6 +221,7 @@ class FieldsView(APIView):
InvalidBaserowFieldName: ERROR_INVALID_BASEROW_FIELD_NAME,
SelfReferenceFieldDependencyError: ERROR_FIELD_SELF_REFERENCE,
CircularFieldDependencyError: ERROR_FIELD_CIRCULAR_REFERENCE,
FailedToLockTableDueToConflict: ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT,
}
)
def post(self, request, data, table_id):
@ -220,7 +229,9 @@ class FieldsView(APIView):
type_name = data.pop("type")
field_type = field_type_registry.get(type_name)
table = TableHandler().get_table(table_id)
table = TableHandler().get_table_for_update(
table_id, nowait=not settings.BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR
)
table.database.group.has_user(request.user, raise_error=True)
# field_create permission doesn't exist, so any call of this endpoint with a
@ -340,6 +351,7 @@ class FieldView(APIView):
InvalidBaserowFieldName: ERROR_INVALID_BASEROW_FIELD_NAME,
SelfReferenceFieldDependencyError: ERROR_FIELD_SELF_REFERENCE,
CircularFieldDependencyError: ERROR_FIELD_CIRCULAR_REFERENCE,
FailedToLockFieldDueToConflict: ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT,
}
)
def patch(self, request, field_id):

View file

@ -1,6 +1,10 @@
from django.conf import settings
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND,
HTTP_409_CONFLICT,
)
ERROR_TABLE_DOES_NOT_EXIST = (
"ERROR_TABLE_DOES_NOT_EXIST",
@ -42,3 +46,9 @@ ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES = (
"Field names must be unique in Baserow per table however the initial table data "
"provided contains duplicate field names, please make them unique and try again.",
)
ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT = (
"ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT",
HTTP_409_CONFLICT,
"The requested table is already being updated or used by another operation, "
"please try again after other concurrent operations have finished.",
)

View file

@ -1,3 +1,4 @@
from django.conf import settings
from django.db import transaction
from baserow.api.sessions import get_untrusted_client_session_id
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
@ -37,6 +38,7 @@ from baserow.contrib.database.table.exceptions import (
InitialTableDataLimitExceeded,
InitialSyncTableDataLimitExceeded,
InitialTableDataDuplicateName,
FailedToLockTableDueToConflict,
)
from baserow.contrib.database.table.actions import (
DeleteTableActionType,
@ -58,6 +60,7 @@ from .errors import (
ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED,
ERROR_INITIAL_SYNC_TABLE_DATA_LIMIT_EXCEEDED,
ERROR_INITIAL_TABLE_DATA_HAS_DUPLICATE_NAMES,
ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT,
)
from .serializers import (
TableSerializer,
@ -342,6 +345,7 @@ class TableView(APIView):
{
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
FailedToLockTableDueToConflict: ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT,
}
)
@validate_body(TableUpdateSerializer)
@ -350,7 +354,10 @@ class TableView(APIView):
table = action_type_registry.get_by_type(UpdateTableActionType).do(
request.user,
TableHandler().get_table_for_update(table_id),
TableHandler().get_table_for_update(
table_id,
nowait=not settings.BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR,
),
name=data["name"],
)
@ -388,13 +395,18 @@ class TableView(APIView):
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotDeleteAlreadyDeletedItem: ERROR_CANNOT_DELETE_ALREADY_DELETED_ITEM,
FailedToLockTableDueToConflict: ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT,
}
)
def delete(self, request, table_id):
"""Deletes an existing table."""
action_type_registry.get_by_type(DeleteTableActionType).do(
request.user, TableHandler().get_table_for_update(table_id)
request.user,
TableHandler().get_table_for_update(
table_id,
nowait=not settings.BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR,
),
)
return Response(status=204)

View file

@ -1,10 +1,11 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from typing import Any, Dict, Optional
from zipfile import ZipFile
from django.core.management.color import no_style
from django.core.files.storage import Storage
from django.core.management.color import no_style
from django.db import connection
from django.db.transaction import Atomic
from django.urls import path, include
from django.utils import timezone
@ -16,7 +17,6 @@ from baserow.contrib.database.fields.dependencies.update_collector import (
FieldUpdateCollector,
)
from baserow.contrib.database.fields.field_cache import FieldCache
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.models import Database, Table
from baserow.contrib.database.views.registries import view_type_registry
@ -26,6 +26,7 @@ from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import ChildProgressBuilder
from baserow.core.utils import grouper
from .constants import IMPORT_SERIALIZED_IMPORTING, IMPORT_SERIALIZED_IMPORTING_TABLE
from .db.atomic import read_repeatable_single_database_atomic_transaction
from .export_serialized import DatabaseExportSerializedStructure
@ -56,19 +57,8 @@ class DatabaseApplicationType(ApplicationType):
path("database/", include(api_urls, namespace=self.type)),
]
def _lock_table_fields_for_share(self, table_ids: List[int]):
"""
Locks for share all the fields in the tables with the provided `table_ids`.
This function sets FOR SHARE locks to prevent ALTER TABLE and other
DDL (non MVCC friendly) commands from changing the table schema and breaking
the transaction isolation level, causing potential errors or data
inconsistencies during long read-only operations (e.g. export serialized).
"""
return Field.objects.raw(
"SELECT * FROM database_field WHERE table_id IN %s FOR SHARE", [table_ids]
)
def export_safe_transaction_context(self, application) -> Atomic:
return read_repeatable_single_database_atomic_transaction(application.id)
def export_serialized(
self,
@ -91,10 +81,6 @@ class DatabaseApplicationType(ApplicationType):
"view_set__viewsort_set",
)
table_ids = [table.id for table in tables]
if table_ids:
self._lock_table_fields_for_share(table_ids)
serialized_tables = []
for table in tables:
fields = table.field_set.all()

View file

@ -0,0 +1,95 @@
from django.db.transaction import Atomic
from psycopg2 import sql
from baserow.core.db import transaction_atomic, IsolationLevel
def read_repeatable_single_database_atomic_transaction(
database_id: int,
) -> Atomic:
"""
If you want to safely read the contents of a Baserow database inside of a single
transaction and be guaranteed to see a single snapshot of the metadata and user
data contained within the Baserow db tables then use this atomic transaction context
manager.
This manager does two things to ensure this:
1. It runs in the REPEATABLE READ postgres isolation level, meaning all queries
will see a snapshot of the database starting at the first SELECT (etc.)
statement run inside the transaction.
2. It runs that first statement in the transaction itself and intentionally
locks all field and table metadata rows FOR SHARE in this first SELECT
statement. This means once the transaction has obtained this lock it can
proceed safely without having to worry about fields being updated during the
length of the transaction.
We need to lock these rows as otherwise Baserow's various endpoints can
execute ALTER TABLE and DROP TABLE statements which are not MVCC safe and will
break the snapshot obtained by REPEATABLE READ, see
https://www.postgresql.org/docs/current/mvcc-caveats.html for more info.
:param database_id: The database to obtain table and field locks for to ensure
safe reading.
:return: An atomic context manager.
"""
# It is critical we obtain the locks in the first SELECT statement run in the
# REPEATABLE READ transaction so we are given a snapshot that is guaranteed to never
# have harmful MVCC operations run on it.
first_statement = sql.SQL(
"""
SELECT * FROM database_field
INNER JOIN database_table ON database_field.table_id = database_table.id
WHERE database_table.database_id = %s FOR SHARE OF database_field, database_table
"""
)
first_statement_args = [database_id]
return transaction_atomic(
isolation_level=IsolationLevel.REPEATABLE_READ,
first_sql_to_run_in_transaction_with_args=(
first_statement,
first_statement_args,
),
)
def read_committed_single_table_transaction(
table_id: int,
) -> Atomic:
"""
If you want to safely read the contents of a Baserow table inside of a single
transaction, be guaranteed that the fields won't change during the transaction
and that no unsafe MVCC operations can occur during it, then use this context
manager.
This manager does one thing to ensure this:
1. It runs that first statement in the transaction itself and intentionally
locks all field rows and the table's metadata row FOR SHARE in this first
SELECT statement. This means once the transaction has obtained this lock it
can proceed safely without having to worry about fields being updated during
the length of the transaction. We need to lock these rows as otherwise
Baserow's various endpoints can execute ALTER TABLE and DROP TABLE statements
which are not MVCC safe, see
https://www.postgresql.org/docs/current/mvcc-caveats.html for more info.
This manager uses READ COMMITTED and as such has a lower overhead, but does not
get the snapshot-like reading guarantees that REPEATABLE READ does.
:param table_id: The table to obtain table and field locks for to ensure
safe reading.
:return: An atomic context manager.
"""
first_statement = sql.SQL(
"""
SELECT * FROM database_field
INNER JOIN database_table ON database_field.table_id = database_table.id
WHERE database_table.id = %s FOR SHARE OF database_field, database_table
"""
)
first_statement_args = [table_id]
return transaction_atomic(
first_sql_to_run_in_transaction_with_args=(
first_statement,
first_statement_args,
),
)
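A short usage sketch of these two helpers; the wrapper functions and their bodies are hypothetical, the context managers are the ones defined above:

from baserow.contrib.database.db.atomic import (
    read_committed_single_table_transaction,
    read_repeatable_single_database_atomic_transaction,
)


def export_database_sketch(database_id: int):
    # Every query in this block sees one REPEATABLE READ snapshot, and the
    # FOR SHARE locks taken by the first statement stop concurrent ALTER
    # TABLE / DROP TABLE from breaking that snapshot mid-export.
    with read_repeatable_single_database_atomic_transaction(database_id):
        ...  # serialize the database's tables, fields and rows here


def read_single_table_sketch(table_id: int):
    # Cheaper READ COMMITTED variant: the table and its field metadata rows
    # stay locked FOR SHARE so the schema cannot change mid-read, but there
    # is no whole-transaction snapshot guarantee.
    with read_committed_single_table_transaction(table_id):
        ...  # read the table's rows here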

View file

@ -3,6 +3,7 @@ from django.core.exceptions import ValidationError
from baserow.core.exceptions import (
InstanceTypeDoesNotExist,
InstanceTypeAlreadyRegistered,
LockConflict,
)
@ -177,3 +178,10 @@ class InvalidLookupTargetField(Exception):
class IncompatibleFieldTypeForUniqueValues(Exception):
"""Raised when the unique values of an incompatible field are requested."""
class FailedToLockFieldDueToConflict(LockConflict):
"""
Raised when a user tries to update a field that is locked by another
concurrent operation.
"""

View file

@ -1,4 +1,5 @@
import logging
import traceback
from copy import deepcopy
from typing import (
Dict,
@ -17,7 +18,7 @@ from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import connection
from django.db.models import QuerySet
from django.db.utils import ProgrammingError, DataError
from django.db.utils import ProgrammingError, DataError, DatabaseError
from psycopg2 import sql
from baserow.contrib.database.db.schema import (
@ -55,6 +56,7 @@ from .exceptions import (
InvalidBaserowFieldName,
MaxFieldNameLengthExceeded,
IncompatibleFieldTypeForUniqueValues,
FailedToLockFieldDueToConflict,
)
from .field_cache import FieldCache
from .models import Field, SelectOption, SpecificFieldForUpdate
@ -170,15 +172,57 @@ class FieldHandler:
return field
def get_specific_field_for_update(
self, field_id: int, field_model: Optional[Type[T]] = None
self,
field_id: int,
field_model: Optional[Type[T]] = None,
nowait: Optional[bool] = None,
lock_table=True,
allow_trash=False,
) -> SpecificFieldForUpdate:
return cast(
SpecificFieldForUpdate,
self.get_field(
"""
Returns the .specific field which has been locked FOR UPDATE.
:param field_id: The field to lock and retrieve the specific instance of.
:param field_model: The field_model to query using, provide a specific one if
you want an exception raised if the field is not of this field_model type.
:param nowait: Whether to wait to get the lock on the row or not. If set to
True and the row is already locked a FailedToLockFieldDueToConflict
exception will be raised.
:param lock_table: Whether to also lock the field's table FOR UPDATE.
:param allow_trash: Whether trashed fields should also be included in the lock.
:return: A specific locked field instance
"""
if nowait is None:
nowait = not settings.BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR
if allow_trash:
queryset = Field.objects_and_trash
else:
queryset = Field.objects
if lock_table:
queryset = queryset.select_related("table").select_for_update(
of=("self", "table"), nowait=nowait
)
else:
queryset = queryset.select_for_update(of=("self",), nowait=nowait)
try:
specific_field = self.get_field(
field_id,
field_model,
base_queryset=Field.objects.select_for_update(of=("self",)),
).specific,
base_queryset=queryset,
).specific
except DatabaseError as e:
if "could not obtain lock on row" in traceback.format_exc():
raise FailedToLockFieldDueToConflict() from e
else:
raise e
return cast(
SpecificFieldForUpdate,
specific_field,
)
def create_field(
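Callers are expected to catch the conflict and translate it, which the endpoints in this commit do via `map_exceptions`. A hedged sketch with a hypothetical caller:

from django.db import transaction

from baserow.contrib.database.fields.exceptions import FailedToLockFieldDueToConflict
from baserow.contrib.database.fields.handler import FieldHandler


def rename_field_sketch(field_id: int, new_name: str):
    try:
        with transaction.atomic():
            # nowait=True makes a lock held by another transaction surface
            # immediately as FailedToLockFieldDueToConflict instead of
            # blocking this request until the other operation finishes.
            field = FieldHandler().get_specific_field_for_update(
                field_id, nowait=True
            )
            field.name = new_name
            field.save()
    except FailedToLockFieldDueToConflict:
        ...  # e.g. map to ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT (409)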

View file

@ -1,3 +1,6 @@
from baserow.core.exceptions import LockConflict
class TableDoesNotExist(Exception):
"""Raised when trying to get a table that doesn't exist."""
@ -40,3 +43,10 @@ class InitialTableDataDuplicateName(Exception):
"""
Raised when the initial table data contains duplicate field names.
"""
class FailedToLockTableDueToConflict(LockConflict):
"""
Raised when the table is in use by some concurrent operation and the lock cannot
be obtained immediately.
"""

View file

@ -1,9 +1,10 @@
import logging
import traceback
from typing import Any, cast, NewType, List, Tuple, Optional, Dict
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import ProgrammingError
from django.db import ProgrammingError, DatabaseError
from django.db.models import QuerySet, Sum
from django.utils import timezone
from django.utils import translation
@ -34,6 +35,7 @@ from .exceptions import (
InitialTableDataLimitExceeded,
InitialSyncTableDataLimitExceeded,
InitialTableDataDuplicateName,
FailedToLockTableDueToConflict,
)
from .models import Table
from .signals import table_updated, table_deleted, tables_reordered
@ -70,19 +72,34 @@ class TableHandler:
return table
def get_table_for_update(self, table_id: int) -> TableForUpdate:
def get_table_for_update(
self, table_id: int, nowait: bool = False
) -> TableForUpdate:
"""
Returns the table locked FOR UPDATE so it can safely be updated.
:param table_id: The id of the table that needs to be updated.
:param nowait: Whether to wait to get the lock on the table, or to raise a
FailedToLockTableDueToConflict immediately if the lock cannot be obtained.
:return: The table that needs to be updated.
:raises: FailedToLockTableDueToConflict if nowait is True and the table was not
able to be locked for update immediately.
"""
return cast(
TableForUpdate,
self.get_table(
table_id, base_queryset=Table.objects.select_for_update(of=("self",))
),
)
try:
return cast(
TableForUpdate,
self.get_table(
table_id,
base_queryset=Table.objects.select_for_update(
of=("self",), nowait=nowait
),
),
)
except DatabaseError as e:
if "could not obtain lock on row" in traceback.format_exc():
raise FailedToLockTableDueToConflict() from e
else:
raise e
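The table-level lock follows the same pattern as the field handler above; a hedged sketch with a hypothetical caller:

from django.db import transaction

from baserow.contrib.database.table.exceptions import FailedToLockTableDueToConflict
from baserow.contrib.database.table.handler import TableHandler


def rename_table_sketch(table_id: int, new_name: str):
    try:
        with transaction.atomic():
            # nowait=True turns a blocked SELECT ... FOR UPDATE into an
            # immediate FailedToLockTableDueToConflict.
            table = TableHandler().get_table_for_update(table_id, nowait=True)
            table.name = new_name
            table.save()
    except FailedToLockTableDueToConflict:
        ...  # e.g. map to ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT (409)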
def get_tables_order(self, database: Database) -> List[int]:
"""

View file

@ -23,6 +23,7 @@ from baserow.core.trash.exceptions import RelatedTableTrashedException
from baserow.core.trash.registries import TrashableItemType
from .models import TrashedRows
from baserow.contrib.database.fields.field_cache import FieldCache
from baserow.contrib.database.fields.exceptions import FieldDoesNotExist
User = get_user_model()
@ -163,7 +164,15 @@ class FieldTrashableItemType(TrashableItemType):
# for this field no longer exists.
trash_item_lookup_cache["row_table_model_cache"].pop(field.table.id, None)
field = field.specific
try:
field = FieldHandler().get_specific_field_for_update(
field.id,
nowait=False,
lock_table=False,
allow_trash=True,
)
except FieldDoesNotExist:
raise TrashItemDoesNotExist()
field_type = field_type_registry.get_by_model(field)
# Remove the field from the table schema.

View file

@ -12,6 +12,7 @@ from django.utils import timezone
from baserow.core.action.models import Action
from baserow.core.action.registries import action_type_registry, ActionScopeStr
from baserow.core.exceptions import LockConflict
logger = logging.getLogger(__name__)
@ -96,6 +97,8 @@ class ActionHandler:
with transaction.atomic():
for action in actions_being_undone:
cls._undo_action(user, action, undone_at)
except LockConflict:
raise
except Exception:
# if any single action fails, rollback and set the same error for all.
tb = traceback.format_exc()
@ -197,6 +200,8 @@ class ActionHandler:
with transaction.atomic():
for action in actions_being_redone:
cls._redo_action(user, action)
except LockConflict:
raise
except Exception:
# if just one action fails, rollback and set the same error for all.
tb = traceback.format_exc()

View file

@ -1,10 +1,12 @@
import contextlib
from collections import defaultdict
from typing import Iterable, Optional
from typing import Iterable, Optional, Tuple, List, Any
from django.contrib.contenttypes.models import ContentType
from django.db import DEFAULT_DB_ALIAS
from django.db import DEFAULT_DB_ALIAS, transaction
from django.db.models import QuerySet, Model
from django.db.transaction import Atomic, get_connection
from psycopg2 import sql
class LockedAtomicTransaction(Atomic):
@ -139,36 +141,24 @@ class IsolationLevel:
SERIALIZABLE = "SERIALIZABLE"
class BaserowAtomic(Atomic):
def __init__(
self,
using,
savepoint,
durable,
isolation_level: Optional[str] = None,
):
super().__init__(using, savepoint, durable)
self.isolation_level = isolation_level
def __enter__(self):
super().__enter__()
if self.isolation_level:
cursor = get_connection(self.using).cursor()
cursor.execute("SET TRANSACTION ISOLATION LEVEL %s" % self.isolation_level)
@contextlib.contextmanager
def transaction_atomic(
using=None,
savepoint=True,
durable=False,
isolation_level: Optional[str] = None,
first_sql_to_run_in_transaction_with_args: Optional[
Tuple[sql.SQL, List[Any]]
] = None,
):
# Bare decorator: @atomic -- although the first argument is called
# `using`, it's actually the function being decorated.
if callable(using):
return BaserowAtomic(DEFAULT_DB_ALIAS, savepoint, durable, isolation_level)(
using
)
# Decorator: @atomic(...) or context manager: with atomic(...): ...
else:
return BaserowAtomic(using, savepoint, durable, isolation_level)
with transaction.atomic(using, savepoint, durable) as a:
if isolation_level or first_sql_to_run_in_transaction_with_args:
cursor = get_connection(using).cursor()
if isolation_level:
cursor.execute("SET TRANSACTION ISOLATION LEVEL %s" % isolation_level)
if first_sql_to_run_in_transaction_with_args:
first_sql, first_args = first_sql_to_run_in_transaction_with_args
cursor.execute(first_sql, first_args)
yield a
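Usage then looks roughly like this; the statement and the id value 42 are placeholders, while `transaction_atomic` and `IsolationLevel` are the helpers from this module:

from psycopg2 import sql

from baserow.core.db import IsolationLevel, transaction_atomic

lock_statement = sql.SQL("SELECT * FROM database_table WHERE id = %s FOR SHARE")

# The isolation level is set before any other query runs in the transaction,
# then the locking statement executes first so the snapshot cannot be broken
# by concurrent DDL afterwards.
with transaction_atomic(
    isolation_level=IsolationLevel.REPEATABLE_READ,
    first_sql_to_run_in_transaction_with_args=(lock_statement, [42]),
):
    ...  # long read-only work goes here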

View file

@ -131,3 +131,10 @@ class TrashItemDoesNotExist(Exception):
"""
Raised when the trash item does not exist in the database.
"""
class LockConflict(Exception):
"""
Generic base class for exceptions raised when an operation fails because part
of Baserow has been locked by another operation.
"""

View file

@ -937,9 +937,10 @@ class CoreHandler:
for a in applications:
application = a.specific
application_type = application_type_registry.get_by_model(application)
exported_application = application_type.export_serialized(
application, files_zip, storage
)
with application_type.export_safe_transaction_context(application):
exported_application = application_type.export_serialized(
application, files_zip, storage
)
exported_applications.append(exported_application)
return exported_applications

View file

@ -1,18 +1,18 @@
from typing import Any, Dict
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import AbstractUser
from rest_framework import serializers
from baserow.api.applications.serializers import ApplicationSerializer
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP, ERROR_GROUP_DOES_NOT_EXIST
from baserow.core.action.registries import action_type_registry
from baserow.core.actions import DuplicateApplicationActionType
from baserow.core.db import transaction_atomic, IsolationLevel
from baserow.core.exceptions import UserNotInGroup, GroupDoesNotExist
from baserow.core.handler import CoreHandler
from baserow.core.jobs.registries import JobType
from baserow.core.jobs.types import AnyJob
from baserow.core.models import Application, DuplicateApplicationJob
from baserow.core.jobs.registries import JobType
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP, ERROR_GROUP_DOES_NOT_EXIST
from baserow.core.action.registries import action_type_registry
from baserow.core.registries import application_type_registry
from baserow.core.utils import Progress
@ -40,8 +40,14 @@ class DuplicateApplicationJobType(JobType):
"duplicated_application": ApplicationSerializer(read_only=True),
}
def transaction_atomic_context(self):
return transaction_atomic(isolation_level=IsolationLevel.REPEATABLE_READ)
def transaction_atomic_context(self, job: "DuplicateApplicationJob"):
application = (
CoreHandler()
.get_user_application(job.user, job.original_application_id)
.specific
)
application_type = application_type_registry.get_by_model(application)
return application_type.export_safe_transaction_context(application)
def prepare_values(
self, values: Dict[str, Any], user: AbstractUser

View file

@ -38,7 +38,7 @@ class JobType(
messages.
"""
def transaction_atomic_context(self):
def transaction_atomic_context(self, job: Job):
"""
This method gives the possibility to change the transaction context per request.
"""

View file

@ -1,6 +1,7 @@
from datetime import timedelta
from django.conf import settings
from django.db import transaction
from baserow.core.jobs.registries import job_type_registry
from baserow.config.celery import app
@ -25,13 +26,14 @@ def run_async_job(self, job_id: int):
from .cache import job_progress_key
job = Job.objects.get(id=job_id).specific
job_type = job_type_registry.get_by_model(job)
job.state = JOB_STARTED
job.save()
with transaction.atomic():
job = Job.objects.get(id=job_id).specific
job_type = job_type_registry.get_by_model(job)
job.state = JOB_STARTED
job.save()
try:
with job_type.transaction_atomic_context():
with job_type.transaction_atomic_context(job):
JobHandler().run(job)
job.state = JOB_FINISHED

View file

@ -1,7 +1,13 @@
from typing import Any, Dict, Optional, TYPE_CHECKING
from zipfile import ZipFile
from django.core.files.storage import Storage
from django.db.transaction import Atomic
from baserow.contrib.database.constants import IMPORT_SERIALIZED_IMPORTING
from baserow.core.utils import ChildProgressBuilder, Progress
from .exceptions import ApplicationTypeAlreadyRegistered, ApplicationTypeDoesNotExist
from .export_serialized import CoreExportSerializedStructure
from .registry import (
Instance,
Registry,
@ -11,9 +17,6 @@ from .registry import (
APIUrlsInstanceMixin,
ImportExportMixin,
)
from .export_serialized import CoreExportSerializedStructure
from baserow.core.utils import ChildProgressBuilder, Progress
from baserow.contrib.database.constants import IMPORT_SERIALIZED_IMPORTING
if TYPE_CHECKING:
from .models import Application, Group
@ -177,6 +180,22 @@ class ApplicationType(
:type application: Application
"""
def export_safe_transaction_context(self, application: "Application") -> Atomic:
"""
Should return an Atomic context (such as transaction.atomic or
baserow.contrib.database.db.atomic.read_repeatable_single_database_atomic_transaction)
which can be used to safely run a database transaction to export an application
of this type.
:param application: The application that we are about to export.
:return: An Atomic context object that will be used to open a transaction safely
to export an application of this type.
"""
raise NotImplementedError(
"Must be implemented by the specific application type"
)
def export_serialized(
self,
application: "Application",

View file

@ -1,8 +1,11 @@
import contextlib
from contextlib import contextmanager
from decimal import Decimal
from typing import Any, Dict, List, Optional, Type
import psycopg2
from django.contrib.auth.models import AbstractUser
from django.db import connection
from django.utils.dateparse import parse_datetime, parse_date
from django.utils.timezone import make_aware, utc
from freezegun import freeze_time
@ -373,3 +376,18 @@ def assert_undo_redo_actions_fails_with_error(
for action, expected_action_type in zip(actions, expected_action_types):
assert action, "Action is None, but should be of type {expected_action_type}"
assert action.error is not None, "Action has no error, but should have one"
@contextlib.contextmanager
def independent_test_db_connection():
# Opens a second psycopg2 connection, outside of Django's managed
# connection, so a test can hold row locks concurrently with API calls.
d = connection.settings_dict
conn = psycopg2.connect(
host=d["HOST"],
database=d["NAME"],
user=d["USER"],
password=d["PASSWORD"],
port=d["PORT"],
)
conn.autocommit = False
yield conn
conn.close()

View file

@ -1,13 +1,16 @@
import uuid
import pytest
import pytest
from django.contrib.auth import get_user_model
from django.shortcuts import reverse
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_409_CONFLICT
from baserow.api.user.serializers import UndoRedoResultCodeField
from baserow.contrib.database.fields.models import TextField, NumberField
from baserow.core.action.models import Action
from baserow.core.actions import CreateGroupActionType
from baserow.core.models import Group
from baserow.test_utils.helpers import independent_test_db_connection
User = get_user_model()
@ -409,3 +412,90 @@ def test_invalid_undo_redo_action_group_header_raise_error(api_client, data_fixt
"An invalid ClientUndoRedoActionGroupId header was provided. "
"It must be a valid Version 4 UUID."
]
@pytest.mark.django_db(transaction=True)
@pytest.mark.undo_redo
def test_undoing_when_field_locked_fails_and_doesnt_skip(api_client, data_fixture):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(user, table=table)
same_session_id = "test"
response = api_client.patch(
reverse("api:database:fields:item", kwargs={"field_id": field.id}),
{"name": "Test 1", "type": "number"},
format="json",
HTTP_CLIENTSESSIONID=same_session_id,
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_field where id = {field.id} FOR SHARE"
)
assert len(cursor.fetchall()) == 1
response = api_client.patch(
reverse("api:user:undo"),
{"scopes": {"table": table.id}},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
HTTP_CLIENTSESSIONID=same_session_id,
)
assert response.status_code == HTTP_409_CONFLICT
assert response.json()["error"] == "ERROR_UNDO_REDO_LOCK_CONFLICT"
assert NumberField.objects.filter(id=field.id).exists()
assert not TextField.objects.filter(id=field.id).exists()
assert not Action.objects.get().is_undone()
@pytest.mark.django_db(transaction=True)
@pytest.mark.undo_redo
def test_redoing_when_field_locked_fails_and_doesnt_skip(api_client, data_fixture):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(user, table=table)
same_session_id = "test"
response = api_client.patch(
reverse("api:database:fields:item", kwargs={"field_id": field.id}),
{"name": "Test 1", "type": "number"},
format="json",
HTTP_CLIENTSESSIONID=same_session_id,
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
api_client.patch(
reverse("api:user:undo"),
{"scopes": {"table": table.id}},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
HTTP_CLIENTSESSIONID=same_session_id,
)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_field where id = {field.id} FOR SHARE"
)
assert len(cursor.fetchall()) == 1
response = api_client.patch(
reverse("api:user:redo"),
{"scopes": {"table": table.id}},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
HTTP_CLIENTSESSIONID=same_session_id,
)
assert response.status_code == HTTP_409_CONFLICT
assert response.json()["error"] == "ERROR_UNDO_REDO_LOCK_CONFLICT"
assert not NumberField.objects.filter(id=field.id).exists()
assert TextField.objects.filter(id=field.id).exists()
assert Action.objects.get().is_undone()

View file

@ -6,6 +6,7 @@ from rest_framework.status import (
HTTP_401_UNAUTHORIZED,
HTTP_404_NOT_FOUND,
HTTP_204_NO_CONTENT,
HTTP_409_CONFLICT,
)
from django.shortcuts import reverse
@ -13,6 +14,7 @@ from django.conf import settings
from baserow.contrib.database.fields.models import Field, TextField, NumberField
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.test_utils.helpers import independent_test_db_connection
@pytest.mark.django_db
@ -603,3 +605,156 @@ def test_unique_row_values_incompatible_field_type(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json["error"] == "ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES"
@pytest.mark.django_db(transaction=True)
def test_update_field_returns_with_error_if_cant_lock_field_if_locked_for_update(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(user, table=table)
url = reverse("api:database:fields:item", kwargs={"field_id": text_field.id})
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_field where id = {text_field.id} "
f"FOR UPDATE"
)
response = api_client.patch(
url,
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_update_field_returns_with_error_if_cant_lock_field_if_locked_for_share(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(user, table=table)
url = reverse("api:database:fields:item", kwargs={"field_id": text_field.id})
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_field where id = {text_field.id} FOR SHARE"
)
response = api_client.patch(
url,
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_update_field_returns_with_error_if_cant_lock_table_if_locked_for_update(
api_client, data_fixture, django_assert_num_queries
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(user, table=table)
url = reverse("api:database:fields:item", kwargs={"field_id": text_field.id})
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR UPDATE"
)
response = api_client.patch(
url,
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_update_field_returns_with_error_if_cant_lock_table_if_locked_for_share(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(user, table=table)
url = reverse("api:database:fields:item", kwargs={"field_id": text_field.id})
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR SHARE"
)
response = api_client.patch(
url,
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_create_field_returns_with_error_if_cant_lock_table_if_locked_for_update(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR UPDATE"
)
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": "Test 1", "type": "text"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_create_field_returns_with_error_if_cant_lock_table_if_locked_for_share(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR SHARE"
)
response = api_client.post(
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
{"name": "Test 1", "type": "text"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"

View file

@ -12,10 +12,12 @@ from rest_framework.status import (
HTTP_204_NO_CONTENT,
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND,
HTTP_409_CONFLICT,
)
from baserow.contrib.database.file_import.models import FileImportJob
from baserow.contrib.database.table.models import Table
from baserow.test_utils.helpers import independent_test_db_connection
@pytest.mark.django_db
@ -447,3 +449,97 @@ def test_get_database_application_with_tables(api_client, data_fixture):
assert len(response_json["tables"]) == 2
assert response_json["tables"][0]["id"] == table_1.id
assert response_json["tables"][1]["id"] == table_2.id
@pytest.mark.django_db(transaction=True)
def test_update_table_returns_with_error_if_cant_lock_table_if_locked_for_update(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR UPDATE"
)
response = api_client.patch(
reverse("api:database:tables:item", kwargs={"table_id": table.id}),
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_update_table_returns_with_error_if_cant_lock_table_if_locked_for_share(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR SHARE"
)
response = api_client.patch(
reverse("api:database:tables:item", kwargs={"table_id": table.id}),
{"name": "Test 1"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_delete_table_returns_with_error_if_cant_lock_table_if_locked_for_update(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR UPDATE"
)
response = api_client.delete(
reverse("api:database:tables:item", kwargs={"table_id": table.id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"
@pytest.mark.django_db(transaction=True)
def test_delete_table_returns_with_error_if_cant_lock_table_if_locked_for_share(
api_client, data_fixture
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
with independent_test_db_connection() as conn:
with conn.cursor() as cursor:
# nosec
cursor.execute(
f"SELECT * FROM database_table where id = {table.id} FOR SHARE"
)
response = api_client.delete(
reverse("api:database:tables:item", kwargs={"table_id": table.id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
assert response.status_code == HTTP_409_CONFLICT
assert response_json["error"] == "ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT"

View file

@ -172,7 +172,7 @@ def test_created_on_field_type_wrong_timezone(data_fixture):
)
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_import_export_last_modified_field(data_fixture):
user = data_fixture.create_user()
imported_group = data_fixture.create_group(user=user)

View file

@ -185,7 +185,7 @@ def test_file_field_type(data_fixture):
assert results[2].text is None
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_import_export_file_field(data_fixture, tmpdir):
user = data_fixture.create_user()
imported_group = data_fixture.create_group(user=user)

View file

@ -176,7 +176,7 @@ def test_last_modified_field_type_wrong_timezone(data_fixture):
)
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_import_export_last_modified_field(data_fixture):
user = data_fixture.create_user()
imported_group = data_fixture.create_group(user=user)

View file

@ -752,7 +752,7 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):
assert len(response_json[f"field_{link_row_field.id}"]) == 0
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
@pytest.mark.field_link_row
def test_import_export_link_row_field(data_fixture):
user = data_fixture.create_user()

View file

@ -522,7 +522,7 @@ def test_import_export_lookup_field_trashed_target_field(data_fixture, api_clien
)
@pytest.mark.django_db()
@pytest.mark.django_db(transaction=True)
def test_import_export_tables_with_lookup_fields(
data_fixture, django_assert_num_queries
):

View file

@ -578,7 +578,7 @@ def test_import_export_multiple_select_field(data_fixture):
assert imported_select_option.order == select_option.order
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_get_set_export_serialized_value_multiple_select_field(
data_fixture, django_assert_num_queries
):

View file

@ -756,7 +756,7 @@ def test_import_export_single_select_field(data_fixture):
assert imported_select_option.order == select_option.order
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_get_set_export_serialized_value_single_select_field(data_fixture):
user = data_fixture.create_user()
group = data_fixture.create_group(user=user)

View file

@ -946,7 +946,7 @@ def test_get_template(data_fixture):
)
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_export_import_group_application(data_fixture):
group = data_fixture.create_group()
imported_group = data_fixture.create_group()

View file

@ -207,7 +207,7 @@ def test_an_app_marked_for_perm_deletion_no_longer_shows_up_in_trash_structure(
assert group["applications"].count() == 0
@pytest.mark.django_db
@pytest.mark.django_db(transaction=True)
def test_perm_deleting_a_parent_with_a_trashed_child_also_cleans_up_the_child_entry(
data_fixture,
):

View file

@ -32,6 +32,7 @@ For example:
### Breaking Changes
* API endpoints `undo` and `redo` now return a list of actions undone/redone instead of a single action.
* Removed `primary` from all `components`and `stores` where it isn't absolutely required. [#1057](https://gitlab.com/bramw/baserow/-/issues/1057)
* Concurrent field updates will now respond with a 409 instead of blocking until the previous update finishes; set the env var BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR to revert to the old behaviour. [#1097](https://gitlab.com/bramw/baserow/-/issues/1097)
* **breaking change** Webhooks `row.created`, `row.updated` and `row.deleted` are
replaced with `rows.created`, `rows.updated` and `rows.deleted`, containing multiple
@ -39,6 +40,7 @@ For example:
body will contain only the first changed row instead of all rows. It is highly
recommended to convert all webhooks to the new types.
* Fix not being able to paste multiple cells when a formula field of array or single select type was in an error state. [#1084](https://gitlab.com/bramw/baserow/-/issues/1084)
recommended converting all webhooks to the new types.
## Released (2022-07-05 1.10.2)

View file

@ -85,6 +85,7 @@ x-backend-variables: &backend-variables
BASEROW_GROUP_STORAGE_USAGE_QUEUE:
BASEROW_COUNT_ROWS_ENABLED:
DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR:
BASEROW_FULL_HEALTHCHECKS:
BASEROW_DISABLE_MODEL_CACHE:
BASEROW_PLUGIN_DIR:
@ -93,6 +94,7 @@ x-backend-variables: &backend-variables
BASEROW_MAX_FILE_IMPORT_ERROR_COUNT:
BASEROW_JOB_SOFT_TIME_LIMIT:
BASEROW_IMPORT_TOLERATED_TYPE_ERROR_THRESHOLD:
BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
services:

View file

@ -104,6 +104,7 @@ x-backend-variables: &backend-variables
BASEROW_GROUP_STORAGE_USAGE_QUEUE:
BASEROW_COUNT_ROWS_ENABLED:
DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR:
BASEROW_FULL_HEALTHCHECKS:
BASEROW_DISABLE_MODEL_CACHE:
BASEROW_PLUGIN_DIR:
@ -112,6 +113,7 @@ x-backend-variables: &backend-variables
BASEROW_MAX_FILE_IMPORT_ERROR_COUNT:
BASEROW_JOB_SOFT_TIME_LIMIT:
BASEROW_IMPORT_TOLERATED_TYPE_ERROR_THRESHOLD:
BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
services:

View file

@ -101,6 +101,7 @@ x-backend-variables: &backend-variables
BASEROW_GROUP_STORAGE_USAGE_QUEUE:
BASEROW_COUNT_ROWS_ENABLED:
DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
BASEROW_BLOCK_INSTEAD_OF_409_CONFLICT_ERROR:
BASEROW_FULL_HEALTHCHECKS:
BASEROW_DISABLE_MODEL_CACHE:
BASEROW_PLUGIN_DIR:

View file

@ -47,7 +47,7 @@ The installation methods referred to in the variable descriptions are:
| INITIAL\_TABLE\_DATA\_LIMIT | The amount of rows that can be imported when creating a table. Defaults to empty which means unlimited rows. | |
| BASEROW\_ROW\_PAGE\_SIZE\_LIMIT | The maximum number of rows that can be requested at once. | 200 |
| BASEROW\_FILE_UPLOAD\_SIZE\_LIMIT\_MB | The max file size in MB allowed to be uploaded by users into a Baserow File Field. | 1048576 (1 TB or 1024*1024) |
|BATCH\_ROWS\_SIZE\_LIMIT|Controls how many rows can be created, deleted or updated at once using the batch endpoints.|200|
| BATCH\_ROWS\_SIZE\_LIMIT |Controls how many rows can be created, deleted or updated at once using the batch endpoints.|200|
### Backend Database Configuration
| Name | Description | Defaults |
@ -95,6 +95,7 @@ The installation methods referred to in the variable descriptions are:
| ADDITIONAL\_APPS | A comma separated list of additional django applications to add to the INSTALLED\_APPS django setting | |
| HOURS\_UNTIL\_TRASH\_PERMANENTLY\_DELETED | Items from the trash will be permanently deleted after this number of hours. | |
| DISABLE\_ANONYMOUS\_PUBLIC\_VIEW\_WS\_CONNECTIONS | When sharing views publicly a websocket connection is opened to provide realtime updates to viewers of the public link. To disable this set any non empty value. When disabled publicly shared links will need to be refreshed to see any updates to the view. | |
| BASEROW\_BLOCK\_INSTEAD\_OF\_409\_CONFLICT\_ERROR | When updating or creating various resources in Baserow, if another concurrent operation is ongoing (like a snapshot, duplication or import) which would be affected by your modification, a 409 HTTP error will be returned. If you would instead prefer Baserow to block and wait until the other operation finishes, and then perform the requested operation, set this flag to any non-empty value. | |
| BASEROW\_FULL\_HEALTHCHECKS | When set to any non empty value will additionally check in the backend's healthcheck at /_health/ if storage can be written to (causes lots of small filesystem writes) and a more general check if enough disk and memory is available. | |
| BASEROW\_JOB\_CLEANUP\_INTERVAL\_MINUTES | How often the job cleanup task will run.|5|
| BASEROW\_JOB\_EXPIRATION\_TIME\_LIMIT | How long a Baserow job will be kept before being cleaned up.| 30 * 24 * 60 (30 days) |

View file

@ -221,7 +221,13 @@
"groupUserIsLastAdminTitle": "Can't leave the group",
"groupUserIsLastAdminDescription": "It's not possible to leave the group because you're the last admin or because other admin's accounts are pending deletion. Please delete the group or give another user admin permissions.",
"errorMaxJobCountExceededTitle": "Couldn't start action.",
"errorMaxJobCountExceededDescription": "You cannot start any more of these actions as you have others still running in the background. Please try again once the previous actions complete."
"errorMaxJobCountExceededDescription": "You cannot start any more of these actions as you have others still running in the background. Please try again once the previous actions complete.",
"failedToLockFieldDueToConflictTitle": "Can't change field",
"failedToLockFieldDueToConflictDescription": "Another operation is currently updating or using this field, please wait until it finishes before trying again.",
"failedToLockTableDueToConflictTitle": "Can't create field",
"failedToLockTableDueToConflictDescription": "Another operation is currently updating or using this table, please wait until it finishes before trying again.",
"failedToUndoRedoDueToConflictTitle": "Can't undo/redo",
"failedToUndoRedoDueToConflictDescription": "Another operation is currently running blocking your undo or redo, please wait until it finishes before trying again."
},
"importerType": {
"csv": "Import a CSV file",

View file

@ -81,6 +81,18 @@ export class ClientErrorMap {
app.i18n.t('clientHandler.errorMaxJobCountExceededTitle'),
app.i18n.t('clientHandler.errorMaxJobCountExceededDescription')
),
ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT: new ResponseErrorMessage(
app.i18n.t('clientHandler.failedToLockFieldDueToConflictTitle'),
app.i18n.t('clientHandler.failedToLockFieldDueToConflictDescription')
),
ERROR_FAILED_TO_LOCK_TABLE_DUE_TO_CONFLICT: new ResponseErrorMessage(
app.i18n.t('clientHandler.failedToLockTableDueToConflictTitle'),
app.i18n.t('clientHandler.failedToLockTableDueToConflictDescription')
),
ERROR_UNDO_REDO_LOCK_CONFLICT: new ResponseErrorMessage(
app.i18n.t('clientHandler.failedToUndoRedoDueToConflictTitle'),
app.i18n.t('clientHandler.failedToUndoRedoDueToConflictDescription')
),
}
}