diff --git a/backend/src/baserow/contrib/database/api/tables/serializers.py b/backend/src/baserow/contrib/database/api/tables/serializers.py
index 96943f655..a98a162de 100644
--- a/backend/src/baserow/contrib/database/api/tables/serializers.py
+++ b/backend/src/baserow/contrib/database/api/tables/serializers.py
@@ -1,9 +1,71 @@
+from django.utils.functional import lazy
+
 from rest_framework import serializers
+from rest_framework.exceptions import ValidationError
 
 from baserow.contrib.database.api.data_sync.serializers import DataSyncSerializer
+from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.table.models import Table
 
 
+class TableImportConfiguration(serializers.Serializer):
+    """
+    Additional table import configuration.
+    """
+
+    upsert_fields = serializers.ListField(
+        child=serializers.IntegerField(min_value=1),
+        min_length=1,
+        allow_null=True,
+        allow_empty=True,
+        default=None,
+        help_text=lazy(
+            lambda: (
+                "A list of field IDs in the table used to generate a value for "
+                "identifying a row during the upsert process in file import. Each "
+                "field ID must reference an existing field in the table, which will "
+                "be used to match provided values against existing ones to determine "
+                "whether a row should be inserted or updated.\n "
+                "Field types that can be used in upsert fields: "
+                f"{','.join([f.type for f in field_type_registry.get_all() if f.can_upsert])}. "
+                "If specified, `upsert_values` should also be provided."
+            )
+        ),
+    )
+    upsert_values = serializers.ListField(
+        allow_empty=True,
+        allow_null=True,
+        default=None,
+        child=serializers.ListField(
+            min_length=1,
+        ),
+        help_text=(
+            "A list of values that are identifying rows in imported data.\n "
+            "The number of rows in `upsert_values` should be equal to the number of "
+            "rows in imported data. Each row in `upsert_values` should contain a "
+            "list of values that match the number and field types of fields selected "
+            "in `upsert_fields`. Based on `upsert_fields`, a similar upsert values "
+            "will be calculated for each row in the table.\n "
+            "There's no guarantee of uniqueness of row identification calculated from "
+            "`upsert_values` nor from the table. Repeated upsert values are compared "
+            "in order with matching values in the table. The imported data must be in "
+            "the same order as the table rows for correct matching."
+        ),
+    )
+
+    def validate(self, attrs):
+        if attrs.get("upsert_fields") and not len(attrs.get("upsert_values") or []):
+            raise ValidationError(
+                {
+                    "upsert_value": (
+                        "upsert_values must not be empty "
+                        "when upsert_fields are provided."
+                    )
+                }
+            )
+        return attrs
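+
+    # A hypothetical configuration payload, assuming a text field with id 10
+    # exists in the target table:
+    #
+    #   {"upsert_fields": [10], "upsert_values": [["Ada"], ["Alan"]]}
+    #
+    # Imported rows whose calculated upsert value matches an existing row's
+    # value are updated; the remaining imported rows are inserted as new rows.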
+
+
 class TableSerializer(serializers.ModelSerializer):
     data_sync = DataSyncSerializer()
 
@@ -74,10 +136,26 @@ class TableImportSerializer(serializers.Serializer):
             "for adding two rows to a table with two writable fields."
         ),
     )
+    configuration = TableImportConfiguration(required=False, default=None)
 
     class Meta:
         fields = ("data",)
 
+    def validate(self, attrs):
+        if attrs.get("configuration"):
+            if attrs["configuration"].get("upsert_values"):
+                if len(attrs["configuration"].get("upsert_values")) != len(
+                    attrs["data"]
+                ):
+                    msg = (
+                        "`data` and `configuration.upsert_values` "
+                        "should have the same length."
+                    )
+                    raise ValidationError(
+                        {"data": msg, "configuration": {"upsert_values": msg}}
+                    )
+        return attrs
+
 
 class TableUpdateSerializer(serializers.ModelSerializer):
     class Meta:
diff --git a/backend/src/baserow/contrib/database/api/tables/views.py b/backend/src/baserow/contrib/database/api/tables/views.py
index b1f52867b..380bccacd 100644
--- a/backend/src/baserow/contrib/database/api/tables/views.py
+++ b/backend/src/baserow/contrib/database/api/tables/views.py
@@ -489,14 +489,14 @@ class AsyncTableImportView(APIView):
             workspace=table.database.workspace,
             context=table,
         )
-
+        configuration = data.get("configuration")
         data = data["data"]
-
         file_import_job = JobHandler().create_and_start_job(
             request.user,
             "file_import",
             data=data,
             table=table,
+            configuration=configuration,
         )
 
         serializer = job_type_registry.get_serializer(file_import_job, JobSerializer)
diff --git a/backend/src/baserow/contrib/database/fields/field_types.py b/backend/src/baserow/contrib/database/fields/field_types.py
index fbf893c95..4d8430f26 100755
--- a/backend/src/baserow/contrib/database/fields/field_types.py
+++ b/backend/src/baserow/contrib/database/fields/field_types.py
@@ -412,6 +412,8 @@ class TextFieldType(CollationSortMixin, FieldType):
     serializer_field_names = ["text_default"]
     _can_group_by = True
 
+    can_upsert = True
+
     def get_serializer_field(self, instance, **kwargs):
         required = kwargs.get("required", False)
         return serializers.CharField(
@@ -456,6 +458,7 @@ class LongTextFieldType(CollationSortMixin, FieldType):
     model_class = LongTextField
     allowed_fields = ["long_text_enable_rich_text"]
     serializer_field_names = ["long_text_enable_rich_text"]
+    can_upsert = True
 
     def check_can_group_by(self, field: Field, sort_type: str) -> bool:
         return not field.long_text_enable_rich_text
@@ -570,6 +573,7 @@ class NumberFieldType(FieldType):
     }
     _can_group_by = True
     _db_column_fields = ["number_decimal_places"]
+    can_upsert = True
 
     def prepare_value_for_db(self, instance: NumberField, value):
         if value is None:
@@ -811,6 +815,7 @@ class RatingFieldType(FieldType):
     serializer_field_names = ["max_value", "color", "style"]
     _can_group_by = True
     _db_column_fields = []
+    can_upsert = True
 
     def prepare_value_for_db(self, instance, value):
         if not value:
@@ -936,6 +941,7 @@ class BooleanFieldType(FieldType):
     type = "boolean"
     model_class = BooleanField
     _can_group_by = True
+    can_upsert = True
 
     def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
         """
@@ -1025,6 +1031,7 @@ class DateFieldType(FieldType):
     }
     _can_group_by = True
     _db_column_fields = ["date_include_time"]
+    can_upsert = True
 
     def can_represent_date(self, field):
         return True
@@ -1931,6 +1938,7 @@ class DurationFieldType(FieldType):
     serializer_field_names = ["duration_format"]
     _can_group_by = True
     _db_column_fields = []
+    can_upsert = True
 
     def get_model_field(self, instance: DurationField, **kwargs):
         return DurationModelField(instance.duration_format, null=True, **kwargs)
@@ -3483,6 +3491,7 @@ class LinkRowFieldType(
 class EmailFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):
     type = "email"
     model_class = EmailField
+    can_upsert = True
 
     @property
     def regex(self):
@@ -4742,6 +4751,7 @@ class PhoneNumberFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):
 
     type = "phone_number"
     model_class = PhoneNumberField
+    can_upsert = True
 
     MAX_PHONE_NUMBER_LENGTH = 100
 
diff --git a/backend/src/baserow/contrib/database/fields/registries.py b/backend/src/baserow/contrib/database/fields/registries.py
index 986c8e829..7126b297e 100644
--- a/backend/src/baserow/contrib/database/fields/registries.py
+++ b/backend/src/baserow/contrib/database/fields/registries.py
@@ -210,6 +210,12 @@ class FieldType(
     some fields can depend on it like the `lookup` field.
     """
 
+    can_upsert = False
+    """
+    A field of this type may be used to calculate a match value during import,
+    allowing existing rows to be updated with imported data instead of new
+    rows being added.
+    """
+
     @property
     def db_column_fields(self) -> Set[str]:
         if self._db_column_fields is not None:
diff --git a/backend/src/baserow/contrib/database/file_import/job_types.py b/backend/src/baserow/contrib/database/file_import/job_types.py
index c79d21f8a..753e2d4ec 100644
--- a/backend/src/baserow/contrib/database/file_import/job_types.py
+++ b/backend/src/baserow/contrib/database/file_import/job_types.py
@@ -26,6 +26,7 @@ from baserow.contrib.database.fields.exceptions import (
 )
 from baserow.contrib.database.rows.actions import ImportRowsActionType
 from baserow.contrib.database.rows.exceptions import ReportMaxErrorCountExceeded
+from baserow.contrib.database.rows.types import FileImportDict
 from baserow.contrib.database.table.actions import CreateTableActionType
 from baserow.contrib.database.table.exceptions import (
     InitialTableDataDuplicateName,
@@ -91,6 +92,7 @@ class FileImportJobType(JobType):
 
         filtered_dict = dict(**values)
         filtered_dict.pop("data")
+        filtered_dict.pop("configuration", None)
         return filtered_dict
 
     def after_job_creation(self, job, values):
@@ -99,7 +101,10 @@ class FileImportJobType(JobType):
         """
 
         data_file = ContentFile(
-            json.dumps(values["data"], ensure_ascii=False).encode("utf8")
+            json.dumps(
+                {"data": values["data"], "configuration": values.get("configuration")},
+                ensure_ascii=False,
+            ).encode("utf8")
         )
         job.data_file.save(None, data_file)
 
@@ -154,8 +159,7 @@ class FileImportJobType(JobType):
         """
 
         with job.data_file.open("r") as fin:
-            data = json.load(fin)
-
+            data: FileImportDict = json.load(fin)
         try:
             if job.table is None:
                 new_table, error_report = action_type_registry.get_by_type(
@@ -164,7 +168,7 @@ class FileImportJobType(JobType):
                     job.user,
                     job.database,
                     name=job.name,
-                    data=data,
+                    data=data["data"],
                     first_row_header=job.first_row_header,
                     progress=progress,
                 )
diff --git a/backend/src/baserow/contrib/database/plugins.py b/backend/src/baserow/contrib/database/plugins.py
index 1d7075fb5..a54dcf27c 100755
--- a/backend/src/baserow/contrib/database/plugins.py
+++ b/backend/src/baserow/contrib/database/plugins.py
@@ -66,7 +66,7 @@ class DatabasePlugin(Plugin):
                 ["John", "Von Neumann", "", True],
                 ["Blaise", "Pascal", "", True],
             ]
-            row_handler.import_rows(user, table, data, send_realtime_update=False)
+            row_handler.import_rows(user, table, data=data, send_realtime_update=False)
 
             # Creating the example projects table.
             table = table_handler.create_table_and_fields(
@@ -86,4 +86,4 @@ class DatabasePlugin(Plugin):
                 [_("Computer architecture"), str(date(1945, 1, 1)), False],
                 [_("Cellular Automata"), str(date(1952, 6, 1)), False],
             ]
-            row_handler.import_rows(user, table, data, send_realtime_update=False)
+            row_handler.import_rows(user, table, data=data, send_realtime_update=False)
diff --git a/backend/src/baserow/contrib/database/populate.py b/backend/src/baserow/contrib/database/populate.py
index 64c696f29..06659e181 100644
--- a/backend/src/baserow/contrib/database/populate.py
+++ b/backend/src/baserow/contrib/database/populate.py
@@ -95,7 +95,9 @@ def load_test_data():
             ("Rabbit", select_by_name["Meat"], fake.sentence(nb_words=10)),
         ]
 
-        RowHandler().import_rows(user, products_table, data, send_realtime_update=False)
+        RowHandler().import_rows(
+            user, products_table, data=data, send_realtime_update=False
+        )
 
     try:
         suppliers_table = Table.objects.get(name="Suppliers", database=database)
@@ -195,7 +197,7 @@ def load_test_data():
         ]
 
         RowHandler().import_rows(
-            user, suppliers_table, data, send_realtime_update=False
+            user, suppliers_table, data=data, send_realtime_update=False
         )
 
     try:
@@ -253,7 +255,7 @@ def load_test_data():
         ]
 
         RowHandler().import_rows(
-            user, retailers_table, data, send_realtime_update=False
+            user, retailers_table, data=data, send_realtime_update=False
         )
 
     try:
@@ -358,5 +360,5 @@ def load_test_data():
         ]
 
         RowHandler().import_rows(
-            user, user_accounts_table, data, send_realtime_update=False
+            user, user_accounts_table, data=data, send_realtime_update=False
         )
diff --git a/backend/src/baserow/contrib/database/rows/actions.py b/backend/src/baserow/contrib/database/rows/actions.py
index e52088efa..2228511e7 100755
--- a/backend/src/baserow/contrib/database/rows/actions.py
+++ b/backend/src/baserow/contrib/database/rows/actions.py
@@ -6,6 +6,8 @@ from typing import Any, Dict, List, Optional, Tuple, Type
 from django.contrib.auth.models import AbstractUser
 from django.utils.translation import gettext_lazy as _
 
+from loguru import logger
+
 from baserow.contrib.database.action.scopes import (
     TABLE_ACTION_CONTEXT,
     TableActionScopeType,
@@ -18,6 +20,7 @@ from baserow.contrib.database.rows.handler import (
     GeneratedTableModelForUpdate,
     RowHandler,
 )
+from baserow.contrib.database.rows.types import FileImportDict
 from baserow.contrib.database.table.handler import TableHandler
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
 from baserow.core.action.models import Action
@@ -178,13 +181,17 @@ class CreateRowsActionType(UndoableActionType):
                 "Can't create rows because it has a data sync."
             )
 
-        rows = RowHandler().create_rows(
-            user,
-            table,
-            rows_values,
-            before_row=before_row,
-            model=model,
-            send_webhook_events=send_webhook_events,
+        rows = (
+            RowHandler()
+            .create_rows(
+                user,
+                table,
+                rows_values,
+                before_row=before_row,
+                model=model,
+                send_webhook_events=send_webhook_events,
+            )
+            .created_rows
         )
 
         workspace = table.database.workspace
@@ -244,7 +251,7 @@ class ImportRowsActionType(UndoableActionType):
         cls,
         user: AbstractUser,
         table: Table,
-        data=List[List[Any]],
+        data: FileImportDict,
         progress: Optional[Progress] = None,
     ) -> Tuple[List[GeneratedTableModel], Dict[str, Any]]:
         """
@@ -270,9 +277,14 @@ class ImportRowsActionType(UndoableActionType):
             )
 
         created_rows, error_report = RowHandler().import_rows(
-            user, table, data, progress=progress
+            user,
+            table,
+            data=data["data"],
+            configuration=data.get("configuration") or {},
+            progress=progress,
         )
-
+        if error_report:
+            logger.warning(f"Errors during rows import: {error_report}")
         workspace = table.database.workspace
         params = cls.Params(
             table.id,
diff --git a/backend/src/baserow/contrib/database/rows/exceptions.py b/backend/src/baserow/contrib/database/rows/exceptions.py
index 96f5fbfce..b494b4586 100644
--- a/backend/src/baserow/contrib/database/rows/exceptions.py
+++ b/backend/src/baserow/contrib/database/rows/exceptions.py
@@ -36,3 +36,12 @@ class CannotDeleteRowsInTable(Exception):
     """
     Raised when it's not possible to delete rows in the table.
     """
+
+
+class InvalidRowLength(Exception):
+    """
+    Row's length doesn't match expected length based on schema.
+    """
+
+    def __init__(self, row_idx: int):
+        self.row_idx = row_idx
diff --git a/backend/src/baserow/contrib/database/rows/handler.py b/backend/src/baserow/contrib/database/rows/handler.py
index 54b0c4667..a7b2f7f16 100644
--- a/backend/src/baserow/contrib/database/rows/handler.py
+++ b/backend/src/baserow/contrib/database/rows/handler.py
@@ -1,14 +1,13 @@
 from collections import defaultdict
 from copy import deepcopy
 from decimal import Decimal
+from functools import cached_property
 from typing import (
     TYPE_CHECKING,
     Any,
     Dict,
     Iterable,
     List,
-    NamedTuple,
-    NewType,
     Optional,
     Set,
     Tuple,
@@ -17,24 +16,37 @@ from typing import (
     cast,
 )
 
+from django import db
 from django.contrib.auth.models import AbstractUser
 from django.core.exceptions import ValidationError
 from django.db import connection, transaction
+from django.db.models import Field as DjangoField
 from django.db.models import Model, QuerySet, Window
 from django.db.models.expressions import RawSQL
 from django.db.models.fields.related import ForeignKey, ManyToManyField
 from django.db.models.functions import RowNumber
 from django.utils.encoding import force_str
 
+from celery.utils import chunks
 from opentelemetry import metrics, trace
 
 from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
 from baserow.contrib.database.fields.dependencies.update_collector import (
     FieldUpdateCollector,
 )
+from baserow.contrib.database.fields.exceptions import (
+    FieldNotInTable,
+    IncompatibleField,
+)
 from baserow.contrib.database.fields.field_cache import FieldCache
-from baserow.contrib.database.fields.registries import field_type_registry
+from baserow.contrib.database.fields.registries import FieldType, field_type_registry
 from baserow.contrib.database.fields.utils import get_field_id_from_field_key
+from baserow.contrib.database.search.handler import SearchHandler
+from baserow.contrib.database.table.constants import (
+    CREATED_BY_COLUMN_NAME,
+    LAST_MODIFIED_BY_COLUMN_NAME,
+    ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
+)
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
 from baserow.contrib.database.table.operations import (
     CreateRowDatabaseTableOperationType,
@@ -49,20 +61,15 @@ from baserow.core.db import (
 )
 from baserow.core.exceptions import CannotCalculateIntermediateOrder
 from baserow.core.handler import CoreHandler
+from baserow.core.psycopg import sql
 from baserow.core.telemetry.utils import baserow_trace_methods
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.trash.registries import trash_item_type_registry
 from baserow.core.utils import Progress, get_non_unique_values, grouper
 
-from ..search.handler import SearchHandler
-from ..table.constants import (
-    CREATED_BY_COLUMN_NAME,
-    LAST_MODIFIED_BY_COLUMN_NAME,
-    ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
-)
 from .constants import ROW_IMPORT_CREATION, ROW_IMPORT_VALIDATION
 from .error_report import RowErrorReport
-from .exceptions import RowDoesNotExist, RowIdsNotUnique
+from .exceptions import InvalidRowLength, RowDoesNotExist, RowIdsNotUnique
 from .operations import (
     DeleteDatabaseRowOperationType,
     MoveRowDatabaseRowOperationType,
@@ -77,19 +84,23 @@ from .signals import (
     rows_deleted,
     rows_updated,
 )
+from .types import (
+    CreatedRowsData,
+    FieldsMetadata,
+    FileImportConfiguration,
+    GeneratedTableModelForUpdate,
+    RowId,
+    RowsForUpdate,
+    UpdatedRowsData,
+)
 
 if TYPE_CHECKING:
+    from django.db.backends.utils import CursorWrapper
+
     from baserow.contrib.database.fields.models import Field
 
 tracer = trace.get_tracer(__name__)
 
-GeneratedTableModelForUpdate = NewType(
-    "GeneratedTableModelForUpdate", GeneratedTableModel
-)
-
-RowsForUpdate = NewType("RowsForUpdate", QuerySet)
-
-
 BATCH_SIZE = 1024
 
 meter = metrics.get_meter(__name__)
@@ -139,29 +150,18 @@ def prepare_field_errors(field_errors):
     }
 
 
-FieldsMetadata = NewType("FieldsMetadata", Dict[str, Any])
-RowValues = NewType("RowValues", Dict[str, Any])
-RowId = NewType("RowId", int)
-
-
-class UpdatedRowsWithOldValuesAndMetadata(NamedTuple):
-    updated_rows: List[GeneratedTableModelForUpdate]
-    original_rows_values_by_id: Dict[RowId, RowValues]
-    updated_fields_metadata_by_row_id: Dict[RowId, FieldsMetadata]
-
-
 class RowM2MChangeTracker:
     def __init__(self):
         self._deleted_m2m_rels: Dict[
-            str, Dict["Field", Dict[GeneratedTableModel, Set[int]]]
+            str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
         ] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
         self._created_m2m_rels: Dict[
-            str, Dict["Field", Dict[GeneratedTableModel, Set[int]]]
+            str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
         ] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
 
     def track_m2m_update_for_field_and_row(
         self,
-        field: "Field",
+        field: "DjangoField",
         field_name: str,
         row: GeneratedTableModel,
         new_values: Iterable[int],
@@ -181,7 +181,7 @@ class RowM2MChangeTracker:
     def track_m2m_created_for_new_row(
         self,
         row: GeneratedTableModel,
-        field: "Field",
+        field: "DjangoField",
         new_values: Iterable[Union[int, Model]],
     ):
         field_type = field_type_registry.get_by_model(field)
@@ -197,7 +197,7 @@ class RowM2MChangeTracker:
 
     def get_created_m2m_rels_per_field_for_type(
         self, field_type
-    ) -> Dict["Field", Dict[GeneratedTableModel, Set[int]]]:
+    ) -> Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]:
         return self._created_m2m_rels[field_type]
 
     def get_deleted_link_row_rels_for_update_collector(
@@ -1021,7 +1021,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         updated_field_ids: Set[int],
         m2m_change_tracker: Optional[RowM2MChangeTracker] = None,
         skip_search_updates: bool = False,
-    ) -> List["Field"]:
+    ) -> List["DjangoField"]:
         """
         Prepares a list of fields that are dependent on the updated fields and updates
         them.
@@ -1088,7 +1088,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_webhook_events: bool = True,
         generate_error_report: bool = False,
         skip_search_update: bool = False,
-    ) -> List[GeneratedTableModel]:
+    ) -> CreatedRowsData:
         """
         Creates new rows for a given table without checking permissions. It also calls
         the rows_created signal.
@@ -1223,9 +1223,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             dependant_fields=dependant_fields,
         )
 
-        if generate_error_report:
-            return inserted_rows, report
-        return rows_to_return
+        return CreatedRowsData(rows_to_return, report)
 
     def create_rows(
         self,
@@ -1238,7 +1236,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_webhook_events: bool = True,
         generate_error_report: bool = False,
         skip_search_update: bool = False,
-    ) -> List[GeneratedTableModel]:
+    ) -> CreatedRowsData:
         """
         Creates new rows for a given table if the user
         belongs to the related workspace. It also calls the rows_created signal.
@@ -1289,7 +1287,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         self,
         model: Type[GeneratedTableModel],
         created_rows: List[GeneratedTableModel],
-    ) -> List["Field"]:
+    ) -> List["DjangoField"]:
         """
         Generates a list of dependant fields that need to be updated after the rows have
         been created and updates them.
@@ -1443,11 +1441,11 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
 
         return report
 
-    def create_rows_by_batch(
+    def force_create_rows_by_batch(
         self,
         user: AbstractUser,
         table: Table,
-        rows: List[Dict[str, Any]],
+        rows_values: List[Dict[str, Any]],
         progress: Optional[Progress] = None,
         model: Optional[Type[GeneratedTableModel]] = None,
     ) -> Tuple[List[GeneratedTableModel], Dict[str, Dict[str, Any]]]:
@@ -1457,13 +1455,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
 
         :param user: The user of whose behalf the rows are created.
         :param table: The table for which the rows should be created.
-        :param rows: List of rows values for rows that need to be created.
+        :param rows_values: List of rows values for rows that need to be created.
         :param progress: Give a progress instance to track the progress of the import.
         :param model: Optional model to prevent recomputing table model.
         :return: The created rows and the error report.
         """
 
-        if not rows:
+        if not rows_values:
             return [], {}
 
         if progress:
@@ -1474,7 +1472,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
 
         report = {}
         all_created_rows = []
-        for count, chunk in enumerate(grouper(BATCH_SIZE, rows)):
+        for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
             row_start_index = count * BATCH_SIZE
             created_rows, creation_report = self.create_rows(
                 user=user,
@@ -1503,11 +1501,64 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
 
         return all_created_rows, report
 
+    def force_update_rows_by_batch(
+        self,
+        user: AbstractUser,
+        table: Table,
+        rows_values: List[Dict[str, Any]],
+        progress: Progress,
+        model: Optional[Type[GeneratedTableModel]] = None,
+    ) -> Tuple[List[GeneratedTableModelForUpdate], Dict[str, Dict[str, Any]]]:
+        """
+        Updates rows in batches and generates an error report instead of
+        failing on the first error.
+
+        :param user: The user on whose behalf the rows are updated.
+        :param table: The table in which the rows should be updated.
+        :param rows_values: List of row values for rows that need to be updated.
+        :param progress: A progress instance to track the progress of the import.
+        :param model: Optional model to prevent recomputing the table model.
+        :return: The updated rows and the error report.
+        """
+
+        if not rows_values:
+            return [], {}
+
+        progress.increment(state=ROW_IMPORT_CREATION)
+
+        if model is None:
+            model = table.get_model()
+
+        report = {}
+        all_updated_rows = []
+        for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
+            updated_rows = self.force_update_rows(
+                user=user,
+                table=table,
+                model=model,
+                rows_values=chunk,
+                send_realtime_update=False,
+                send_webhook_events=False,
+                # Don't trigger loads of search updates for every batch of rows we
+                # create but instead a single one for this entire table at the end.
+                skip_search_update=True,
+                generate_error_report=True,
+            )
+
+            if progress:
+                progress.increment(len(chunk))
+            report.update(updated_rows.errors)
+            all_updated_rows.extend(updated_rows.updated_rows)
+
+        SearchHandler.field_value_updated_or_created(table)
+        return all_updated_rows, report
+
     def import_rows(
         self,
         user: AbstractUser,
         table: Table,
-        data: List[List[Any]],
+        data: list[list[Any]],
+        configuration: FileImportConfiguration | None = None,
         validate: bool = True,
         progress: Optional[Progress] = None,
         send_realtime_update: bool = True,
@@ -1523,12 +1574,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         :param user: The user of whose behalf the rows are created.
         :param table: The table for which the rows should be created.
         :param data: List of rows values for rows that need to be created.
+        :param configuration: Optional import configuration dict.
         :param validate: If True the data are validated before the import.
         :param progress: Give a progress instance to track the progress of the
             import.
         :param send_realtime_update: The parameter passed to the rows_created
             signal indicating if a realtime update should be send.
 
+        :raises InvalidRowLength: When a row in `upsert_values` doesn't match
+            the number of fields selected in `upsert_fields`.
+
         :return: The created row instances and the error report.
         """
 
@@ -1541,6 +1595,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         )
 
         error_report = RowErrorReport(data)
+        configuration = configuration or {}
+        update_handler = UpsertRowsMappingHandler(
+            table=table,
+            upsert_fields=configuration.get("upsert_fields") or [],
+            upsert_values=configuration.get("upsert_values") or [],
+        )
+        # Validate the upsert configuration up front; this can raise
+        # InvalidRowLength for malformed upsert value rows.
+        update_handler.validate()
 
         model = table.get_model()
 
@@ -1605,10 +1668,40 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             else None
         )
 
-        created_rows, creation_report = self.create_rows_by_batch(
-            user, table, valid_rows, progress=creation_sub_progress, model=model
+        # Split the rows into insert and update lists. If no upsert fields are
+        # selected, rows_values_to_update stays empty.
+        update_map = update_handler.process_map
+
+        rows_values_to_create = []
+        rows_values_to_update = []
+        if update_map:
+            for current_idx, import_idx in original_row_index_mapping.items():
+                row = valid_rows[current_idx]
+                if row_id := update_map.get(import_idx):
+                    row["id"] = row_id
+                    rows_values_to_update.append(row)
+                else:
+                    rows_values_to_create.append(row)
+        else:
+            rows_values_to_create = valid_rows
+
+        created_rows, creation_report = self.force_create_rows_by_batch(
+            user,
+            table,
+            rows_values_to_create,
+            progress=creation_sub_progress,
+            model=model,
         )
 
+        if rows_values_to_update:
+            updated_rows, updated_report = self.force_update_rows_by_batch(
+                user,
+                table,
+                rows_values_to_update,
+                progress=creation_sub_progress,
+                model=model,
+            )
+
         # Add errors to global report
         for index, error in creation_report.items():
             error_report.add_error(
@@ -1616,6 +1709,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
                 error,
             )
 
+        if rows_values_to_update:
+            for index, error in updated_report.items():
+                error_report.add_error(
+                    original_row_index_mapping[int(index)],
+                    error,
+                )
+
         if send_realtime_update:
             # Just send a single table_updated here as realtime update instead
             # of rows_created because we might import a lot of rows.
@@ -1626,7 +1726,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
     def get_fields_metadata_for_row_history(
         self,
         row: GeneratedTableModelForUpdate,
-        updated_fields: List["Field"],
+        updated_fields: List["DjangoField"],
         metadata,
     ) -> FieldsMetadata:
         """
@@ -1648,7 +1748,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
     def get_fields_metadata_for_rows(
         self,
         rows: List[GeneratedTableModelForUpdate],
-        updated_fields: List["Field"],
+        updated_fields: List["DjangoField"],
         fields_metadata_by_row_id=None,
     ) -> Dict[RowId, FieldsMetadata]:
         """
@@ -1684,7 +1784,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_realtime_update: bool = True,
         send_webhook_events: bool = True,
         skip_search_update: bool = False,
-    ) -> UpdatedRowsWithOldValuesAndMetadata:
+        generate_error_report: bool = False,
+    ) -> UpdatedRowsData:
         """
         Updates field values in batch based on provided rows with the new
         values.
@@ -1704,6 +1805,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         :param skip_search_update: If you want to instead trigger the search handler
             cells update later on after many create_rows calls then set this to True
             but make sure you trigger it eventually.
+        :param generate_error_report: Generate error report if set to True.
         :raises RowIdsNotUnique: When trying to update the same row multiple
             times.
         :raises RowDoesNotExist: When any of the rows don't exist.
@@ -1716,9 +1818,12 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
 
         user_id = user and user.id
 
-        prepared_rows_values, _ = self.prepare_rows_in_bulk(
-            model._field_objects, rows_values
+        prepared_rows_values, errors = self.prepare_rows_in_bulk(
+            model._field_objects,
+            rows_values,
+            generate_error_report=generate_error_report,
         )
+        report = {index: err for index, err in errors.items()}
         row_ids = [r["id"] for r in prepared_rows_values]
 
         non_unique_ids = get_non_unique_values(row_ids)
@@ -1924,13 +2029,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         fields_metadata_by_row_id = self.get_fields_metadata_for_rows(
             updated_rows_to_return, updated_fields, fields_metadata_by_row_id
         )
-
-        return UpdatedRowsWithOldValuesAndMetadata(
+        updated_rows = UpdatedRowsData(
             updated_rows_to_return,
             original_row_values_by_id,
             fields_metadata_by_row_id,
+            report,
         )
 
+        return updated_rows
+
     def update_rows(
         self,
         user: AbstractUser,
@@ -1941,7 +2048,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_realtime_update: bool = True,
         send_webhook_events: bool = True,
         skip_search_update: bool = False,
-    ) -> UpdatedRowsWithOldValuesAndMetadata:
+        generate_error_report: bool = False,
+    ) -> UpdatedRowsData:
         """
         Updates field values in batch based on provided rows with the new
         values.
@@ -1984,6 +2092,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             send_realtime_update,
             send_webhook_events,
             skip_search_update,
+            generate_error_report=generate_error_report,
         )
 
     def get_rows(
@@ -2436,3 +2545,233 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             self,
             table=table,
         )
+
+
+def merge_values_expression(
+    row: list[str | int | float | None],
+    field_handlers: "list[UpsertFieldHandler]",
+    query_params: list,
+) -> sql.Composable:
+    """
+    Create an SQL expression that produces a text value from a list of row
+    values. Any value that should be interpolated is appended to the provided
+    `query_params` list.
+
+    :param row: a list of values in a row
+    :param field_handlers: a list of field handlers for a row. The number of
+            handlers should equal the number of values in the row.
+    :param query_params: container collecting the values to interpolate
+    :return: a composable SQL expression concatenating the row's values
+    """
+
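+    # For two text fields the composed expression is, roughly:
+    #
+    #   COALESCE(CAST(%s::text AS TEXT), '<NULL>')::TEXT
+    #   || '__-__' ||
+    #   COALESCE(CAST(%s::text AS TEXT), '<NULL>')::TEXT
+    #
+    # with the prepared values appended to `query_params` in the same order.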
+    fields = []
+
+    for val, field_handler in zip(row, field_handlers):
+        fields.append(field_handler.get_field_concat_expression())
+        query_params.append(field_handler.prepare_value(val))
+
+    return UpsertRowsMappingHandler.SEPARATOR.join(fields)
+
+
+class UpsertFieldHandler:
+    """
+    Helper class that handles upsert behavior for a single field.
+    """
+
+    def __init__(self, table: Table, field_id: int):
+        self.table = table
+        # TODO: a numeric field id is assumed here, but the value may also be
+        #  the literal `'id'` string.
+        try:
+            self._field_def = field_def = next(
+                (
+                    f
+                    for f in table.get_model().get_field_objects()
+                    if f["field"].id == field_id
+                )
+            )
+        except StopIteration:
+            raise FieldNotInTable(field_id)
+
+        self.field: "Field" = field_def["field"]
+        self.field_type: FieldType = field_def["type"]
+        if not self.field_type.can_upsert:
+            raise IncompatibleField(self.field.id)
+        self.field_name = self.field.db_column
+
+    def prepare_value(self, value: str) -> Any:
+        return self.field_type.prepare_value_for_db(self.field, value)
+
+    def get_field_concat_expression(self) -> sql.Composable:
+        column_type = sql.SQL(self.get_column_type() or "text")
+        return sql.SQL(" COALESCE(CAST({}::{} AS TEXT), '<NULL>')::TEXT ").format(
+            sql.Placeholder(), column_type
+        )
+
+    def get_column_type(self) -> str | None:
+        table_field: DjangoField = self.field_type.get_model_field(self.field)
+        return table_field.db_type(db.connection)
+
+
+class UpsertRowsMappingHandler:
+    """
+    Helper class for mapping new rows values to existing table rows during an upsert
+    operation.
+
+    This class processes upsert values from the provided data and matches them with
+    existing row IDs in the database. The resulting mapping helps determine which
+    imported rows should update existing ones.
+
+    ### Usage:
+
+    >>> handler = UpsertRowsMappingHandler(table, [1234], [["a"], ["b"]])
+
+    # Returns a dictionary where:
+    # - Keys represent the index of the upsert values in the imported dataset.
+    # - Values represent the corresponding row ID in the database.
+    >>> handler.process_map
+    {0: 1, 1: 2}
+
+    # In this example:
+    # - The first imported value ['a'] (index 0) corresponds to the row with ID 1.
+    # - The second imported value ['b'] (index 1) corresponds to the row with ID 2.
+    """
+
+    SEPARATOR = sql.SQL(" || '__-__' || ")
+    PER_CHUNK = 100
+
+    def __init__(
+        self, table: Table, upsert_fields: list[int], upsert_values: list[list[Any]]
+    ):
+        self.table = table
+        self.table_name = table.get_database_table_name()
+        self.import_fields = [UpsertFieldHandler(table, fidx) for fidx in upsert_fields]
+        self.upsert_values = upsert_values
+
+    def validate(self):
+        """
+        Validates that the upsert configuration conforms to formal requirements.
+
+        :raises InvalidRowLength: when a row's length doesn't match the number
+            of upsert fields.
+        """
+
+        expected_length = len(self.import_fields)
+        for ridx, uval in enumerate(self.upsert_values):
+            if len(uval) != expected_length:
+                raise InvalidRowLength(ridx)
+
+    @cached_property
+    def process_map(self) -> dict[int, int]:
+        """
+        Calculates a map between import row indexes and table row ids.
+        """
+
+        # no upsert value fields, no need for mapping
+        if not self.import_fields:
+            return {}
+
+        script_template = sql.SQL(
+            """
+        CREATE TEMP TABLE table_upsert_indexes (id INT, upsert_value TEXT, group_index INT);
+
+        CREATE TEMP TABLE table_import (id INT, upsert_value TEXT);
+
+        CREATE TEMP VIEW table_import_indexes AS
+                SELECT id, upsert_value, RANK()
+                        OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
+                        AS group_index
+                FROM table_import ORDER BY id ;
+        """
+        )
+
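+        # group_index ranks repeated upsert values on both sides, so the n-th
+        # occurrence of a value in the imported data is matched with the n-th
+        # occurrence in the table; this is why the imported data must keep the
+        # same order as the table rows.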
+        self.execute(script_template)
+        self.insert_table_values()
+        self.insert_imported_values()
+        # calculate_map returns raw (table row id, import row index) pairs.
+        calculated = self.calculate_map()
+
+        # map import row idx -> update row_id in table
+        return {r[1]: r[0] for r in calculated}
+
+    @cached_property
+    def connection(self):
+        return db.connection
+
+    @cached_property
+    def cursor(self):
+        return self.connection.cursor()
+
+    def execute(self, query, *args, **kwargs) -> "CursorWrapper":
+        self.cursor.execute(query, *args, **kwargs)
+        return self.cursor
+
+    def insert_table_values(self):
+        """
+        Populates the temp upsert comparison table with values from the
+        existing table. Values from multiple source columns are normalized
+        into one text value.
+        """
+
+        columns = self.SEPARATOR.join(
+            [
+                sql.SQL("COALESCE(CAST({} AS TEXT), '<NULL>')::TEXT").format(
+                    sql.Identifier(field.field_name)
+                )
+                for field in self.import_fields
+            ]
+        )
+
+        query = sql.SQL(
+            """WITH subq AS (SELECT r.id,  {} AS upsert_value FROM {} r WHERE NOT trashed)
+                INSERT INTO table_upsert_indexes (id, upsert_value, group_index)
+                SELECT id, upsert_value, RANK()
+                        OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
+                        AS group_index
+                FROM subq ORDER BY id """
+        ).format(
+            columns, sql.Identifier(self.table_name)
+        )  # nosec B608
+
+        self.execute(query)
+
+    def insert_imported_values(self):
+        """
+        Builds and executes bulk insert queries for upsert comparison values
+        from import data.
+        """
+
+        for _chunk in chunks(enumerate(self.upsert_values), self.PER_CHUNK):
+            # put all params (processed values) for the query into a container
+            query_params = []
+            rows_query = []
+            for rowidx, row in _chunk:
+                # per-row insert query
+                query_params.append(rowidx)
+                row_to_add = sql.SQL("({}, {})").format(
+                    sql.Placeholder(),
+                    merge_values_expression(row, self.import_fields, query_params),
+                )
+                rows_query.append(row_to_add)
+
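+            # Each chunk composes one multi-row statement, roughly:
+            #
+            #   INSERT INTO table_import (id, upsert_value)
+            #   VALUES (%s, COALESCE(...) || '__-__' || COALESCE(...)), ...;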
+            rows_placeholder = sql.SQL(",\n").join(rows_query)
+            script_template = sql.SQL(
+                "INSERT INTO table_import (id, upsert_value) VALUES {};"
+            ).format(
+                rows_placeholder
+            )  # nosec B608
+            self.execute(script_template, query_params)
+
+    def calculate_map(self) -> list[tuple[int, int]]:
+        """
+        Calculates a map between imported row index -> table row id
+        that can be used to detect if a row that is imported should be updated
+        (mapping exists) or inserted as a new one.
+        """
+
+        q = sql.SQL(
+            """
+        SELECT t.id, i.id
+            FROM table_upsert_indexes t
+            JOIN table_import_indexes i
+                ON (i.upsert_value = t.upsert_value
+                    AND i.group_index = t.group_index);
+        """
+        )
+        return self.execute(q).fetchall()
diff --git a/backend/src/baserow/contrib/database/rows/types.py b/backend/src/baserow/contrib/database/rows/types.py
new file mode 100644
index 000000000..149dfc8c9
--- /dev/null
+++ b/backend/src/baserow/contrib/database/rows/types.py
@@ -0,0 +1,39 @@
+from typing import Any, NamedTuple, NewType, TypedDict
+
+from django.db.models import QuerySet
+
+from baserow.contrib.database.table.models import GeneratedTableModel
+
+GeneratedTableModelForUpdate = NewType(
+    "GeneratedTableModelForUpdate", GeneratedTableModel
+)
+
+RowsForUpdate = NewType("RowsForUpdate", QuerySet)
+
+
+class FileImportConfiguration(TypedDict):
+    upsert_fields: list[int]
+    upsert_values: list[list[Any]]
+
+
+class FileImportDict(TypedDict):
+    data: list[list[Any]]
+    configuration: FileImportConfiguration | None
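+
+
+# A hypothetical example of the payload shape persisted for a file import job:
+#
+#   {"data": [["Ada", 1], ["Alan", 2]],
+#    "configuration": {"upsert_fields": [42], "upsert_values": [["Ada"], ["Alan"]]}}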
+
+
+FieldsMetadata = NewType("FieldsMetadata", dict[str, Any])
+RowValues = NewType("RowValues", dict[str, Any])
+RowId = NewType("RowId", int)
+
+
+class UpdatedRowsData(NamedTuple):
+    updated_rows: list[GeneratedTableModelForUpdate]
+    original_rows_values_by_id: dict[RowId, RowValues]
+    updated_fields_metadata_by_row_id: dict[RowId, FieldsMetadata]
+    errors: dict[int, dict[str, Any]] | None = None
+
+
+class CreatedRowsData(NamedTuple):
+    created_rows: list[GeneratedTableModel]
+    errors: dict[int, dict[str, Any]] | None = None
diff --git a/backend/src/baserow/contrib/database/table/handler.py b/backend/src/baserow/contrib/database/table/handler.py
index 221d3e0fd..c0e832edd 100644
--- a/backend/src/baserow/contrib/database/table/handler.py
+++ b/backend/src/baserow/contrib/database/table/handler.py
@@ -486,7 +486,11 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
         table = self.create_table_and_fields(user, database, name, fields)
 
         _, error_report = RowHandler().import_rows(
-            user, table, data, progress=progress, send_realtime_update=False
+            user,
+            table,
+            data=data,
+            progress=progress,
+            send_realtime_update=False,
         )
 
         table_created.send(self, table=table, user=user)
diff --git a/backend/src/baserow/core/jobs/tasks.py b/backend/src/baserow/core/jobs/tasks.py
index b8ce994db..d4024a94c 100644
--- a/backend/src/baserow/core/jobs/tasks.py
+++ b/backend/src/baserow/core/jobs/tasks.py
@@ -64,7 +64,6 @@ def run_async_job(self, job_id: int):
 
         job.set_state_failed(str(e), error)
         job.save()
-
         raise
     finally:
         # Delete the import job cached entry because the transaction has been committed
diff --git a/backend/src/baserow/test_utils/fixtures/file_import.py b/backend/src/baserow/test_utils/fixtures/file_import.py
index 29bae68d2..86333cd2a 100644
--- a/backend/src/baserow/test_utils/fixtures/file_import.py
+++ b/backend/src/baserow/test_utils/fixtures/file_import.py
@@ -33,6 +33,7 @@ class FileImportFixtures:
                 for field_index in range(column_count):
                     row.append(f"data_{index}_{field_index}")
                 data.append(row)
+            data = {"data": data}
         else:
             data = kwargs.pop("data")
 
diff --git a/backend/src/baserow/test_utils/fixtures/row.py b/backend/src/baserow/test_utils/fixtures/row.py
index a140f2098..004b761c9 100644
--- a/backend/src/baserow/test_utils/fixtures/row.py
+++ b/backend/src/baserow/test_utils/fixtures/row.py
@@ -78,7 +78,7 @@ class RowFixture:
                 for row in rows
             ],
         )
-        return created_rows
+        return created_rows.created_rows
 
     def get_rows(self, fields: List[Field]) -> List[List[Any]]:
         model = fields[0].table.get_model()
diff --git a/backend/src/baserow/test_utils/fixtures/table.py b/backend/src/baserow/test_utils/fixtures/table.py
index 8d615713f..97319027f 100644
--- a/backend/src/baserow/test_utils/fixtures/table.py
+++ b/backend/src/baserow/test_utils/fixtures/table.py
@@ -57,16 +57,20 @@ class TableFixtures:
                 )
             )
         if rows:
-            created_rows = RowHandler().force_create_rows(
-                user=user,
-                table=table,
-                rows_values=[
-                    {
-                        f"field_{field.id}": row[index]
-                        for index, field in enumerate(fields)
-                    }
-                    for row in rows
-                ],
+            created_rows = (
+                RowHandler()
+                .force_create_rows(
+                    user=user,
+                    table=table,
+                    rows_values=[
+                        {
+                            f"field_{field.id}": row[index]
+                            for index, field in enumerate(fields)
+                        }
+                        for row in rows
+                    ],
+                )
+                .created_rows
             )
         else:
             created_rows = []
diff --git a/backend/src/baserow/test_utils/helpers.py b/backend/src/baserow/test_utils/helpers.py
index 64fa1bb99..2d6444b88 100644
--- a/backend/src/baserow/test_utils/helpers.py
+++ b/backend/src/baserow/test_utils/helpers.py
@@ -318,7 +318,7 @@ def setup_interesting_test_table(
 
         blank_row, row = row_handler.force_create_rows(
             user, table, [{}, row_values], model=model
-        )
+        ).created_rows
 
     # Setup the link rows
     linked_row_1, linked_row_2, linked_row_3 = row_handler.force_create_rows(
@@ -337,7 +337,7 @@ def setup_interesting_test_table(
                 link_table_primary_text_field.db_column: "",
             },
         ],
-    )
+    ).created_rows
     linked_row_4, linked_row_5, linked_row_6 = row_handler.force_create_rows(
         user=user,
         table=decimal_link_table,
@@ -352,7 +352,7 @@ def setup_interesting_test_table(
                 decimal_table_primary_decimal_field.db_column: None,
             },
         ],
-    )
+    ).created_rows
     with freeze_time("2020-01-01 12:00"):
         user_file_1 = data_fixture.create_user_file(
             original_name=f"name{file_suffix}.txt",
@@ -372,7 +372,7 @@ def setup_interesting_test_table(
                 file_link_table_primary_file_field.db_column: None,
             },
         ],
-    )
+    ).created_rows
     link_row_9, link_row_10 = row_handler.force_create_rows(
         user=user,
         table=multiple_collaborators_link_table,
@@ -389,7 +389,7 @@ def setup_interesting_test_table(
                 ],
             },
         ],
-    )
+    ).created_rows
 
     link_row_field_id = name_to_field_id["link_row"]
     link_row_field_without_related_id = name_to_field_id["link_row_without_related"]
diff --git a/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py b/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py
index bb5e12d8c..195ac5163 100644
--- a/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py
+++ b/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py
@@ -712,17 +712,21 @@ def test_dispatch_local_baserow_upsert_row_workflow_action_with_unmatching_index
             ],
         )
         field = table.field_set.get()
-        rows = RowHandler().create_rows(
-            user,
-            table,
-            rows_values=[
-                {f"field_{field.id}": "Community Engagement"},
-                {f"field_{field.id}": "Construction"},
-                {f"field_{field.id}": "Complex Construction Design"},
-                {f"field_{field.id}": "Simple Construction Design"},
-                {f"field_{field.id}": "Landscape Design"},
-                {f"field_{field.id}": "Infrastructure Design"},
-            ],
+        rows = (
+            RowHandler()
+            .create_rows(
+                user,
+                table,
+                rows_values=[
+                    {f"field_{field.id}": "Community Engagement"},
+                    {f"field_{field.id}": "Construction"},
+                    {f"field_{field.id}": "Complex Construction Design"},
+                    {f"field_{field.id}": "Simple Construction Design"},
+                    {f"field_{field.id}": "Landscape Design"},
+                    {f"field_{field.id}": "Infrastructure Design"},
+                ],
+            )
+            .created_rows
         )
 
     builder = data_fixture.create_builder_application(workspace=workspace)
diff --git a/backend/tests/baserow/contrib/database/api/rows/test_row_views.py b/backend/tests/baserow/contrib/database/api/rows/test_row_views.py
index 71897f9b7..b73868445 100644
--- a/backend/tests/baserow/contrib/database/api/rows/test_row_views.py
+++ b/backend/tests/baserow/contrib/database/api/rows/test_row_views.py
@@ -3270,14 +3270,18 @@ def test_get_row_adjacent(api_client, data_fixture):
     table = data_fixture.create_database_table(name="table", user=user)
     field = data_fixture.create_text_field(name="some name", table=table)
 
-    [row_1, row_2, row_3] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "some value"},
-            {f"field_{field.id}": "some value"},
-            {f"field_{field.id}": "some value"},
-        ],
+    [row_1, row_2, row_3] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "some value"},
+                {f"field_{field.id}": "some value"},
+                {f"field_{field.id}": "some value"},
+            ],
+        )
+        .created_rows
     )
 
     # Get the next row
@@ -3325,14 +3329,18 @@ def test_get_row_adjacent_view_id_provided(api_client, data_fixture):
         user, field=field, view=view, type="contains", value="a"
     )
 
-    [row_1, row_2, row_3] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "ab"},
-            {f"field_{field.id}": "b"},
-            {f"field_{field.id}": "a"},
-        ],
+    [row_1, row_2, row_3] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "ab"},
+                {f"field_{field.id}": "b"},
+                {f"field_{field.id}": "a"},
+            ],
+        )
+        .created_rows
     )
 
     response = api_client.get(
@@ -3358,14 +3366,18 @@ def test_get_row_adjacent_view_id_no_adjacent_row(api_client, data_fixture):
     table = data_fixture.create_database_table(name="table", user=user)
     field = data_fixture.create_text_field(name="field", table=table)
 
-    [row_1, row_2, row_3] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "a"},
-            {f"field_{field.id}": "b"},
-            {f"field_{field.id}": "c"},
-        ],
+    [row_1, row_2, row_3] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "a"},
+                {f"field_{field.id}": "b"},
+                {f"field_{field.id}": "c"},
+            ],
+        )
+        .created_rows
     )
 
     response = api_client.get(
@@ -3469,14 +3481,18 @@ def test_get_row_adjacent_search(api_client, data_fixture, search_mode):
     table = data_fixture.create_database_table(name="table", user=user)
     field = data_fixture.create_text_field(name="field", table=table)
 
-    [row_1, row_2, row_3] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "a"},
-            {f"field_{field.id}": "ab"},
-            {f"field_{field.id}": "c"},
-        ],
+    [row_1, row_2, row_3] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "a"},
+                {f"field_{field.id}": "ab"},
+                {f"field_{field.id}": "c"},
+            ],
+        )
+        .created_rows
     )
     SearchHandler.update_tsvector_columns(
         table, update_tsvectors_for_changed_rows_only=False
@@ -4432,7 +4448,7 @@ def test_link_row_field_validate_input_data_for_read_only_primary_fields(
         user=user, table_b=table_b
     )
 
-    (row_b1,) = RowHandler().create_rows(user, table_b, [{}])
+    (row_b1,) = RowHandler().create_rows(user, table_b, [{}]).created_rows
     row_b1_pk = str(getattr(row_b1, pk_field.db_column))
 
     # using a valid value as reference to the row should work
diff --git a/backend/tests/baserow/contrib/database/api/tables/test_table_views.py b/backend/tests/baserow/contrib/database/api/tables/test_table_views.py
index d9b3c3fe9..bf24afa95 100644
--- a/backend/tests/baserow/contrib/database/api/tables/test_table_views.py
+++ b/backend/tests/baserow/contrib/database/api/tables/test_table_views.py
@@ -17,6 +17,7 @@ from rest_framework.status import (
 from baserow.contrib.database.data_sync.handler import DataSyncHandler
 from baserow.contrib.database.file_import.models import FileImportJob
 from baserow.contrib.database.table.models import Table
+from baserow.core.jobs.models import Job
 from baserow.test_utils.helpers import (
     assert_serialized_rows_contain_same_values,
     independent_test_db_connection,
@@ -248,7 +249,7 @@ def test_create_table_with_data(
     with patch_filefield_storage():
         with job.data_file.open("r") as fin:
             data = json.load(fin)
-            assert data == [
+            assert data.get("data") == [
                 ["A", "B", "C", "D"],
                 ["1-1", "1-2", "1-3", "1-4", "1-5"],
                 ["2-1", "2-2", "2-3"],
@@ -647,3 +648,144 @@ def test_async_duplicate_interesting_table(api_client, data_fixture):
 
     for original_row, duplicated_row in zip(original_rows, duplicated_rows):
         assert_serialized_rows_contain_same_values(original_row, duplicated_row)
+
+
+@pytest.mark.django_db
+def test_import_table_call(api_client, data_fixture):
+    """
+    A simple test checking request body validation for the table import endpoint.
+    """
+
+    user, token = data_fixture.create_user_and_token()
+    database = data_fixture.create_database_application(user=user)
+    table = data_fixture.create_database_table(database=database)
+    data_fixture.create_text_field(table=table, user=user)
+    data_fixture.create_number_field(table=table, user=user)
+
+    url = reverse("api:database:tables:import_async", kwargs={"table_id": table.id})
+
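+    # A plain import without configuration is accepted and schedules a file_import job.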
+    valid_data_no_configuration = {"data": [["1", 1], ["2", 1]]}
+
+    response = api_client.post(
+        url,
+        HTTP_AUTHORIZATION=f"JWT {token}",
+        data=valid_data_no_configuration,
+        format="json",
+    )
+    assert response.status_code == HTTP_200_OK
+    rdata = response.json()
+
+    assert isinstance(rdata.get("id"), int)
+    assert rdata.get("type") == "file_import"
+    Job.objects.all().delete()
+
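+    # An empty configuration object is also valid; upsert settings simply default to None.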
+    valid_data_with_configuration = {"data": [["1", 1], ["2", 1]], "configuration": {}}
+    response = api_client.post(
+        url,
+        HTTP_AUTHORIZATION=f"JWT {token}",
+        data=valid_data_with_configuration,
+        format="json",
+    )
+    rdata = response.json()
+
+    assert response.status_code == HTTP_200_OK
+    assert isinstance(rdata.get("id"), int)
+    assert rdata.get("type") == "file_import"
+    Job.objects.all().delete()
+
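+    # An empty upsert_fields list violates the serializer's min_length=1 constraint.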
+    invalid_data_with_configuration = {
+        "data": [["1", 1], ["2", 1]],
+        "configuration": {"upsert_fields": []},
+    }
+    response = api_client.post(
+        url,
+        HTTP_AUTHORIZATION=f"JWT {token}",
+        data=invalid_data_with_configuration,
+        format="json",
+    )
+    rdata = response.json()
+
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    assert rdata == {
+        "error": "ERROR_REQUEST_BODY_VALIDATION",
+        "detail": {
+            "configuration": {
+                "upsert_fields": [
+                    {
+                        "error": "Ensure this field has at least 1 elements.",
+                        "code": "min_length",
+                    }
+                ]
+            }
+        },
+    }
+    Job.objects.all().delete()
+
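+    # The data key itself is required; an empty request body is rejected.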
+    invalid_data = {}
+    response = api_client.post(
+        url, HTTP_AUTHORIZATION=f"JWT {token}", data=invalid_data
+    )
+    rdata = response.json()
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    assert rdata == {
+        "error": "ERROR_REQUEST_BODY_VALIDATION",
+        "detail": {"data": [{"error": "This field is required.", "code": "required"}]},
+    }
+
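+    # upsert_fields without matching upsert_values fails the configuration-level validation.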
+    invalid_data = {
+        "data": [["1", 1], ["2", 1]],
+        "configuration": {"upsert_fields": [1, 2]},
+    }
+    response = api_client.post(
+        url,
+        HTTP_AUTHORIZATION=f"JWT {token}",
+        data=invalid_data,
+        format="json",
+    )
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    rdata = response.json()
+
+    assert rdata == {
+        "error": "ERROR_REQUEST_BODY_VALIDATION",
+        "detail": {
+            "configuration": {
+                "upsert_value": [
+                    {
+                        "error": "upsert_values must not be empty when upsert_fields are provided.",
+                        "code": "invalid",
+                    }
+                ]
+            }
+        },
+    }
+
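+    # upsert_values must contain one entry per imported row; a length mismatch is rejected.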
+    invalid_data = {
+        "data": [["1", 1], ["2", 1]],
+        "configuration": {"upsert_fields": [1, 2], "upsert_values": [["a"]]},
+    }
+    response = api_client.post(
+        url,
+        HTTP_AUTHORIZATION=f"JWT {token}",
+        data=invalid_data,
+        format="json",
+    )
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    rdata = response.json()
+
+    assert rdata == {
+        "error": "ERROR_REQUEST_BODY_VALIDATION",
+        "detail": {
+            "data": [
+                {
+                    "error": "`data` and `configuration.upsert_values` should have the same length.",
+                    "code": "invalid",
+                }
+            ],
+            "configuration": {
+                "upsert_values": {
+                    "error": "`data` and `configuration.upsert_values` should have the same length.",
+                    "code": "invalid",
+                }
+            },
+        },
+    }
diff --git a/backend/tests/baserow/contrib/database/field/test_autonumber_field_type.py b/backend/tests/baserow/contrib/database/field/test_autonumber_field_type.py
index da3b4b3c8..4c4c7391e 100644
--- a/backend/tests/baserow/contrib/database/field/test_autonumber_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_autonumber_field_type.py
@@ -618,8 +618,10 @@ def test_autonumber_field_can_be_referenced_in_formula(data_fixture):
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     data_fixture.create_autonumber_field(name="autonumber", table=table)
-    row_1, row_2 = RowHandler().create_rows(
-        user=user, table=table, rows_values=[{}, {}]
+    row_1, row_2 = (
+        RowHandler()
+        .create_rows(user=user, table=table, rows_values=[{}, {}])
+        .created_rows
     )
 
     formula_field = data_fixture.create_formula_field(
@@ -633,8 +635,10 @@ def test_autonumber_field_can_be_referenced_in_formula(data_fixture):
         {"id": row_2.id, f"field_{formula_field.id}": 4},
     ]
 
-    (row_3,) = RowHandler().create_rows(
-        user=user, table=table, rows_values=[{}], model=model
+    (row_3,) = (
+        RowHandler()
+        .create_rows(user=user, table=table, rows_values=[{}], model=model)
+        .created_rows
     )
     row_values = model.objects.all().values("id", f"field_{formula_field.id}")
     assert list(row_values) == [
@@ -660,12 +664,17 @@ def test_autonumber_field_can_be_looked_up(data_fixture):
     row_b_2 = model_b.objects.create()
 
     model_a = table_a.get_model()
-    (row,) = RowHandler().create_rows(
-        user=user,
-        table=table_a,
-        rows_values=[
-            {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
-        ],
-        model=model_a,
+    (row,) = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table_a,
+            rows_values=[
+                {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
+            ],
+            model=model_a,
+        )
+        .created_rows
     )
+
     assert getattr(row, f"field_{formula_field.id}") == 3
diff --git a/backend/tests/baserow/contrib/database/field/test_boolean_field_type.py b/backend/tests/baserow/contrib/database/field/test_boolean_field_type.py
index 5e00bd140..a53eaf06d 100644
--- a/backend/tests/baserow/contrib/database/field/test_boolean_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_boolean_field_type.py
@@ -138,7 +138,7 @@ def test_boolean_field_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_c.id, previous=True, view=grid_view
diff --git a/backend/tests/baserow/contrib/database/field/test_created_by_field_type.py b/backend/tests/baserow/contrib/database/field/test_created_by_field_type.py
index af69ceafc..9a21e8cd5 100644
--- a/backend/tests/baserow/contrib/database/field/test_created_by_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_created_by_field_type.py
@@ -132,7 +132,7 @@ def test_create_rows_created_by(data_fixture):
 
     rows = row_handler.create_rows(
         user=user, table=table, rows_values=[{}, {}], model=model
-    )
+    ).created_rows
 
     assert getattr(rows[0], f"field_{field.id}") == user
 
diff --git a/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py b/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
index 2b429e6c1..4b587f011 100644
--- a/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
@@ -237,7 +237,7 @@ def test_created_on_field_adjacent_row(data_fixture):
             {},
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
diff --git a/backend/tests/baserow/contrib/database/field/test_date_field_type.py b/backend/tests/baserow/contrib/database/field/test_date_field_type.py
index 72fb1df01..e912681f6 100644
--- a/backend/tests/baserow/contrib/database/field/test_date_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_date_field_type.py
@@ -661,7 +661,7 @@ def test_date_field_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
@@ -699,7 +699,7 @@ def test_get_group_by_metadata_in_rows_with_date_field(data_fixture):
                 f"field_{date_field.id}": "2010-01-02 12:01:21",
             },
         ],
-    )
+    ).created_rows
 
     model = table.get_model()
 
diff --git a/backend/tests/baserow/contrib/database/field/test_duration_field_type.py b/backend/tests/baserow/contrib/database/field/test_duration_field_type.py
index 3148d728c..4d823d91c 100644
--- a/backend/tests/baserow/contrib/database/field/test_duration_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_duration_field_type.py
@@ -98,7 +98,7 @@ def test_create_duration_field_rows(data_fixture):
             {f"field_{duration_field.id}": timedelta(seconds=3661)},
         ],
         model=model,
-    )
+    ).created_rows
 
     assert len(rows) == 2
     assert getattr(rows[0], f"field_{duration_field.id}") == timedelta(seconds=3660)
@@ -779,20 +779,24 @@ def test_duration_field_view_filters(data_fixture):
     )
 
     model = table.get_model()
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {field.db_column: None},
-            {field.db_column: "0:1.123"},
-            {field.db_column: 1.123},
-            {field.db_column: 60},  # 1min
-            {field.db_column: "24:0:0"},  # 1day
-            {field.db_column: "1 0"},  # 1day
-            {field.db_column: 3601},  # 1hour 1sec
-            {field.db_column: "1:0:0"},  # 1 hour
-        ],
-        model=model,
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {field.db_column: None},
+                {field.db_column: "0:1.123"},
+                {field.db_column: 1.123},
+                {field.db_column: 60},  # 1min
+                {field.db_column: "24:0:0"},  # 1day
+                {field.db_column: "1 0"},  # 1day
+                {field.db_column: 3601},  # 1hour 1sec
+                {field.db_column: "1:0:0"},  # 1 hour
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     #
@@ -1105,14 +1109,18 @@ def test_duration_field_can_be_looked_up(data_fixture):
     )
 
     model_b = table_b.get_model()
-    row_b_1, row_b_2 = RowHandler().create_rows(
-        user=user,
-        table=table_b,
-        rows_values=[
-            {duration_field.db_column: 24 * 3600},
-            {duration_field.db_column: 60},
-        ],
-        model=model_b,
+    row_b_1, row_b_2 = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table_b,
+            rows_values=[
+                {duration_field.db_column: 24 * 3600},
+                {duration_field.db_column: 60},
+            ],
+            model=model_b,
+        )
+        .created_rows
     )
 
     assert list(model_b.objects.values_list(duration_formula.db_column, flat=True)) == [
@@ -1121,13 +1129,17 @@ def test_duration_field_can_be_looked_up(data_fixture):
     ]
 
     model_a = table_a.get_model()
-    (row,) = RowHandler().create_rows(
-        user=user,
-        table=table_a,
-        rows_values=[
-            {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
-        ],
-        model=model_a,
+    (row,) = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table_a,
+            rows_values=[
+                {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
+            ],
+            model=model_a,
+        )
+        .created_rows
     )
     assert getattr(row, f"field_{lookup_field.id}") == [
         {"id": row_b_1.id, "value": "1 day"},
diff --git a/backend/tests/baserow/contrib/database/field/test_duration_formula_field_filters.py b/backend/tests/baserow/contrib/database/field/test_duration_formula_field_filters.py
index 7d7e55183..ded72748e 100644
--- a/backend/tests/baserow/contrib/database/field/test_duration_formula_field_filters.py
+++ b/backend/tests/baserow/contrib/database/field/test_duration_formula_field_filters.py
@@ -79,7 +79,7 @@ def duration_formula_filter_proc(
         {src_field_name: 61, refname: "1m 1s"},
     ]
 
-    created = t.row_handler.create_rows(
+    t.row_handler.create_rows(
         user=t.user,
         table=t.table,
         rows_values=rows,
diff --git a/backend/tests/baserow/contrib/database/field/test_field_single_select_options.py b/backend/tests/baserow/contrib/database/field/test_field_single_select_options.py
index 4cdd4f7a6..8afc5b533 100644
--- a/backend/tests/baserow/contrib/database/field/test_field_single_select_options.py
+++ b/backend/tests/baserow/contrib/database/field/test_field_single_select_options.py
@@ -40,12 +40,16 @@ def test_migration_rows_with_deleted_singleselect_options(
         field=single_select_field, value=f"Option B"
     )
 
-    _, row_with_b = RowHandler().force_create_rows(
-        user=user,
-        table=table,
-        rows_values=[
-            {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
-        ],
+    row_with_b = (
+        RowHandler()
+        .force_create_rows(
+            user=user,
+            table=table,
+            rows_values=[
+                {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
+            ],
+        )
+        .created_rows[1]
     )
 
     single_select_field_type = field_type_registry.get_by_model(single_select_field)
@@ -95,12 +99,16 @@ def test_single_select_ids_are_removed_from_rows_when_deleted(data_fixture):
     option_a = data_fixture.create_select_option(field=single_select_field, value=f"A")
     option_b = data_fixture.create_select_option(field=single_select_field, value=f"B")
 
-    _, row_with_b = RowHandler().force_create_rows(
-        user=user,
-        table=table,
-        rows_values=[
-            {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
-        ],
+    row_with_b = (
+        RowHandler()
+        .force_create_rows(
+            user=user,
+            table=table,
+            rows_values=[
+                {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
+            ],
+        )
+        .created_rows[1]
     )
 
     # Keep only A, and remove B
diff --git a/backend/tests/baserow/contrib/database/field/test_field_tasks.py b/backend/tests/baserow/contrib/database/field/test_field_tasks.py
index 30fa13c2c..0cb5f098b 100644
--- a/backend/tests/baserow/contrib/database/field/test_field_tasks.py
+++ b/backend/tests/baserow/contrib/database/field/test_field_tasks.py
@@ -481,14 +481,18 @@ def test_run_delete_mentions_marked_for_deletion(data_fixture):
 
     # Create a user mention
     with freeze_time("2023-02-27 9:00"):
-        row_1, row_2 = RowHandler().create_rows(
-            user=user,
-            table=table,
-            rows_values=[
-                {f"field_{rich_text_field.id}": f"Hello @{user.id}!"},
-                {f"field_{rich_text_field.id}": f"Hi @{user.id}!"},
-            ],
-            model=model,
+        row_1, row_2 = (
+            RowHandler()
+            .create_rows(
+                user=user,
+                table=table,
+                rows_values=[
+                    {f"field_{rich_text_field.id}": f"Hello @{user.id}!"},
+                    {f"field_{rich_text_field.id}": f"Hi @{user.id}!"},
+                ],
+                model=model,
+            )
+            .created_rows
         )
 
     mentions = RichTextFieldMention.objects.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_formula_field_type.py b/backend/tests/baserow/contrib/database/field/test_formula_field_type.py
index 5d4535bdc..53dbf81fb 100644
--- a/backend/tests/baserow/contrib/database/field/test_formula_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_formula_field_type.py
@@ -1091,13 +1091,17 @@ def test_inserting_a_row_with_lookup_field_immediately_populates_it_with_empty_l
     primary_a_field = table_a.field_set.get(primary=True)
     primary_b_field = table_b.field_set.get(primary=True)
     target_field = data_fixture.create_text_field(name="target", table=table_b)
-    row_1, row_2 = RowHandler().create_rows(
-        user,
-        table_b,
-        rows_values=[
-            {primary_b_field.db_column: "1", target_field.db_column: "target 1"},
-            {primary_b_field.db_column: "2", target_field.db_column: "target 2"},
-        ],
+    row_1, row_2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_b,
+            rows_values=[
+                {primary_b_field.db_column: "1", target_field.db_column: "target 1"},
+                {primary_b_field.db_column: "2", target_field.db_column: "target 2"},
+            ],
+        )
+        .created_rows
     )
     RowHandler().create_rows(
         user,
@@ -1373,7 +1377,7 @@ def test_formula_field_adjacent_row(data_fixture):
                 f"field_{text_field.id}": "C",
             },
         ],
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
diff --git a/backend/tests/baserow/contrib/database/field/test_last_modified_by_field_type.py b/backend/tests/baserow/contrib/database/field/test_last_modified_by_field_type.py
index 83916d720..f3dd82405 100644
--- a/backend/tests/baserow/contrib/database/field/test_last_modified_by_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_last_modified_by_field_type.py
@@ -134,7 +134,7 @@ def test_create_rows_last_modified_by(data_fixture):
 
     rows = row_handler.create_rows(
         user=user, table=table, rows_values=[{}, {}], model=model
-    )
+    ).created_rows
 
     assert getattr(rows[0], f"field_{field.id}") == user
 
diff --git a/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py b/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
index 2434168a1..44cec179e 100644
--- a/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
@@ -255,7 +255,7 @@ def test_last_modified_field_adjacent_row(data_fixture):
             {},
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
@@ -278,14 +278,16 @@ def test_last_modified_field_can_be_looked_up(data_fixture):
 
     row_handler = RowHandler()
 
-    row_b1, _ = row_handler.create_rows(user=user, table=table_b, rows_values=[{}, {}])
+    row_b1, _ = row_handler.create_rows(
+        user=user, table=table_b, rows_values=[{}, {}]
+    ).created_rows
 
     with freeze_time("2020-01-01 12:00"):
         row_a1, _ = row_handler.create_rows(
             user=user,
             table=table_a,
             rows_values=[{link_row.db_column: [row_b1.id]}, {}],
-        )
+        ).created_rows
 
     updated_row_b1 = row_handler.get_row(user=user, table=table_b, row_id=row_b1.id)
     assert getattr(updated_row_b1, lookup_last_modified_field.db_column) == [
diff --git a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
index 34c5a28d5..86f61d110 100644
--- a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
@@ -2260,11 +2260,15 @@ def test_dont_export_deleted_relations(data_fixture):
     row_b2 = table_b_model.objects.create()
 
     table_a_model = table_a.get_model()
-    (row_a1,) = RowHandler().force_create_rows(
-        user,
-        table_a,
-        [{link_field.db_column: [row_b1.id, row_b2.id]}],
-        model=table_a_model,
+    (row_a1,) = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            table_a,
+            [{link_field.db_column: [row_b1.id, row_b2.id]}],
+            model=table_a_model,
+        )
+        .created_rows
     )
 
     assert getattr(row_a1, link_field.db_column).count() == 2
@@ -2336,7 +2340,7 @@ def setup_table_with_single_select_pk(user, data_fixture):
         for (char, opt) in zip(all_chars, options)
     ]
 
-    rows = RowHandler().force_create_rows(user, table, rows_values)
+    rows = RowHandler().force_create_rows(user, table, rows_values).created_rows
     return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
 
 
@@ -2363,7 +2367,7 @@ def setup_table_with_multiple_select_pk(user, data_fixture):
         for (i, char) in enumerate(all_chars)
     ]
 
-    rows = RowHandler().force_create_rows(user, table, rows_values)
+    rows = RowHandler().force_create_rows(user, table, rows_values).created_rows
     return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
 
 
@@ -2410,16 +2414,22 @@ def setup_table_with_collaborator_pk(user, data_fixture):
         ]
     )
 
-    rows = RowHandler().force_create_rows(
-        user,
-        table,
-        [
-            {
-                f"{primary_field.db_column}": [{"id": usr.id, "name": usr.first_name}],
-                f"{comparable_field.db_column}": usr.first_name,
-            }
-            for usr in users
-        ],
+    rows = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            table,
+            [
+                {
+                    f"{primary_field.db_column}": [
+                        {"id": usr.id, "name": usr.first_name}
+                    ],
+                    f"{comparable_field.db_column}": usr.first_name,
+                }
+                for usr in users
+            ],
+        )
+        .created_rows
     )
     return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
 
@@ -2611,10 +2621,14 @@ def test_get_group_by_metadata_in_rows_with_many_to_many_field(data_fixture):
     user = data_fixture.create_user()
     table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(user=user)
 
-    row_b1, row_b2, row_b3 = RowHandler().force_create_rows(
-        user=user,
-        table=table_b,
-        rows_values=[{}, {}, {}],
+    row_b1, row_b2, row_b3 = (
+        RowHandler()
+        .force_create_rows(
+            user=user,
+            table=table_b,
+            rows_values=[{}, {}, {}],
+        )
+        .created_rows
     )
 
     RowHandler().force_create_rows(
@@ -2727,24 +2741,28 @@ def test_list_rows_with_group_by_link_row_to_multiple_select_field(
     grid = data_fixture.create_grid_view(table=table_a)
     data_fixture.create_view_group_by(view=grid, field=link_a_to_b)
 
-    row_b1, row_b2 = RowHandler().force_create_rows(
-        user=user,
-        table=table_b,
-        rows_values=[
-            {
-                f"field_{multiple_select_field.id}": [
-                    select_option_1.id,
-                    select_option_2.id,
-                    select_option_3.id,
-                ],
-            },
-            {
-                f"field_{multiple_select_field.id}": [
-                    select_option_2.id,
-                    select_option_3.id,
-                ],
-            },
-        ],
+    row_b1, row_b2 = (
+        RowHandler()
+        .force_create_rows(
+            user=user,
+            table=table_b,
+            rows_values=[
+                {
+                    f"field_{multiple_select_field.id}": [
+                        select_option_1.id,
+                        select_option_2.id,
+                        select_option_3.id,
+                    ],
+                },
+                {
+                    f"field_{multiple_select_field.id}": [
+                        select_option_2.id,
+                        select_option_3.id,
+                    ],
+                },
+            ],
+        )
+        .created_rows
     )
 
     RowHandler().force_create_rows(
diff --git a/backend/tests/baserow/contrib/database/field/test_long_text_field_type.py b/backend/tests/baserow/contrib/database/field/test_long_text_field_type.py
index 9166a54e5..584c86139 100644
--- a/backend/tests/baserow/contrib/database/field/test_long_text_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_long_text_field_type.py
@@ -62,14 +62,18 @@ def test_perm_deleting_rows_delete_rich_text_mentions(data_fixture):
         table=table, long_text_enable_rich_text=True
     )
 
-    row_1, row_2, row_3 = RowHandler().create_rows(
-        user=user,
-        table=table,
-        rows_values=[
-            {field.db_column: f"Hello @{user.id}!"},
-            {field.db_column: f"Ciao @{user.id}!"},
-            {field.db_column: f"Hola @{user.id}!"},
-        ],
+    row_1, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table,
+            rows_values=[
+                {field.db_column: f"Hello @{user.id}!"},
+                {field.db_column: f"Ciao @{user.id}!"},
+                {field.db_column: f"Hola @{user.id}!"},
+            ],
+        )
+        .created_rows
     )
 
     mentions = RichTextFieldMention.objects.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
index 40a033e9e..c6606f22d 100644
--- a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
@@ -825,19 +825,23 @@ def test_can_modify_row_containing_lookup(
         link_row_table=table2,
     )
 
-    a, b = RowHandler().create_rows(
-        user,
-        table2,
-        [
-            {
-                looked_up_field.db_column: f"2021-02-01",
-                table2_primary_field.db_column: "primary a",
-            },
-            {
-                looked_up_field.db_column: f"2022-02-03",
-                table2_primary_field.db_column: "primary b",
-            },
-        ],
+    a, b = (
+        RowHandler()
+        .create_rows(
+            user,
+            table2,
+            [
+                {
+                    looked_up_field.db_column: f"2021-02-01",
+                    table2_primary_field.db_column: "primary a",
+                },
+                {
+                    looked_up_field.db_column: f"2022-02-03",
+                    table2_primary_field.db_column: "primary b",
+                },
+            ],
+        )
+        .created_rows
     )
 
     table_row = RowHandler().create_row(
@@ -1347,20 +1351,24 @@ def test_deleting_table_with_dependants_works(
     )
 
     table2_model = table2.get_model()
-    a, b = RowHandler().create_rows(
-        user,
-        table2,
-        rows_values=[
-            {
-                looked_up_field.db_column: "2021-02-01",
-                table2_primary_field.db_column: "primary a",
-            },
-            {
-                looked_up_field.db_column: "2022-02-03",
-                table2_primary_field.db_column: "primary b",
-            },
-        ],
-        model=table2_model,
+    a, b = (
+        RowHandler()
+        .create_rows(
+            user,
+            table2,
+            rows_values=[
+                {
+                    looked_up_field.db_column: "2021-02-01",
+                    table2_primary_field.db_column: "primary a",
+                },
+                {
+                    looked_up_field.db_column: "2022-02-03",
+                    table2_primary_field.db_column: "primary b",
+                },
+            ],
+            model=table2_model,
+        )
+        .created_rows
     )
 
     table_model = table.get_model()
@@ -1847,34 +1855,42 @@ def test_can_modify_row_containing_lookup_diamond_dep(
     starting_row = RowHandler().create_row(
         user, table1, {primary_table1.db_column: "table1_primary_row_1"}
     )
-    table2_row1, table2_row2 = RowHandler().create_rows(
-        user,
-        table2,
-        [
-            {
-                primary_table2.db_column: "table2_row1",
-                table2_link_to_table1.db_column: [starting_row.id],
-            },
-            {
-                primary_table2.db_column: "table2_row2",
-                table2_link_to_table1.db_column: [starting_row.id],
-            },
-        ],
+    table2_row1, table2_row2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table2,
+            [
+                {
+                    primary_table2.db_column: "table2_row1",
+                    table2_link_to_table1.db_column: [starting_row.id],
+                },
+                {
+                    primary_table2.db_column: "table2_row2",
+                    table2_link_to_table1.db_column: [starting_row.id],
+                },
+            ],
+        )
+        .created_rows
     )
 
-    table3_row1, table3_row2 = RowHandler().create_rows(
-        user,
-        table3,
-        [
-            {
-                primary_table3.db_column: "table3_row1",
-                table3_link_to_table2_a.db_column: [table2_row1.id],
-            },
-            {
-                primary_table3.db_column: "table3_row2",
-                table3_link_to_table2_b.db_column: [table2_row2.id],
-            },
-        ],
+    table3_row1, table3_row2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table3,
+            [
+                {
+                    primary_table3.db_column: "table3_row1",
+                    table3_link_to_table2_a.db_column: [table2_row1.id],
+                },
+                {
+                    primary_table3.db_column: "table3_row2",
+                    table3_link_to_table2_b.db_column: [table2_row2.id],
+                },
+            ],
+        )
+        .created_rows
     )
 
     FieldHandler().create_field(
diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
index 4ab9542eb..b21dcad92 100644
--- a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
@@ -849,12 +849,12 @@ def test_multiple_collaborators_field_type_values_can_be_searched(data_fixture):
             {collaborator_field.db_column: [{"id": luigi.id}]},
             {collaborator_field.db_column: [{"id": mario.id}, {"id": luigi.id}]},
         ],
-    )
+    ).created_rows
     rows_a_to_b = row_handler.force_create_rows(
         user=mario,
         table=table_a,
         rows_values=[{link_a_to_b.db_column: [row_b.id]} for row_b in rows_b],
-    )
+    ).created_rows
 
     # search in B
     model_b = table_b.get_model()
@@ -931,7 +931,7 @@ def test_multiple_collaborators_formula_field_cache_users_query(data_fixture):
             {field_id: [{"id": user_2.id}, {"id": user_3.id}]},
         ],
         model=table_model,
-    )
+    ).created_rows
 
     # The number of queries should not increase as we export more rows
     with CaptureQueriesContext(connection) as queries_for_all_others:
diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
index 9c89e176b..39718dab2 100644
--- a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
@@ -450,7 +450,7 @@ def test_multiple_select_field_type_multiple_rows(data_fixture):
     assert len(row_5_field) == 1
     assert getattr(row_5_field[0], "id") == select_options[0].id
 
-    _, error_report = row_handler.create_rows(
+    error_report = row_handler.create_rows(
         user,
         table,
         rows_values=[
@@ -460,7 +460,7 @@ def test_multiple_select_field_type_multiple_rows(data_fixture):
             {f"field_{field.id}": [99999, "missing"]},
         ],
         generate_error_report=True,
-    )
+    ).errors
 
     assert list(error_report.keys()) == [0, 2, 3]
     assert f"field_{field.id}" in error_report[0]
@@ -2300,7 +2300,7 @@ def test_multiple_select_adjacent_row(data_fixture):
                 f"field_{multiple_select_field.id}": [option_a.id],
             },
         ],
-    )
+    ).created_rows
 
     base_queryset = ViewHandler().apply_sorting(
         grid_view, table.get_model().objects.all()
@@ -2595,7 +2595,7 @@ def test_get_group_by_metadata_in_rows_with_many_to_many_field(data_fixture):
                 ],
             },
         ],
-    )
+    ).created_rows
 
     model = table.get_model()
 
@@ -2792,7 +2792,7 @@ def test_get_group_by_metadata_in_rows_multiple_and_single_select_fields(data_fi
                 ],
             },
         ],
-    )
+    ).created_rows
 
     model = table.get_model()
 
@@ -2992,11 +2992,15 @@ def setup_view_for_multiple_select_field(data_fixture, option_values):
             return {}
         return {multiple_select_field.db_column: [opt.id for opt in options]}
 
-    rows = RowHandler().force_create_rows(
-        user,
-        table,
-        [prep_row([option] if option is not None else None) for option in options],
-        model=model,
+    rows = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            table,
+            [prep_row([option] if option is not None else None) for option in options],
+            model=model,
+        )
+        .created_rows
     )
 
     fields = {
diff --git a/backend/tests/baserow/contrib/database/field/test_number_field_type.py b/backend/tests/baserow/contrib/database/field/test_number_field_type.py
index ec00c4691..8f1b7b53e 100644
--- a/backend/tests/baserow/contrib/database/field/test_number_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_number_field_type.py
@@ -274,7 +274,7 @@ def test_number_field_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
diff --git a/backend/tests/baserow/contrib/database/field/test_number_lookup_field_filters.py b/backend/tests/baserow/contrib/database/field/test_number_lookup_field_filters.py
index 6ecacffd0..f0edd87be 100644
--- a/backend/tests/baserow/contrib/database/field/test_number_lookup_field_filters.py
+++ b/backend/tests/baserow/contrib/database/field/test_number_lookup_field_filters.py
@@ -65,7 +65,7 @@ def number_lookup_filter_proc(
 
     linked_rows = t.row_handler.create_rows(
         user=t.user, table=t.other_table, rows_values=dict_rows
-    )
+    ).created_rows
 
     # helper to get linked rows by indexes
     def get_linked_rows(*indexes) -> list[int]:
diff --git a/backend/tests/baserow/contrib/database/field/test_rating_field_type.py b/backend/tests/baserow/contrib/database/field/test_rating_field_type.py
index ed58c6ef0..80853d5c6 100644
--- a/backend/tests/baserow/contrib/database/field/test_rating_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_rating_field_type.py
@@ -320,7 +320,7 @@ def test_rating_field_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
diff --git a/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py b/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
index 0ab78c9a0..493e5d052 100644
--- a/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
@@ -1105,7 +1105,7 @@ def test_single_select_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     previous_row = handler.get_adjacent_row(
         table_model, row_b.id, previous=True, view=grid_view
@@ -1141,7 +1141,7 @@ def test_single_select_adjacent_row_working_with_sorts_and_null_values(data_fixt
             {},
         ],
         model=table_model,
-    )
+    ).created_rows
 
     next_row = handler.get_adjacent_row(table_model, row_a.id, view=grid_view)
     assert next_row.id == row_b.id
@@ -1379,8 +1379,12 @@ def setup_view_for_single_select_field(data_fixture, option_values):
     def prep_row(option):
         return {single_select_field.db_column: option.id if option else None}
 
-    rows = RowHandler().force_create_rows(
-        user, table, [prep_row(option) for option in options], model=model
+    rows = (
+        RowHandler()
+        .force_create_rows(
+            user, table, [prep_row(option) for option in options], model=model
+        )
+        .created_rows
     )
 
     fields = {
diff --git a/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py b/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py
index 163019734..45c7589b9 100644
--- a/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py
@@ -156,7 +156,7 @@ def test_create_uuid_row_in_bulk(data_fixture):
 
     rows = row_handler.create_rows(
         user=user, table=table, rows_values=[{}, {}], model=model
-    )
+    ).created_rows
 
     assert isinstance(rows[0].uuid, UUID)
     assert isinstance(rows[1].uuid, UUID)
diff --git a/backend/tests/baserow/contrib/database/file_import/test_file_import_tasks.py b/backend/tests/baserow/contrib/database/file_import/test_file_import_tasks.py
index 2002e15fd..bdcc29d0c 100644
--- a/backend/tests/baserow/contrib/database/file_import/test_file_import_tasks.py
+++ b/backend/tests/baserow/contrib/database/file_import/test_file_import_tasks.py
@@ -9,6 +9,8 @@ from pyinstrument import Profiler
 
 from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
 from baserow.contrib.database.fields.exceptions import (
+    FieldNotInTable,
+    IncompatibleField,
     InvalidBaserowFieldName,
     MaxFieldLimitExceeded,
     MaxFieldNameLengthExceeded,
@@ -16,7 +18,10 @@ from baserow.contrib.database.fields.exceptions import (
 )
 from baserow.contrib.database.fields.field_cache import FieldCache
 from baserow.contrib.database.fields.models import SelectOption, TextField
-from baserow.contrib.database.rows.exceptions import ReportMaxErrorCountExceeded
+from baserow.contrib.database.rows.exceptions import (
+    InvalidRowLength,
+    ReportMaxErrorCountExceeded,
+)
 from baserow.contrib.database.table.exceptions import (
     InitialTableDataDuplicateName,
     InitialTableDataLimitExceeded,
@@ -43,23 +48,25 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
         run_async_job(job.id)
 
     with patch_filefield_storage(), pytest.raises(InvalidInitialTableData):
-        job = data_fixture.create_file_import_job(data=[])
+        job = data_fixture.create_file_import_job(data={"data": []})
         run_async_job(job.id)
 
     with patch_filefield_storage(), pytest.raises(InvalidInitialTableData):
-        job = data_fixture.create_file_import_job(data=[[]])
+        job = data_fixture.create_file_import_job(data={"data": [[]]})
         run_async_job(job.id)
 
     with override_settings(
         INITIAL_TABLE_DATA_LIMIT=2
     ), patch_filefield_storage(), pytest.raises(InitialTableDataLimitExceeded):
-        job = data_fixture.create_file_import_job(data=[[], [], []])
+        job = data_fixture.create_file_import_job(data={"data": [[], [], []]})
         run_async_job(job.id)
 
     with override_settings(MAX_FIELD_LIMIT=2), patch_filefield_storage(), pytest.raises(
         MaxFieldLimitExceeded
     ):
-        job = data_fixture.create_file_import_job(data=[["fields"] * 3, ["rows"] * 3])
+        job = data_fixture.create_file_import_job(
+            data={"data": [["fields"] * 3, ["rows"] * 3]}
+        )
         run_async_job(job.id)
 
     too_long_field_name = "x" * 256
@@ -73,35 +80,37 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
     ]
 
     with patch_filefield_storage(), pytest.raises(MaxFieldNameLengthExceeded):
-        job = data_fixture.create_file_import_job(data=data)
+        job = data_fixture.create_file_import_job(data={"data": data})
         run_async_job(job.id)
 
     data[0][0] = field_name_with_ok_length
     with patch_filefield_storage():
-        job = data_fixture.create_file_import_job(data=data)
+        job = data_fixture.create_file_import_job(data={"data": data})
         run_async_job(job.id)
 
     with patch_filefield_storage(), pytest.raises(ReservedBaserowFieldNameException):
-        job = data_fixture.create_file_import_job(data=[["id"]])
+        job = data_fixture.create_file_import_job(data={"data": [["id"]]})
         run_async_job(job.id)
 
     with patch_filefield_storage(), pytest.raises(InitialTableDataDuplicateName):
-        job = data_fixture.create_file_import_job(data=[["test", "test"]])
+        job = data_fixture.create_file_import_job(data={"data": [["test", "test"]]})
         run_async_job(job.id)
 
     with patch_filefield_storage(), pytest.raises(InvalidBaserowFieldName):
-        job = data_fixture.create_file_import_job(data=[[" "]])
+        job = data_fixture.create_file_import_job(data={"data": [[" "]]})
         run_async_job(job.id)
 
     # Basic use
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
-            data=[
-                ["A", "B", "C", "D"],
-                ["1-1", "1-2", "1-3", "1-4", "1-5"],
-                ["2-1", "2-2", "2-3"],
-                ["3-1", "3-2"],
-            ]
+            data={
+                "data": [
+                    ["A", "B", "C", "D"],
+                    ["1-1", "1-2", "1-3", "1-4", "1-5"],
+                    ["2-1", "2-2", "2-3"],
+                    ["3-1", "3-2"],
+                ]
+            }
         )
         run_async_job(job.id)
 
@@ -130,11 +139,13 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
     # Without first row header
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
-            data=[
-                ["1-1"],
-                ["2-1", "2-2", "2-3"],
-                ["3-1", "3-2"],
-            ],
+            data={
+                "data": [
+                    ["1-1"],
+                    ["2-1", "2-2", "2-3"],
+                    ["3-1", "3-2"],
+                ]
+            },
             first_row_header=False,
         )
         run_async_job(job.id)
@@ -151,17 +162,19 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
     # Robust to strange field names
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
-            data=[
-                [
-                    "TEst 1",
-                    "10.00",
-                    'Falsea"""',
-                    'a"a"a"a"a,',
-                    "a",
-                    1.3,
-                    "/w. r/awr",
-                ],
-            ],
+            data={
+                "data": [
+                    [
+                        "TEst 1",
+                        "10.00",
+                        'Falsea"""',
+                        'a"a"a"a"a,',
+                        "a",
+                        1.3,
+                        "/w. r/awr",
+                    ],
+                ]
+            },
         )
         run_async_job(job.id)
 
@@ -196,7 +209,7 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
     model = table.get_model()
 
     # Import data to an existing table
-    data = [["baz", 3, -3, "foo", None], ["bob", -4, 2.5, "bar", "a" * 255]]
+    data = {"data": [["baz", 3, -3, "foo", None], ["bob", -4, 2.5, "bar", "a" * 255]]}
 
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
@@ -212,13 +225,15 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
     assert len(rows) == 2
 
     # Import data with different length
-    data = [
-        ["good", "test", "test", "Anything"],
-        [],
-        [None, None],
-        ["good", 2.5, None, "Anything"],
-        ["good", 2.5, None, "Anything", "too much", "values"],
-    ]
+    data = {
+        "data": [
+            ["good", "test", "test", "Anything"],
+            [],
+            [None, None],
+            ["good", 2.5, None, "Anything"],
+            ["good", 2.5, None, "Anything", "too much", "values"],
+        ]
+    }
 
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
@@ -331,6 +346,7 @@ def test_run_file_import_task_for_special_fields(data_fixture, patch_filefield_s
             [],
         ],
     ]
+    data = {"data": data}
 
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
@@ -397,6 +413,7 @@ def test_run_file_import_task_for_special_fields(data_fixture, patch_filefield_s
             "bug",
         ],
     ]
+    data = {"data": data}
 
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(
@@ -454,8 +471,8 @@ def test_run_file_import_test_chunk(data_fixture, patch_filefield_storage):
 
     table, _, _ = data_fixture.build_table(
         columns=[
-            (f"col1", "text"),
-            (f"col2", "number"),
+            ("col1", "text"),
+            ("col2", "number"),
         ],
         rows=[],
         user=user,
@@ -483,11 +500,16 @@ def test_run_file_import_test_chunk(data_fixture, patch_filefield_storage):
     data[1024] = ["test", 2, 99999]
     data[1027] = ["test", "bad", single_select_option_2.id]
 
+    data = {"data": data}
+
     with patch_filefield_storage():
         job = data_fixture.create_file_import_job(table=table, data=data, user=user)
         run_async_job(job.id)
 
     job.refresh_from_db()
+    assert job.finished
+    assert not job.failed
 
     model = job.table.get_model()
     assert model.objects.count() == row_count - 5
@@ -509,8 +531,8 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
 
     table, _, _ = data_fixture.build_table(
         columns=[
-            (f"col1", "text"),
-            (f"col2", "number"),
+            ("col1", "text"),
+            ("col2", "number"),
         ],
         rows=[],
         user=user,
@@ -529,7 +551,9 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
     data += [["test", "bad", single_select_option_1.id]] * (max_error + 5)
 
     with patch_filefield_storage():
-        job = data_fixture.create_file_import_job(table=table, data=data, user=user)
+        job = data_fixture.create_file_import_job(
+            table=table, data={"data": data}, user=user
+        )
 
         with pytest.raises(ReportMaxErrorCountExceeded):
             run_async_job(job.id)
@@ -550,7 +574,9 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
     data += [["test", 1, 0]] * (max_error + 5)
 
     with patch_filefield_storage():
-        job = data_fixture.create_file_import_job(table=table, data=data, user=user)
+        job = data_fixture.create_file_import_job(
+            table=table, data={"data": data}, user=user
+        )
 
         with pytest.raises(ReportMaxErrorCountExceeded):
             run_async_job(job.id)
@@ -646,3 +672,315 @@ def test_cleanup_file_import_job(data_fixture, settings, patch_filefield_storage
     job3.refresh_from_db()
     assert job3.state == JOB_FINISHED
     assert job3.updated_on == time_before_soft_limit
+
+
+@pytest.mark.django_db(transaction=True)
+def test_run_file_import_task_with_upsert_fields_not_in_table(
+    data_fixture, patch_filefield_storage
+):
+    user = data_fixture.create_user()
+    database = data_fixture.create_database_application(user=user)
+    table = data_fixture.create_database_table(user=user, database=database)
+    data_fixture.create_text_field(table=table, order=1, name="text 1")
+    init_data = [["foo"], ["bar"]]
+
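+    # Field ids 100 and 120 do not belong to this table, so the import must
+    # fail with FieldNotInTable and leave the table empty.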
+    with pytest.raises(FieldNotInTable):
+        with patch_filefield_storage():
+            job = data_fixture.create_file_import_job(
+                data={
+                    "data": init_data,
+                    "configuration": {"upsert_fields": [100, 120]},
+                },
+                table=table,
+                user=user,
+            )
+            run_async_job(job.id)
+
+    model = table.get_model()
+    assert len(model.objects.all()) == 0
+
+
+@pytest.mark.django_db(transaction=True)
+def test_run_file_import_task_with_upsert_fields_not_usable(
+    data_fixture, patch_filefield_storage
+):
+    user = data_fixture.create_user()
+    database = data_fixture.create_database_application(user=user)
+    table = data_fixture.create_database_table(user=user, database=database)
+    f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
+    f2 = data_fixture.create_formula_field(table=table, order=2, name="formula field")
+
+    model = table.get_model()
+    # Dummy data, used later to verify that the existing rows weren't modified.
+    init_data = [
+        [
+            "aa-",
+        ],
+        [
+            "aa-",
+        ],
+    ]
+
+    with patch_filefield_storage():
+        job = data_fixture.create_file_import_job(
+            data={"data": init_data},
+            table=table,
+            user=user,
+        )
+        run_async_job(job.id)
+
+    job.refresh_from_db()
+
+    assert job.state == JOB_FINISHED
+    assert job.progress_percentage == 100
+
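+    # A formula field is read-only and cannot be used as an upsert key, so the
+    # import must raise IncompatibleField and leave the two existing rows intact.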
+    with pytest.raises(IncompatibleField):
+        with patch_filefield_storage():
+            job = data_fixture.create_file_import_job(
+                data={
+                    "data": [["bbb"], ["ccc"], ["aaa"]],
+                    "configuration": {
+                        # we're trying to use formula field, which is not supported
+                        "upsert_fields": [f2.id],
+                        "upsert_values": [["aaa"], ["aaa"], ["aaa"]],
+                    },
+                },
+                table=table,
+                user=user,
+                first_row_header=False,
+            )
+            run_async_job(job.id)
+
+    rows = model.objects.all()
+    assert len(rows) == 2
+    assert all([getattr(r, f1.db_column) == "aa-" for r in rows])
+
+
+@pytest.mark.django_db(transaction=True)
+def test_run_file_import_task_with_upsert_fields_invalid_length(
+    data_fixture, patch_filefield_storage
+):
+    user = data_fixture.create_user()
+    database = data_fixture.create_database_application(user=user)
+    table = data_fixture.create_database_table(user=user, database=database)
+    f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
+
+    model = table.get_model()
+
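+    # One upsert field is configured but each upsert row carries two values,
+    # so the import must fail with InvalidRowLength before touching the table.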
+    with pytest.raises(InvalidRowLength):
+        with patch_filefield_storage():
+            job = data_fixture.create_file_import_job(
+                data={
+                    "data": [["bbb"], ["ccc"], ["aaa"]],
+                    "configuration": {
+                        # fields and values have different lengths
+                        "upsert_fields": [f1.id],
+                        "upsert_values": [
+                            ["aaa", "bbb"],
+                        ],
+                    },
+                },
+                table=table,
+                user=user,
+                first_row_header=False,
+            )
+            run_async_job(job.id)
+    job.refresh_from_db()
+    assert job.failed
+
+    rows = model.objects.all()
+    assert len(rows) == 0
+
+
+@pytest.mark.django_db(transaction=True)
+def test_run_file_import_task_with_upsert(data_fixture, patch_filefield_storage):
+    user = data_fixture.create_user()
+    database = data_fixture.create_database_application(user=user)
+    table = data_fixture.create_database_table(user=user, database=database)
+
+    f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
+    f2 = data_fixture.create_number_field(
+        table=table, order=2, name="number 1", number_negative=True
+    )
+    f3 = data_fixture.create_date_field(user=user, table=table, order=3, name="date 1")
+    f4 = data_fixture.create_date_field(
+        user=user, table=table, order=4, name="datetime 1", date_include_time=True
+    )
+    f5 = data_fixture.create_number_field(
+        table=table,
+        order=5,
+        name="value field",
+        number_negative=True,
+        number_decimal_places=10,
+    )
+    f6 = data_fixture.create_text_field(table=table, order=6, name="text 2")
+
+    model = table.get_model()
+
+    init_data = [
+        [
+            "aaa",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03.004+01:00",
+            0.1,
+            "aaa-1-1",
+        ],
+        [
+            "aab",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03",
+            0.2,
+            "aab-1-1",
+        ],
+        [
+            "aac",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03",
+            0.2,
+            "aac-1-1",
+        ],
+        [
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        ],
+        [
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        ],
+        [
+            "aac",
+            1,
+            None,
+            "2024-01-01T01:02:03",
+            0.2,
+            "aac-1-2",
+        ],
+        [
+            "aab",
+            1,
+            "2024-01-01",
+            None,
+            0.2,
+            "aac-1-2",
+        ],
+        [
+            "aaa",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03.004+01:00",
+            0.1,
+            "aaa-1-1",
+        ],
+        [
+            "aaa",
+            1,
+            "2024-01-02",
+            "2024-01-01 01:02:03.004 +01:00",
+            0.1,
+            "aaa-1-1",
+        ],
+    ]
+
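+    # Seed the table without any upsert configuration: every row is inserted.
+    # The second import below derives a matching key from the first four
+    # fields of each row; keys are not guaranteed to be unique, so repeated
+    # keys are matched against table rows in order.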
+    with patch_filefield_storage():
+        job = data_fixture.create_file_import_job(
+            data={"data": init_data},
+            table=table,
+            user=user,
+        )
+        run_async_job(job.id)
+
+    job.refresh_from_db()
+
+    assert job.state == JOB_FINISHED
+    assert job.progress_percentage == 100
+
+    rows = model.objects.all()
+
+    assert len(rows) == len(init_data)
+
+    update_with_duplicates = [
+        # the first three rows match existing upsert keys, so they update rows
+        [
+            "aab",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03",
+            0.3,
+            "aab-1-1-modified",
+        ],
+        [
+            "aaa",
+            1,
+            "2024-01-01",
+            "2024-01-01T01:02:03.004+01:00",
+            0.2,
+            "aaa-1-1-modified",
+        ],
+        [
+            "aab",
+            1,
+            "2024-01-01",
+            None,
+            0.33333,
+            "aac-1-2-modified",
+        ],
+        # these rows have no (remaining) match in the table, so they are inserted
+        [
+            "aab",
+            1,
+            None,
+            None,
+            125,
+            "aab-1-3-new",
+        ],
+        [
+            "aab",
+            1,
+            "2024-01-01",
+            None,
+            0.33333,
+            "aab-1-4-new",
+        ],
+    ]
+    # Import without a first row header, matching rows on the first four fields
+    with patch_filefield_storage():
+        job = data_fixture.create_file_import_job(
+            data={
+                "data": update_with_duplicates,
+                "configuration": {
+                    "upsert_fields": [f1.id, f2.id, f3.id, f4.id],
+                    "upsert_values": [i[:4] for i in update_with_duplicates],
+                },
+            },
+            table=table,
+            user=user,
+            first_row_header=False,
+        )
+        run_async_job(job.id)
+
+    job.refresh_from_db()
+    assert job.finished
+    assert not job.failed
+
+    rows = list(model.objects.all())
+
+    assert len(rows) == len(init_data) + 2
+
+    last = rows[-1]
+    assert getattr(last, f1.db_column) == "aab"
+    assert getattr(last, f6.db_column) == "aab-1-4-new"
+
+    last = rows[-2]
+    assert getattr(last, f1.db_column) == "aab"
+    assert getattr(last, f6.db_column) == "aab-1-3-new"
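Reviewer note on the upsert test above: the matching semantics it asserts (repeated keys consumed in table order, leftovers inserted) can be summarised with a small standalone sketch. This is illustrative only, not Baserow's actual import code; all names below are hypothetical.

    # Sketch: pair each upsert_values row with upsert_fields to form a key,
    # consume duplicate keys in table order, insert when no match remains.
    from collections import defaultdict, deque

    def plan_upsert(existing_keys, import_keys):
        # existing_keys: one tuple per table row, in table order.
        # import_keys: one tuple per imported row, in import order.
        remaining = defaultdict(deque)
        for row_id, key in enumerate(existing_keys):
            remaining[key].append(row_id)
        plan = []
        for key in import_keys:
            if remaining[key]:
                plan.append(("update", remaining[key].popleft()))
            else:
                plan.append(("insert", None))
        return plan

    # One table row with a given key and two import occurrences of it: the
    # first occurrence updates the existing row, the second is inserted.
    assert [op for op, _ in plan_upsert([("aab", 1)], [("aab", 1), ("aab", 1)])] == [
        "update",
        "insert",
    ]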
diff --git a/backend/tests/baserow/contrib/database/formula/test_baserow_formula_results.py b/backend/tests/baserow/contrib/database/formula/test_baserow_formula_results.py
index 168590eed..dd0da762a 100644
--- a/backend/tests/baserow/contrib/database/formula/test_baserow_formula_results.py
+++ b/backend/tests/baserow/contrib/database/formula/test_baserow_formula_results.py
@@ -1751,18 +1751,22 @@ def test_can_filter_in_aggregated_formulas(data_fixture):
         name="autonr",
     )
 
-    rows_b = RowHandler().create_rows(
-        user,
-        table_b,
-        [
-            {boolean_field.db_column: True},
-            {},
-            {boolean_field.db_column: True},
-            {},
-            {},
-            {boolean_field.db_column: True},
-            {},
-        ],
+    rows_b = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_b,
+            [
+                {boolean_field.db_column: True},
+                {},
+                {boolean_field.db_column: True},
+                {},
+                {},
+                {boolean_field.db_column: True},
+                {},
+            ],
+        )
+        .created_rows
     )
 
     formula_field = data_fixture.create_formula_field(
@@ -1771,14 +1775,18 @@ def test_can_filter_in_aggregated_formulas(data_fixture):
         formula=f"max(filter(lookup('link', 'autonr'), lookup('link', 'check')))",
     )
 
-    row_a1, row_a2, row_a3 = RowHandler().create_rows(
-        user,
-        table_a,
-        [
-            {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
-            {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
-            {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
-        ],
+    row_a1, row_a2, row_a3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_a,
+            [
+                {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
+                {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
+                {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
+            ],
+        )
+        .created_rows
     )
 
     # autonr of row_b[0], because it's the only one with check=True
@@ -1800,27 +1808,31 @@ def test_can_filter_in_aggregated_formulas_with_multipleselects(data_fixture):
     option_c = data_fixture.create_select_option(field=multiple_select_field, value="c")
     option_d = data_fixture.create_select_option(field=multiple_select_field, value="d")
 
-    rows_b = RowHandler().create_rows(
-        user,
-        table_b,
-        [
-            {
-                boolean_field.db_column: True,
-                multiple_select_field.db_column: [option_a.id, option_b.id],
-            },
-            {multiple_select_field.db_column: [option_c.id]},
-            {
-                boolean_field.db_column: True,
-                multiple_select_field.db_column: [option_d.id],
-            },
-            {multiple_select_field.db_column: [option_a.id, option_b.id]},
-            {multiple_select_field.db_column: [option_c.id, option_d.id]},
-            {
-                boolean_field.db_column: True,
-                multiple_select_field.db_column: [option_b.id],
-            },
-            {},
-        ],
+    rows_b = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_b,
+            [
+                {
+                    boolean_field.db_column: True,
+                    multiple_select_field.db_column: [option_a.id, option_b.id],
+                },
+                {multiple_select_field.db_column: [option_c.id]},
+                {
+                    boolean_field.db_column: True,
+                    multiple_select_field.db_column: [option_d.id],
+                },
+                {multiple_select_field.db_column: [option_a.id, option_b.id]},
+                {multiple_select_field.db_column: [option_c.id, option_d.id]},
+                {
+                    boolean_field.db_column: True,
+                    multiple_select_field.db_column: [option_b.id],
+                },
+                {},
+            ],
+        )
+        .created_rows
     )
 
     formula_field = data_fixture.create_formula_field(
@@ -1829,14 +1841,18 @@ def test_can_filter_in_aggregated_formulas_with_multipleselects(data_fixture):
         formula=f"count(filter(lookup('link', 'mm'), lookup('link', 'check')))",
     )
 
-    row_a1, row_a2, row_a3 = RowHandler().create_rows(
-        user,
-        table_a,
-        [
-            {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
-            {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
-            {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
-        ],
+    row_a1, row_a2, row_a3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_a,
+            [
+                {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
+                {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
+                {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
+            ],
+        )
+        .created_rows
     )
 
     # counts the multiple select values of linked rows where check=True
@@ -1869,19 +1885,23 @@ def test_formulas_with_lookup_url_field_type(data_fixture):
         table=linked_table,
     )
 
-    linked_row_1, linked_row_2 = RowHandler().create_rows(
-        user,
-        linked_table,
-        [
-            {
-                linked_table_primary_field.db_column: "URL #1",
-                linked_table_url_field.db_column: "https://baserow.io/1",
-            },
-            {
-                linked_table_primary_field.db_column: "URL #2",
-                linked_table_url_field.db_column: "https://baserow.io/2",
-            },
-        ],
+    linked_row_1, linked_row_2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            linked_table,
+            [
+                {
+                    linked_table_primary_field.db_column: "URL #1",
+                    linked_table_url_field.db_column: "https://baserow.io/1",
+                },
+                {
+                    linked_table_primary_field.db_column: "URL #2",
+                    linked_table_url_field.db_column: "https://baserow.io/2",
+                },
+            ],
+        )
+        .created_rows
     )
 
     link_field = FieldHandler().create_field(
@@ -1981,8 +2001,10 @@ def test_lookup_arrays(data_fixture):
         rows=[["b1"], ["b2"]],
         fields=[table_b_primary_field],
     )
-    (row_a1,) = RowHandler().create_rows(
-        user, table_a, [{link_field.db_column: [row_b1.id, row_b2.id]}]
+    (row_a1,) = (
+        RowHandler()
+        .create_rows(user, table_a, [{link_field.db_column: [row_b1.id, row_b2.id]}])
+        .created_rows
     )
     lookup_field = FieldHandler().create_field(
         user,
@@ -2038,17 +2060,21 @@ def test_formulas_with_lookup_to_uuid_primary_field(data_fixture):
         table=linked_table,
     )
 
-    linked_row_1, linked_row_2 = RowHandler().create_rows(
-        user,
-        linked_table,
-        [
-            {
-                linked_table_text_field.db_column: "Linked row #1",
-            },
-            {
-                linked_table_text_field.db_column: "Linked row #2",
-            },
-        ],
+    linked_row_1, linked_row_2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            linked_table,
+            [
+                {
+                    linked_table_text_field.db_column: "Linked row #1",
+                },
+                {
+                    linked_table_text_field.db_column: "Linked row #2",
+                },
+            ],
+        )
+        .created_rows
     )
 
     link_field = FieldHandler().create_field(
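Reviewer note: every hunk in this file applies the same mechanical change — RowHandler().create_rows(...) apparently no longer returns a plain list, so the tests unpack .created_rows from the returned result. A minimal sketch of the assumed shape (the class name here is hypothetical; only the attribute name is visible in this diff):

    # Hypothetical result shape; only .created_rows is assumed from the diff.
    from dataclasses import dataclass, field
    from typing import Any, List

    @dataclass
    class RowsCreatedResult:
        created_rows: List[Any] = field(default_factory=list)

    result = RowsCreatedResult(created_rows=["row_1", "row_2"])
    row_1, row_2 = result.created_rows  # the unpacking pattern used above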
diff --git a/backend/tests/baserow/contrib/database/rows/test_rows_actions.py b/backend/tests/baserow/contrib/database/rows/test_rows_actions.py
index e52326d28..79cb28731 100644
--- a/backend/tests/baserow/contrib/database/rows/test_rows_actions.py
+++ b/backend/tests/baserow/contrib/database/rows/test_rows_actions.py
@@ -258,23 +258,25 @@ def test_can_undo_importing_rows(data_fixture):
     action_type_registry.get_by_type(ImportRowsActionType).do(
         user,
         table,
-        data=[
-            [
-                "Tesla",
-                240,
-                59999.99,
-            ],
-            [
-                "Giulietta",
-                210,
-                34999.99,
-            ],
-            [
-                "Panda",
-                160,
-                8999.99,
-            ],
-        ],
+        data={
+            "data": [
+                [
+                    "Tesla",
+                    240,
+                    59999.99,
+                ],
+                [
+                    "Giulietta",
+                    210,
+                    34999.99,
+                ],
+                [
+                    "Panda",
+                    160,
+                    8999.99,
+                ],
+            ]
+        },
     )
 
     assert model.objects.all().count() == 3
@@ -314,23 +316,25 @@ def test_can_undo_redo_importing_rows(row_send_mock, table_send_mock, data_fixtu
     action_type_registry.get_by_type(ImportRowsActionType).do(
         user,
         table,
-        data=[
-            [
-                "Tesla",
-                240,
-                59999.99,
-            ],
-            [
-                "Giulietta",
-                210,
-                34999.99,
-            ],
-            [
-                "Panda",
-                160,
-                8999.99,
-            ],
-        ],
+        data={
+            "data": [
+                [
+                    "Tesla",
+                    240,
+                    59999.99,
+                ],
+                [
+                    "Giulietta",
+                    210,
+                    34999.99,
+                ],
+                [
+                    "Panda",
+                    160,
+                    8999.99,
+                ],
+            ]
+        },
     )
 
     table_send_mock.assert_called_once()
@@ -363,14 +367,16 @@ def test_can_undo_redo_importing_rows(row_send_mock, table_send_mock, data_fixtu
     action_type_registry.get_by_type(ImportRowsActionType).do(
         user,
         table,
-        data=[
-            [
-                "Tesla",
-                240,
-                59999.99,
-            ],
-        ]
-        * 51,
+        data={
+            "data": [
+                [
+                    "Tesla",
+                    240,
+                    59999.99,
+                ],
+            ]
+            * 51
+        },
     )
 
     row_send_mock.reset_mock()
@@ -506,26 +512,30 @@ def test_can_undo_deleting_rows(data_fixture):
     )
     model = table.get_model()
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{name_field.id}": "Tesla",
-                f"field_{speed_field.id}": 240,
-                f"field_{price_field.id}": 59999.99,
-            },
-            {
-                f"field_{name_field.id}": "Giulietta",
-                f"field_{speed_field.id}": 210,
-                f"field_{price_field.id}": 34999.99,
-            },
-            {
-                f"field_{name_field.id}": "Panda",
-                f"field_{speed_field.id}": 160,
-                f"field_{price_field.id}": 8999.99,
-            },
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{name_field.id}": "Tesla",
+                    f"field_{speed_field.id}": 240,
+                    f"field_{price_field.id}": 59999.99,
+                },
+                {
+                    f"field_{name_field.id}": "Giulietta",
+                    f"field_{speed_field.id}": 210,
+                    f"field_{price_field.id}": 34999.99,
+                },
+                {
+                    f"field_{name_field.id}": "Panda",
+                    f"field_{speed_field.id}": 160,
+                    f"field_{price_field.id}": 8999.99,
+                },
+            ],
+        )
+        .created_rows
     )
 
     assert model.objects.all().count() == 3
@@ -565,26 +575,30 @@ def test_can_undo_redo_deleting_rows(data_fixture):
     )
     model = table.get_model()
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{name_field.id}": "Tesla",
-                f"field_{speed_field.id}": 240,
-                f"field_{price_field.id}": 59999.99,
-            },
-            {
-                f"field_{name_field.id}": "Giulietta",
-                f"field_{speed_field.id}": 210,
-                f"field_{price_field.id}": 34999.99,
-            },
-            {
-                f"field_{name_field.id}": "Panda",
-                f"field_{speed_field.id}": 160,
-                f"field_{price_field.id}": 8999.99,
-            },
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{name_field.id}": "Tesla",
+                    f"field_{speed_field.id}": 240,
+                    f"field_{price_field.id}": 59999.99,
+                },
+                {
+                    f"field_{name_field.id}": "Giulietta",
+                    f"field_{speed_field.id}": 210,
+                    f"field_{price_field.id}": 34999.99,
+                },
+                {
+                    f"field_{name_field.id}": "Panda",
+                    f"field_{speed_field.id}": 160,
+                    f"field_{price_field.id}": 8999.99,
+                },
+            ],
+        )
+        .created_rows
     )
 
     assert model.objects.all().count() == 3
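Reviewer note: in this file the data argument of ImportRowsActionType.do changes from a bare list of rows to a dict keyed by "data", which makes room for the optional "configuration" key exercised by the file-import tests earlier in this diff. A sketch of the assumed payload shape:

    # Assumed payload shape for ImportRowsActionType.do, based on this diff.
    payload = {
        "data": [
            ["Tesla", 240, 59999.99],
            ["Panda", 160, 8999.99],
        ],
        # Optional upsert settings, as used by the file-import tests:
        # "configuration": {
        #     "upsert_fields": [field_id],
        #     "upsert_values": [["Tesla"], ["Panda"]],
        # },
    }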
diff --git a/backend/tests/baserow/contrib/database/rows/test_rows_handler.py b/backend/tests/baserow/contrib/database/rows/test_rows_handler.py
index 5b82491c2..bfd78acaa 100644
--- a/backend/tests/baserow/contrib/database/rows/test_rows_handler.py
+++ b/backend/tests/baserow/contrib/database/rows/test_rows_handler.py
@@ -339,7 +339,7 @@ def test_get_adjacent_row(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     next_row = handler.get_adjacent_row(table_model, rows[1].id)
     previous_row = handler.get_adjacent_row(table_model, rows[1].id, previous=True)
@@ -373,7 +373,7 @@ def test_get_adjacent_row_with_custom_filters(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     base_queryset = (
         table.get_model()
@@ -421,7 +421,7 @@ def test_get_adjacent_row_with_view_sort(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
     previous_row = handler.get_adjacent_row(
@@ -460,7 +460,7 @@ def test_get_adjacent_row_with_view_group_by(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
     previous_row = handler.get_adjacent_row(
@@ -497,7 +497,7 @@ def test_get_adjacent_row_with_search(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     search = "a"
     next_row = handler.get_adjacent_row(table_model, row_2.id, view=view, search=search)
@@ -551,7 +551,7 @@ def test_get_adjacent_row_with_view_group_by_and_view_sort(data_fixture):
             },
         ],
         model=table_model,
-    )
+    ).created_rows
 
     next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
     previous_row = handler.get_adjacent_row(
@@ -582,7 +582,7 @@ def test_get_adjacent_row_performance_many_rows(data_fixture):
     table_model = table.get_model()
     rows = handler.create_rows(
         user=user, table=table, rows_values=row_values, model=table_model
-    )
+    ).created_rows
 
     profiler = Profiler()
     profiler.start()
@@ -621,7 +621,7 @@ def test_get_adjacent_row_performance_many_fields(data_fixture):
     table_model = table.get_model()
     rows = handler.create_rows(
         user=user, table=table, rows_values=row_values, model=table_model
-    )
+    ).created_rows
 
     profiler = Profiler()
     profiler.start()
@@ -747,7 +747,7 @@ def test_update_rows_return_original_values_and_fields_metadata(data_fixture):
         user=user,
         table=table,
         rows_values=[{}, {}],
-    )
+    ).created_rows
 
     result = handler.update_rows(
         user=user,
@@ -842,7 +842,9 @@ def test_create_rows_created_on_and_last_modified(data_fixture):
     handler = RowHandler()
 
     with freeze_time("2020-01-01 12:00"):
-        rows = handler.create_rows(user=user, table=table, rows_values=[{}])
+        rows = handler.create_rows(
+            user=user, table=table, rows_values=[{}]
+        ).created_rows
         row = rows[0]
         assert row.created_on == datetime(2020, 1, 1, 12, 0, tzinfo=timezone.utc)
         assert row.updated_on == datetime(2020, 1, 1, 12, 0, tzinfo=timezone.utc)
@@ -862,7 +864,7 @@ def test_create_rows_last_modified_by(data_fixture):
             {f"field_{name_field.id}": "Test"},
             {f"field_{name_field.id}": "Test 2"},
         ],
-    )
+    ).created_rows
 
     assert rows[0].last_modified_by == user
     assert rows[1].last_modified_by == user
@@ -1562,15 +1564,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
     # An UPDATE query to set the formula field value + 1 query due
     # to FormulaFieldType.after_rows_created
     with django_assert_num_queries(len(captured.captured_queries) + 2):
-        (r,) = RowHandler().force_create_rows(
-            user=user,
-            table=table,
-            rows_values=[
-                {
-                    f"field_{name_field.id}": "Giulietta",
-                }
-            ],
-            model=model,
+        (r,) = (
+            RowHandler()
+            .force_create_rows(
+                user=user,
+                table=table,
+                rows_values=[
+                    {
+                        f"field_{name_field.id}": "Giulietta",
+                    }
+                ],
+                model=model,
+            )
+            .created_rows
         )
     assert getattr(r, f"field_{f1.id}") == "Giulietta-a"
 
@@ -1584,15 +1590,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
     model = table.get_model()
 
     with django_assert_num_queries(len(captured.captured_queries) + 2):
-        (r,) = RowHandler().force_create_rows(
-            user=user,
-            table=table,
-            rows_values=[
-                {
-                    f"field_{name_field.id}": "Stelvio",
-                }
-            ],
-            model=model,
+        (r,) = (
+            RowHandler()
+            .force_create_rows(
+                user=user,
+                table=table,
+                rows_values=[
+                    {
+                        f"field_{name_field.id}": "Stelvio",
+                    }
+                ],
+                model=model,
+            )
+            .created_rows
         )
     assert getattr(r, f"field_{f1.id}") == "Stelvio-a"
     assert getattr(r, f"field_{f2.id}") == "Stelvio-b"
@@ -1609,15 +1619,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
     # Now a second UPDATE query is needed, so that F3 can use the result
     # of F1 to correctly calculate its value
     with django_assert_num_queries(len(captured.captured_queries) + 3):
-        (r,) = RowHandler().force_create_rows(
-            user=user,
-            table=table,
-            rows_values=[
-                {
-                    f"field_{name_field.id}": "Tonale",
-                }
-            ],
-            model=model,
+        (r,) = (
+            RowHandler()
+            .force_create_rows(
+                user=user,
+                table=table,
+                rows_values=[
+                    {
+                        f"field_{name_field.id}": "Tonale",
+                    }
+                ],
+                model=model,
+            )
+            .created_rows
         )
     assert getattr(r, f"field_{f1.id}") == "Tonale-a"
     assert getattr(r, f"field_{f2.id}") == "Tonale-b"
@@ -1642,7 +1656,11 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_updated(
     # in the FieldDependencyHandler:
     # link_row_field_content_type = ContentType.objects.get_for_model(LinkRowField)
     # so let's create a row first to avoid counting that query
-    (r,) = RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
+    (r,) = (
+        RowHandler()
+        .force_create_rows(user=user, table=table, rows_values=[{}])
+        .created_rows
+    )
 
     with CaptureQueriesContext(connection) as captured:
         RowHandler().force_update_rows(
@@ -1740,18 +1758,26 @@ def test_can_move_rows_and_formulas_are_updated_correctly(data_fixture):
     table_a, table_b, link_a_b = data_fixture.create_two_linked_tables(user=user)
     prim_b = data_fixture.create_text_field(table=table_b, primary=True, name="name")
 
-    row_b1, row_b2 = RowHandler().create_rows(
-        user, table_b, [{prim_b.db_column: "b1"}, {prim_b.db_column: "b2"}]
+    row_b1, row_b2 = (
+        RowHandler()
+        .create_rows(
+            user, table_b, [{prim_b.db_column: "b1"}, {prim_b.db_column: "b2"}]
+        )
+        .created_rows
     )
 
     lookup_a = data_fixture.create_formula_field(
         table=table_a, formula="join(lookup('link', 'name'), '')"
     )
 
-    row_a1, row_a2 = RowHandler().create_rows(
-        user,
-        table_a,
-        [{link_a_b.db_column: [row_b1.id]}, {link_a_b.db_column: [row_b2.id]}],
+    row_a1, row_a2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table_a,
+            [{link_a_b.db_column: [row_b1.id]}, {link_a_b.db_column: [row_b2.id]}],
+        )
+        .created_rows
     )
 
     assert getattr(row_a1, lookup_a.db_column) == "b1"
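Reviewer note: the query-count tests above all follow the same baseline-plus-delta pattern — capture the queries for the plain operation, then assert that formula dependencies add a fixed number of extra queries. A condensed sketch of the pattern; the two operation fixtures are placeholders, while django_assert_num_queries is the real pytest-django fixture:

    # Condensed sketch of the baseline-plus-delta query counting used above.
    from django.db import connection
    from django.test.utils import CaptureQueriesContext

    def test_formula_query_delta(
        django_assert_num_queries, make_rows_plain, make_rows_with_formulas
    ):
        with CaptureQueriesContext(connection) as captured:
            make_rows_plain()  # placeholder: same write, no formula fields
        # One dependent formula adds one UPDATE plus one query from
        # FormulaFieldType.after_rows_created, hence the +2; a formula that
        # depends on another formula's result costs one more UPDATE pass.
        with django_assert_num_queries(len(captured.captured_queries) + 2):
            make_rows_with_formulas()  # placeholder: same write with formulas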
diff --git a/backend/tests/baserow/contrib/database/table/test_table_models.py b/backend/tests/baserow/contrib/database/table/test_table_models.py
index 63a745fec..0112e8a03 100644
--- a/backend/tests/baserow/contrib/database/table/test_table_models.py
+++ b/backend/tests/baserow/contrib/database/table/test_table_models.py
@@ -482,39 +482,43 @@ def test_order_by_fields_string_queryset(data_fixture):
         field=multiple_select_field, value="D", color="red"
     )
 
-    row_1, row_2, row_3, row_4 = RowHandler().force_create_rows(
-        user=None,
-        table=table,
-        rows_values=[
-            {
-                name_field.db_column: "BMW",
-                color_field.db_column: "Blue",
-                price_field.db_column: 10000,
-                description_field.db_column: "Sports car.",
-                single_select_field.db_column: option_a.id,
-                multiple_select_field.db_column: [option_c.id],
-            },
-            {
-                name_field.db_column: "Audi",
-                color_field.db_column: "Orange",
-                price_field.db_column: 20000,
-                description_field.db_column: "This is the most expensive car we have.",
-                single_select_field.db_column: option_b.id,
-                multiple_select_field.db_column: [option_d.id],
-            },
-            {
-                name_field.db_column: "Volkswagen",
-                color_field.db_column: "White",
-                price_field.db_column: 5000,
-                description_field.db_column: "A very old car.",
-            },
-            {
-                name_field.db_column: "Volkswagen",
-                color_field.db_column: "Green",
-                price_field.db_column: 4000,
-                description_field.db_column: "Strange color.",
-            },
-        ],
+    row_1, row_2, row_3, row_4 = (
+        RowHandler()
+        .force_create_rows(
+            user=None,
+            table=table,
+            rows_values=[
+                {
+                    name_field.db_column: "BMW",
+                    color_field.db_column: "Blue",
+                    price_field.db_column: 10000,
+                    description_field.db_column: "Sports car.",
+                    single_select_field.db_column: option_a.id,
+                    multiple_select_field.db_column: [option_c.id],
+                },
+                {
+                    name_field.db_column: "Audi",
+                    color_field.db_column: "Orange",
+                    price_field.db_column: 20000,
+                    description_field.db_column: "This is the most expensive car we have.",
+                    single_select_field.db_column: option_b.id,
+                    multiple_select_field.db_column: [option_d.id],
+                },
+                {
+                    name_field.db_column: "Volkswagen",
+                    color_field.db_column: "White",
+                    price_field.db_column: 5000,
+                    description_field.db_column: "A very old car.",
+                },
+                {
+                    name_field.db_column: "Volkswagen",
+                    color_field.db_column: "Green",
+                    price_field.db_column: 4000,
+                    description_field.db_column: "Strange color.",
+                },
+            ],
+        )
+        .created_rows
     )
 
     model = table.get_model()
@@ -704,19 +708,23 @@ def test_order_by_fields_string_queryset_with_type(data_fixture):
         field=single_select_field, value="B", color="red", order=1
     )
 
-    row_1, row_2 = RowHandler().force_create_rows(
-        user=None,
-        table=table,
-        rows_values=[
-            {
-                name_field.db_column: "BMW",
-                single_select_field.db_column: option_a.id,
-            },
-            {
-                name_field.db_column: "Audi",
-                single_select_field.db_column: option_b.id,
-            },
-        ],
+    row_1, row_2 = (
+        RowHandler()
+        .force_create_rows(
+            user=None,
+            table=table,
+            rows_values=[
+                {
+                    name_field.db_column: "BMW",
+                    single_select_field.db_column: option_a.id,
+                },
+                {
+                    name_field.db_column: "Audi",
+                    single_select_field.db_column: option_b.id,
+                },
+            ],
+        )
+        .created_rows
     )
 
     model = table.get_model()
diff --git a/backend/tests/baserow/contrib/database/test_cachalot.py b/backend/tests/baserow/contrib/database/test_cachalot.py
index f7ce090e7..f75a783cf 100644
--- a/backend/tests/baserow/contrib/database/test_cachalot.py
+++ b/backend/tests/baserow/contrib/database/test_cachalot.py
@@ -105,7 +105,7 @@ if settings.CACHALOT_ENABLED:
                 {f"field_{field.id}": [select_options[0].id, select_options[1].value]},
                 {f"field_{field.id}": [select_options[2].value, select_options[0].id]},
             ],
-        )
+        ).created_rows
 
         url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id})
         response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
diff --git a/backend/tests/baserow/contrib/database/test_database_application_type.py b/backend/tests/baserow/contrib/database/test_database_application_type.py
index 363afda2a..fc03976f3 100644
--- a/backend/tests/baserow/contrib/database/test_database_application_type.py
+++ b/backend/tests/baserow/contrib/database/test_database_application_type.py
@@ -43,17 +43,27 @@ def test_import_export_database(data_fixture):
     data_fixture.create_view_sort(view=view, field=text_field)
 
     with freeze_time("2021-01-01 12:30"):
-        row, _ = RowHandler().force_create_rows(
-            user,
-            table,
-            [{f"field_{text_field.id}": "Test"}, {f"field_{text_field.id}": "Test 2"}],
+        row = (
+            RowHandler()
+            .force_create_rows(
+                user,
+                table,
+                [
+                    {f"field_{text_field.id}": "Test"},
+                    {f"field_{text_field.id}": "Test 2"},
+                ],
+            )
+            .created_rows[0]
         )
 
     with freeze_time("2021-01-02 13:30"):
-        res = RowHandler().force_update_rows(
-            user, table, [{"id": row.id, f"field_{text_field.id}": "Test"}]
+        row = (
+            RowHandler()
+            .force_update_rows(
+                user, table, [{"id": row.id, f"field_{text_field.id}": "Test"}]
+            )
+            .updated_rows[0]
         )
-        row = res.updated_rows[0]
 
     database_type = application_type_registry.get("database")
     config = ImportExportConfig(include_permission_data=True)
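Reviewer note: force_update_rows already returned a result object exposing .updated_rows (see the removed res.updated_rows[0] line above); this diff makes the create path symmetric by exposing .created_rows. The resulting call-site pattern:

    # Create and update now share the same result-object pattern:
    #   created = RowHandler().force_create_rows(user, table, values).created_rows
    #   updated = RowHandler().force_update_rows(user, table, values).updated_rows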
diff --git a/backend/tests/baserow/contrib/database/view/test_view_array_filters.py b/backend/tests/baserow/contrib/database/view/test_view_array_filters.py
index d1e3d0049..9e3e58bed 100644
--- a/backend/tests/baserow/contrib/database/view/test_view_array_filters.py
+++ b/backend/tests/baserow/contrib/database/view/test_view_array_filters.py
@@ -92,7 +92,7 @@ def boolean_lookup_filter_proc(
 
     linked_rows = test_setup.row_handler.create_rows(
         user=test_setup.user, table=test_setup.other_table, rows_values=dict_rows
-    )
+    ).created_rows
     rows = [
         # mixed
         {
@@ -126,7 +126,7 @@ def boolean_lookup_filter_proc(
     ]
     r_mixed, r_false, r_true, r_none = test_setup.row_handler.create_rows(
         user=test_setup.user, table=test_setup.table, rows_values=rows
-    )
+    ).created_rows
     rows = [r_mixed, r_false, r_true, r_none]
     selected = [rows[idx] for idx in expected_rows]
 
@@ -2423,7 +2423,7 @@ def setup_multiple_select_rows(data_fixture):
             {f"field_{test_setup.target_field.id}": row_B_value},
             {f"field_{test_setup.target_field.id}": row_empty_value},
         ],
-    )
+    ).created_rows
     row_1 = test_setup.row_handler.create_row(
         user=test_setup.user,
         table=test_setup.table,
@@ -2629,7 +2629,7 @@ def setup_date_rows(data_fixture, field_factory):
             {},
         ],
         model=test_setup.other_table_model,
-    )
+    ).created_rows
     row_1, row_2, empty_row = test_setup.row_handler.force_create_rows(
         user,
         test_setup.table,
@@ -2639,7 +2639,7 @@ def setup_date_rows(data_fixture, field_factory):
             {test_setup.link_row_field.db_column: [other_row_3.id]},
         ],
         model=test_setup.model,
-    )
+    ).created_rows
     return test_setup, [row_1, row_2, empty_row]
 
 
@@ -2745,16 +2745,20 @@ def table_view_fields_rows(data_fixture):
     datetime_field = data_fixture.create_date_field(
         table=orig_table, date_include_time=True
     )
-    orig_rows = RowHandler().force_create_rows(
-        user,
-        orig_table,
-        [
-            {
-                date_field.db_column: date_value,
-                datetime_field.db_column: date_value,
-            }
-            for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
-        ],
+    orig_rows = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            orig_table,
+            [
+                {
+                    date_field.db_column: date_value,
+                    datetime_field.db_column: date_value,
+                }
+                for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
+            ],
+        )
+        .created_rows
     )
 
     table = data_fixture.create_database_table(database=orig_table.database)
@@ -2777,10 +2781,14 @@ def table_view_fields_rows(data_fixture):
         through_field_name=link_field.name,
         target_field_name=datetime_field.name,
     )
-    rows = RowHandler().force_create_rows(
-        user,
-        table,
-        [{link_field.db_column: [r.id]} for r in orig_rows],
+    rows = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            table,
+            [{link_field.db_column: [r.id]} for r in orig_rows],
+        )
+        .created_rows
     )
 
     grid_view = data_fixture.create_grid_view(table=table)
diff --git a/backend/tests/baserow/contrib/database/view/test_view_filters.py b/backend/tests/baserow/contrib/database/view/test_view_filters.py
index be0e1bb53..27dead907 100644
--- a/backend/tests/baserow/contrib/database/view/test_view_filters.py
+++ b/backend/tests/baserow/contrib/database/view/test_view_filters.py
@@ -89,33 +89,37 @@ def test_equal_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "Test",
-                f"field_{long_text_field.id}": "Long",
-                f"field_{integer_field.id}": 10,
-                f"field_{decimal_field.id}": 20.20,
-                f"field_{boolean_field.id}": True,
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{integer_field.id}": None,
-                f"field_{decimal_field.id}": None,
-                f"field_{boolean_field.id}": False,
-            },
-            {
-                f"field_{text_field.id}": "NOT",
-                f"field_{long_text_field.id}": "NOT2",
-                f"field_{integer_field.id}": 99,
-                f"field_{decimal_field.id}": 99.99,
-                f"field_{boolean_field.id}": False,
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "Test",
+                    f"field_{long_text_field.id}": "Long",
+                    f"field_{integer_field.id}": 10,
+                    f"field_{decimal_field.id}": 20.20,
+                    f"field_{boolean_field.id}": True,
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{integer_field.id}": None,
+                    f"field_{decimal_field.id}": None,
+                    f"field_{boolean_field.id}": False,
+                },
+                {
+                    f"field_{text_field.id}": "NOT",
+                    f"field_{long_text_field.id}": "NOT2",
+                    f"field_{integer_field.id}": 99,
+                    f"field_{decimal_field.id}": 99.99,
+                    f"field_{boolean_field.id}": False,
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -225,33 +229,37 @@ def test_not_equal_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "Test",
-                f"field_{long_text_field.id}": "Long",
-                f"field_{integer_field.id}": 10,
-                f"field_{decimal_field.id}": 20.20,
-                f"field_{boolean_field.id}": True,
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{integer_field.id}": None,
-                f"field_{decimal_field.id}": None,
-                f"field_{boolean_field.id}": False,
-            },
-            {
-                f"field_{text_field.id}": "NOT",
-                f"field_{long_text_field.id}": "NOT2",
-                f"field_{integer_field.id}": 99,
-                f"field_{decimal_field.id}": 99.99,
-                f"field_{boolean_field.id}": False,
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "Test",
+                    f"field_{long_text_field.id}": "Long",
+                    f"field_{integer_field.id}": 10,
+                    f"field_{decimal_field.id}": 20.20,
+                    f"field_{boolean_field.id}": True,
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{integer_field.id}": None,
+                    f"field_{decimal_field.id}": None,
+                    f"field_{boolean_field.id}": False,
+                },
+                {
+                    f"field_{text_field.id}": "NOT",
+                    f"field_{long_text_field.id}": "NOT2",
+                    f"field_{integer_field.id}": 99,
+                    f"field_{decimal_field.id}": 99.99,
+                    f"field_{boolean_field.id}": False,
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -394,36 +402,40 @@ def test_contains_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, _, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "My name is John Doe.",
-                f"field_{long_text_field.id}": "Long text that is not empty.",
-                f"field_{date_field.id}": "2020-02-01 01:23",
-                f"field_{number_field.id}": "98989898",
-                f"field_{single_select_field.id}": option_a,
-                f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{date_field.id}": None,
-                f"field_{number_field.id}": None,
-                f"field_{single_select_field.id}": None,
-            },
-            {
-                f"field_{text_field.id}": "This is a test field.",
-                f"field_{long_text_field.id}": "This text is a bit longer, but it also "
-                "contains.\n A multiline approach.",
-                f"field_{date_field.id}": "0001-01-02 00:12",
-                f"field_{number_field.id}": "10000",
-                f"field_{single_select_field.id}": option_b,
-                f"field_{multiple_select_field.id}": [option_c.id],
-            },
-        ],
-        model=model,
+    row, _, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "My name is John Doe.",
+                    f"field_{long_text_field.id}": "Long text that is not empty.",
+                    f"field_{date_field.id}": "2020-02-01 01:23",
+                    f"field_{number_field.id}": "98989898",
+                    f"field_{single_select_field.id}": option_a,
+                    f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{date_field.id}": None,
+                    f"field_{number_field.id}": None,
+                    f"field_{single_select_field.id}": None,
+                },
+                {
+                    f"field_{text_field.id}": "This is a test field.",
+                    f"field_{long_text_field.id}": "This text is a bit longer, but it also "
+                    "contains.\n A multiline approach.",
+                    f"field_{date_field.id}": "0001-01-02 00:12",
+                    f"field_{number_field.id}": "10000",
+                    f"field_{single_select_field.id}": option_b,
+                    f"field_{multiple_select_field.id}": [option_c.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -603,36 +615,40 @@ def test_contains_not_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "My name is John Doe.",
-                f"field_{long_text_field.id}": "Long text that is not empty.",
-                f"field_{date_field.id}": "2020-02-01 01:23",
-                f"field_{number_field.id}": "98989898",
-                f"field_{single_select_field.id}": option_a,
-                f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{date_field.id}": None,
-                f"field_{number_field.id}": None,
-                f"field_{single_select_field.id}": None,
-            },
-            {
-                f"field_{text_field.id}": "This is a test field.",
-                f"field_{long_text_field.id}": "This text is a bit longer, but it also "
-                "contains.\n A multiline approach.",
-                f"field_{date_field.id}": "0001-01-02 00:12",
-                f"field_{number_field.id}": "10000",
-                f"field_{single_select_field.id}": option_b,
-                f"field_{multiple_select_field.id}": [option_d.id],
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "My name is John Doe.",
+                    f"field_{long_text_field.id}": "Long text that is not empty.",
+                    f"field_{date_field.id}": "2020-02-01 01:23",
+                    f"field_{number_field.id}": "98989898",
+                    f"field_{single_select_field.id}": option_a,
+                    f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{date_field.id}": None,
+                    f"field_{number_field.id}": None,
+                    f"field_{single_select_field.id}": None,
+                },
+                {
+                    f"field_{text_field.id}": "This is a test field.",
+                    f"field_{long_text_field.id}": "This text is a bit longer, but it also "
+                    "contains.\n A multiline approach.",
+                    f"field_{date_field.id}": "0001-01-02 00:12",
+                    f"field_{number_field.id}": "10000",
+                    f"field_{single_select_field.id}": option_b,
+                    f"field_{multiple_select_field.id}": [option_d.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -818,36 +834,40 @@ def test_contains_word_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "My name is John Doe.",
-                f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
-                f"field_{url_field.id}": "https://www.example.com",
-                f"field_{email_field.id}": "test.user@example.com",
-                f"field_{single_select_field.id}": option_a,
-                f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{url_field.id}": "",
-                f"field_{email_field.id}": "",
-                f"field_{single_select_field.id}": None,
-            },
-            {
-                f"field_{text_field.id}": "This is a test field with the word Johny.",
-                f"field_{long_text_field.id}": "This text is a bit longer, but it also "
-                "contains.\n A multiline approach.",
-                f"field_{url_field.id}": "https://www.examplewebsite.com",
-                f"field_{email_field.id}": "test.user@examplewebsite.com",
-                f"field_{single_select_field.id}": option_b,
-                f"field_{multiple_select_field.id}": [option_c.id],
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "My name is John Doe.",
+                    f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
+                    f"field_{url_field.id}": "https://www.example.com",
+                    f"field_{email_field.id}": "test.user@example.com",
+                    f"field_{single_select_field.id}": option_a,
+                    f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{url_field.id}": "",
+                    f"field_{email_field.id}": "",
+                    f"field_{single_select_field.id}": None,
+                },
+                {
+                    f"field_{text_field.id}": "This is a test field with the word Johny.",
+                    f"field_{long_text_field.id}": "This text is a bit longer, but it also "
+                    "contains.\n A multiline approach.",
+                    f"field_{url_field.id}": "https://www.examplewebsite.com",
+                    f"field_{email_field.id}": "test.user@examplewebsite.com",
+                    f"field_{single_select_field.id}": option_b,
+                    f"field_{multiple_select_field.id}": [option_c.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -1011,36 +1031,40 @@ def test_doesnt_contain_word_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "My name is John Doe.",
-                f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
-                f"field_{url_field.id}": "https://www.example.com",
-                f"field_{email_field.id}": "test.user@example.com",
-                f"field_{single_select_field.id}": option_a,
-                f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
-            },
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{url_field.id}": "",
-                f"field_{email_field.id}": "",
-                f"field_{single_select_field.id}": None,
-            },
-            {
-                f"field_{text_field.id}": "This is a test field with the word Johny.",
-                f"field_{long_text_field.id}": "This text is a bit longer, but it also "
-                "contains.\n A multiline approach.",
-                f"field_{url_field.id}": "https://www.examplewebsite.com",
-                f"field_{email_field.id}": "test.user@examplewebsite.com",
-                f"field_{single_select_field.id}": option_b,
-                f"field_{multiple_select_field.id}": [option_c.id],
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "My name is John Doe.",
+                    f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
+                    f"field_{url_field.id}": "https://www.example.com",
+                    f"field_{email_field.id}": "test.user@example.com",
+                    f"field_{single_select_field.id}": option_a,
+                    f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
+                },
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{url_field.id}": "",
+                    f"field_{email_field.id}": "",
+                    f"field_{single_select_field.id}": None,
+                },
+                {
+                    f"field_{text_field.id}": "This is a test field with the word Johny.",
+                    f"field_{long_text_field.id}": "This text is a bit longer, but it also "
+                    "contains.\n A multiline approach.",
+                    f"field_{url_field.id}": "https://www.examplewebsite.com",
+                    f"field_{email_field.id}": "test.user@examplewebsite.com",
+                    f"field_{single_select_field.id}": option_b,
+                    f"field_{multiple_select_field.id}": [option_c.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -3275,56 +3299,60 @@ def test_empty_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    row, row_2, row_3 = RowHandler().create_rows(
-        user,
-        table,
-        [
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{integer_field.id}": None,
-                f"field_{decimal_field.id}": None,
-                f"field_{date_field.id}": None,
-                f"field_{date_time_field.id}": None,
-                f"field_{boolean_field.id}": False,
-                f"field_{file_field.id}": [],
-                f"field_{single_select_field.id}_id": None,
-            },
-            {
-                f"field_{text_field.id}": "Value",
-                f"field_{long_text_field.id}": "Value",
-                f"field_{integer_field.id}": 10,
-                f"field_{decimal_field.id}": 1022,
-                f"field_{date_field.id}": date(2020, 6, 17),
-                f"field_{date_time_field.id}": datetime(
-                    2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
-                ),
-                f"field_{boolean_field.id}": True,
-                f"field_{file_field.id}": [{"name": file_a.name}],
-                f"field_{single_select_field.id}_id": option_1.id,
-                f"field_{link_row_field.id}": [tmp_row.id],
-                f"field_{multiple_select_field.id}": [option_2.id],
-            },
-            {
-                f"field_{text_field.id}": "other value",
-                f"field_{long_text_field.id}": " ",
-                f"field_{integer_field.id}": 0,
-                f"field_{decimal_field.id}": 0.00,
-                f"field_{date_field.id}": date(1970, 1, 1),
-                f"field_{date_time_field.id}": datetime(
-                    1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc
-                ),
-                f"field_{boolean_field.id}": True,
-                f"field_{file_field.id}": [
-                    {"name": file_a.name},
-                    {"name": file_b.name},
-                ],
-                f"field_{single_select_field.id}_id": option_1.id,
-                f"field_{link_row_field.id}": [tmp_row.id],
-                f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
-            },
-        ],
-        model=model,
+    row, row_2, row_3 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            [
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{integer_field.id}": None,
+                    f"field_{decimal_field.id}": None,
+                    f"field_{date_field.id}": None,
+                    f"field_{date_time_field.id}": None,
+                    f"field_{boolean_field.id}": False,
+                    f"field_{file_field.id}": [],
+                    f"field_{single_select_field.id}_id": None,
+                },
+                {
+                    f"field_{text_field.id}": "Value",
+                    f"field_{long_text_field.id}": "Value",
+                    f"field_{integer_field.id}": 10,
+                    f"field_{decimal_field.id}": 1022,
+                    f"field_{date_field.id}": date(2020, 6, 17),
+                    f"field_{date_time_field.id}": datetime(
+                        2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
+                    ),
+                    f"field_{boolean_field.id}": True,
+                    f"field_{file_field.id}": [{"name": file_a.name}],
+                    f"field_{single_select_field.id}_id": option_1.id,
+                    f"field_{link_row_field.id}": [tmp_row.id],
+                    f"field_{multiple_select_field.id}": [option_2.id],
+                },
+                {
+                    f"field_{text_field.id}": "other value",
+                    f"field_{long_text_field.id}": " ",
+                    f"field_{integer_field.id}": 0,
+                    f"field_{decimal_field.id}": 0.00,
+                    f"field_{date_field.id}": date(1970, 1, 1),
+                    f"field_{date_time_field.id}": datetime(
+                        1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc
+                    ),
+                    f"field_{boolean_field.id}": True,
+                    f"field_{file_field.id}": [
+                        {"name": file_a.name},
+                        {"name": file_b.name},
+                    ],
+                    f"field_{single_select_field.id}_id": option_1.id,
+                    f"field_{link_row_field.id}": [tmp_row.id],
+                    f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -3434,38 +3462,42 @@ def test_not_empty_filter_type(data_fixture):
     handler = ViewHandler()
     model = table.get_model()
 
-    _, row_2 = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{text_field.id}": "",
-                f"field_{long_text_field.id}": "",
-                f"field_{integer_field.id}": None,
-                f"field_{decimal_field.id}": None,
-                f"field_{date_field.id}": None,
-                f"field_{date_time_field.id}": None,
-                f"field_{boolean_field.id}": False,
-                f"field_{file_field.id}": [],
-                f"field_{single_select_field.id}": None,
-            },
-            {
-                f"field_{text_field.id}": "Value",
-                f"field_{long_text_field.id}": "Value",
-                f"field_{integer_field.id}": 10,
-                f"field_{decimal_field.id}": 1022,
-                f"field_{date_field.id}": date(2020, 6, 17),
-                f"field_{date_time_field.id}": datetime(
-                    2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
-                ),
-                f"field_{boolean_field.id}": True,
-                f"field_{file_field.id}": [{"name": file_a.name}],
-                f"field_{single_select_field.id}_id": option_1.id,
-                f"field_{link_row_field.id}": [tmp_row.id],
-                f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
-            },
-        ],
-        model=model,
+    _, row_2 = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{text_field.id}": "",
+                    f"field_{long_text_field.id}": "",
+                    f"field_{integer_field.id}": None,
+                    f"field_{decimal_field.id}": None,
+                    f"field_{date_field.id}": None,
+                    f"field_{date_time_field.id}": None,
+                    f"field_{boolean_field.id}": False,
+                    f"field_{file_field.id}": [],
+                    f"field_{single_select_field.id}": None,
+                },
+                {
+                    f"field_{text_field.id}": "Value",
+                    f"field_{long_text_field.id}": "Value",
+                    f"field_{integer_field.id}": 10,
+                    f"field_{decimal_field.id}": 1022,
+                    f"field_{date_field.id}": date(2020, 6, 17),
+                    f"field_{date_time_field.id}": datetime(
+                        2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
+                    ),
+                    f"field_{boolean_field.id}": True,
+                    f"field_{file_field.id}": [{"name": file_a.name}],
+                    f"field_{single_select_field.id}_id": option_1.id,
+                    f"field_{link_row_field.id}": [tmp_row.id],
+                    f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
+                },
+            ],
+            model=model,
+        )
+        .created_rows
     )
 
     view_filter = data_fixture.create_view_filter(
@@ -5729,7 +5761,7 @@ def test_multiple_collaborators_empty_filter_type(data_fixture):
                 multiple_collaborators_field.db_column: [],
             },
         ],
-    )
+    ).created_rows
     handler = ViewHandler()
     for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
         grid_view = data_fixture.create_grid_view(table=table)
@@ -5786,7 +5818,7 @@ def test_multiple_collaborators_not_empty_filter_type(data_fixture):
                 multiple_collaborators_field.db_column: [],
             },
         ],
-    )
+    ).created_rows
     handler = ViewHandler()
     for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
         grid_view = data_fixture.create_grid_view(table=table)
@@ -5852,7 +5884,7 @@ def test_multiple_collaborators_has_filter_type(data_fixture):
                 ],
             },
         ],
-    )
+    ).created_rows
 
     handler = ViewHandler()
     for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
@@ -5980,7 +6012,7 @@ def test_multiple_collaborators_has_not_filter_type(data_fixture):
                 ],
             },
         ],
-    )
+    ).created_rows
 
     handler = ViewHandler()
     for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
@@ -6668,16 +6700,20 @@ def table_view_fields_rows(data_fixture):
     grid_view = data_fixture.create_grid_view(table=table)
     date_field = data_fixture.create_date_field(table=table)
     datetime_field = data_fixture.create_date_field(table=table, date_include_time=True)
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        [
-            {
-                date_field.db_column: date_value,
-                datetime_field.db_column: date_value,
-            }
-            for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            [
+                {
+                    date_field.db_column: date_value,
+                    datetime_field.db_column: date_value,
+                }
+                for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
+            ],
+        )
+        .created_rows
     )
     return table, grid_view, date_field, datetime_field, rows
 
diff --git a/backend/tests/baserow/contrib/database/view/test_view_handler.py b/backend/tests/baserow/contrib/database/view/test_view_handler.py
index 120d3f0a3..be732551f 100755
--- a/backend/tests/baserow/contrib/database/view/test_view_handler.py
+++ b/backend/tests/baserow/contrib/database/view/test_view_handler.py
@@ -4422,14 +4422,18 @@ def test_can_duplicate_views_with_multiple_collaborator_has_filter(data_fixture)
         view=grid, field=field, type="multiple_collaborators_has", value=user_1.id
     )
 
-    rows = RowHandler().force_create_rows(
-        user_1,
-        table,
-        [
-            {field.db_column: []},
-            {field.db_column: [{"id": user_1.id, "name": user_1.first_name}]},
-            {field.db_column: [{"id": user_2.id, "name": user_2.first_name}]},
-        ],
+    rows = (
+        RowHandler()
+        .force_create_rows(
+            user_1,
+            table,
+            [
+                {field.db_column: []},
+                {field.db_column: [{"id": user_1.id, "name": user_1.first_name}]},
+                {field.db_column: [{"id": user_2.id, "name": user_2.first_name}]},
+            ],
+        )
+        .created_rows
     )
 
     results = ViewHandler().get_queryset(grid)
diff --git a/backend/tests/baserow/contrib/database/view/test_view_signals.py b/backend/tests/baserow/contrib/database/view/test_view_signals.py
index cf3459e1a..4955eb0d5 100644
--- a/backend/tests/baserow/contrib/database/view/test_view_signals.py
+++ b/backend/tests/baserow/contrib/database/view/test_view_signals.py
@@ -156,7 +156,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
     with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
         (new_row,) = row_handler.force_create_rows(
             user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
-        )
+        ).created_rows
         p.assert_not_called()
 
     with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
@@ -169,7 +169,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
     with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
         (new_row,) = row_handler.force_create_rows(
             user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
-        )
+        ).created_rows
         p.assert_called_once()
         assert p.call_args[1]["view"].id == view_a.id
         assert p.call_args[1]["row_ids"] == [new_row.id]
@@ -188,7 +188,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
     with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
         (new_row,) = row_handler.force_create_rows(
             user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
-        )
+        ).created_rows
         assert p.call_count == 2
         assert p.call_args_list[0][1]["view"].id == view_a.id
         assert p.call_args_list[0][1]["row_ids"] == [new_row.id]
@@ -209,7 +209,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
     with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
         (new_row,) = row_handler.force_create_rows(
             user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
-        )
+        ).created_rows
         p.assert_not_called()
 
     with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
@@ -498,10 +498,10 @@ def test_rows_enter_and_exit_view_when_data_changes_in_looked_up_tables(
     model_b = table_b.get_model()
     (row_b1,) = row_handler.force_create_rows(
         user, table_b, [{text_field_b.db_column: ""}], model=model_b
-    )
+    ).created_rows
     _, row_a2 = row_handler.force_create_rows(
         user, table_a, [{}, {link_a_to_b.db_column: [row_b1.id]}], model=model_a
-    )
+    ).created_rows
 
     view_a = data_fixture.create_grid_view(table=table_a)
     view_filter = data_fixture.create_view_filter(
@@ -519,7 +519,7 @@ def test_rows_enter_and_exit_view_when_data_changes_in_looked_up_tables(
 
         (row_a3,) = row_handler.force_create_rows(
             user, table_a, [{link_a_to_b.db_column: [row_b1.id]}], model=model_a
-        )
+        ).created_rows
 
         assert p.call_count == 2
         assert p.call_args_list[1][1]["view"].id == view_a.id
diff --git a/backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py b/backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
index ae4479c91..44010cb67 100644
--- a/backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
+++ b/backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
@@ -203,10 +203,14 @@ def test_batch_rows_created_public_views_receive_restricted_row_created_ws_event
         {f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"},
     ]
 
-    rows = RowHandler().create_rows(
-        user=user,
-        table=table,
-        rows_values=rows_to_create,
+    rows = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table,
+            rows_values=rows_to_create,
+        )
+        .created_rows
     )
 
     assert mock_broadcast_to_channel_group.delay.mock_calls == (
@@ -316,10 +320,14 @@ def test_batch_rows_created_public_views_receive_row_created_when_filters_match(
         {f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"},
     ]
 
-    rows = RowHandler().create_rows(
-        user=user,
-        table=table,
-        rows_values=rows_to_create,
+    rows = (
+        RowHandler()
+        .create_rows(
+            user=user,
+            table=table,
+            rows_values=rows_to_create,
+        )
+        .created_rows
     )
 
     assert mock_broadcast_to_channel_group.delay.mock_calls == (
diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py
index 94e1423c9..886811cea 100644
--- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py
+++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py
@@ -322,14 +322,18 @@ def test_local_baserow_list_rows_service_dispatch_data_with_view_and_service_fil
         ],
     )
     field = table.field_set.get(name="Ingredient")
-    [row_1, row_2, _] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "Cheese"},
-            {f"field_{field.id}": "Chicken"},
-            {f"field_{field.id}": "Milk"},
-        ],
+    [row_1, row_2, _] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "Cheese"},
+                {f"field_{field.id}": "Chicken"},
+                {f"field_{field.id}": "Milk"},
+            ],
+        )
+        .created_rows
     )
 
     view = data_fixture.create_grid_view(user, table=table, owned_by=user)
@@ -385,15 +389,19 @@ def test_local_baserow_list_rows_service_dispatch_data_with_varying_filter_types
     )
     ingredient = table.field_set.get(name="Ingredient")
     cost = table.field_set.get(name="Cost")
-    [row_1, row_2, row_3, _] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{ingredient.id}": "Duck", f"field_{cost.id}": 50},
-            {f"field_{ingredient.id}": "Duckling", f"field_{cost.id}": 25},
-            {f"field_{ingredient.id}": "Goose", f"field_{cost.id}": 150},
-            {f"field_{ingredient.id}": "Beef", f"field_{cost.id}": 250},
-        ],
+    [row_1, row_2, row_3, _] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{ingredient.id}": "Duck", f"field_{cost.id}": 50},
+                {f"field_{ingredient.id}": "Duckling", f"field_{cost.id}": 25},
+                {f"field_{ingredient.id}": "Goose", f"field_{cost.id}": 150},
+                {f"field_{ingredient.id}": "Beef", f"field_{cost.id}": 250},
+            ],
+        )
+        .created_rows
     )
 
     view = data_fixture.create_grid_view(
@@ -470,14 +478,18 @@ def test_local_baserow_list_rows_service_dispatch_data_with_view_and_service_sor
     )
     ingredients = table.field_set.get(name="Ingredient")
     cost = table.field_set.get(name="Cost")
-    [row_1, row_2, row_3] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{ingredients.id}": "Duck", f"field_{cost.id}": 50},
-            {f"field_{ingredients.id}": "Goose", f"field_{cost.id}": 150},
-            {f"field_{ingredients.id}": "Beef", f"field_{cost.id}": 250},
-        ],
+    [row_1, row_2, row_3] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{ingredients.id}": "Duck", f"field_{cost.id}": 50},
+                {f"field_{ingredients.id}": "Goose", f"field_{cost.id}": 150},
+                {f"field_{ingredients.id}": "Beef", f"field_{cost.id}": 250},
+            ],
+        )
+        .created_rows
     )
     view = data_fixture.create_grid_view(user, table=table, owned_by=user)
     service_type = LocalBaserowListRowsUserServiceType()
diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py b/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
index 8b1746838..e98bc064b 100644
--- a/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
+++ b/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
@@ -44,15 +44,19 @@ def test_local_baserow_table_service_filterable_mixin_get_table_queryset(
     table_model = table.get_model()
     service = data_fixture.create_local_baserow_list_rows_service(table=table)
 
-    [alessia, alex, alastair, alexandra] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "Alessia"},
-            {f"field_{field.id}": "Alex"},
-            {f"field_{field.id}": "Alastair"},
-            {f"field_{field.id}": "Alexandra"},
-        ],
+    [alessia, alex, alastair, alexandra] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "Alessia"},
+                {f"field_{field.id}": "Alex"},
+                {f"field_{field.id}": "Alastair"},
+                {f"field_{field.id}": "Alexandra"},
+            ],
+        )
+        .created_rows
     )
 
     dispatch_context = FakeDispatchContext()
@@ -254,15 +258,19 @@ def test_local_baserow_table_service_sortable_mixin_get_table_queryset(
     table_model = table.get_model()
     service = data_fixture.create_local_baserow_list_rows_service(table=table)
 
-    [aardvark, badger, crow, dragonfly] = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": "Aardvark"},
-            {f"field_{field.id}": "Badger"},
-            {f"field_{field.id}": "Crow"},
-            {f"field_{field.id}": "Dragonfly"},
-        ],
+    [aardvark, badger, crow, dragonfly] = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": "Aardvark"},
+                {f"field_{field.id}": "Badger"},
+                {f"field_{field.id}": "Crow"},
+                {f"field_{field.id}": "Dragonfly"},
+            ],
+        )
+        .created_rows
     )
 
     dispatch_context = FakeDispatchContext()
@@ -357,15 +365,19 @@ def test_local_baserow_table_service_searchable_mixin_get_table_queryset(
         table = data_fixture.create_database_table(user=user)
         field = data_fixture.create_text_field(name="Names", table=table)
         service = data_fixture.create_local_baserow_list_rows_service(table=table)
-        [alessia, alex, alastair, alexandra] = RowHandler().create_rows(
-            user,
-            table,
-            rows_values=[
-                {f"field_{field.id}": "Alessia"},
-                {f"field_{field.id}": "Alex"},
-                {f"field_{field.id}": "Alastair"},
-                {f"field_{field.id}": "Alexandra"},
-            ],
+        [alessia, alex, alastair, alexandra] = (
+            RowHandler()
+            .create_rows(
+                user,
+                table,
+                rows_values=[
+                    {f"field_{field.id}": "Alessia"},
+                    {f"field_{field.id}": "Alex"},
+                    {f"field_{field.id}": "Alastair"},
+                    {f"field_{field.id}": "Alexandra"},
+                ],
+            )
+            .created_rows
         )
 
     table_model = table.get_model()
diff --git a/changelog/entries/unreleased/feature/2213_introduce_row_update_functionality_during_table_import.json b/changelog/entries/unreleased/feature/2213_introduce_row_update_functionality_during_table_import.json
new file mode 100644
index 000000000..be73ac0b3
--- /dev/null
+++ b/changelog/entries/unreleased/feature/2213_introduce_row_update_functionality_during_table_import.json
@@ -0,0 +1,8 @@
+{
+    "type": "feature",
+    "message": "Introduce row update functionality during table import",
+    "domain": "database",
+    "issue_number": 2213,
+    "bullet_points": [],
+    "created_at": "2025-03-13"
+}
\ No newline at end of file
diff --git a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py
index 4a06c441e..bb2b19c3b 100644
--- a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py
+++ b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py
@@ -2778,35 +2778,39 @@ def test_grouped_aggregate_rows_service_dispatch_max_buckets_sort_on_primary_fie
         direction="ASC",
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {
-                f"field_{field.id}": 40,
-                f"field_{field_2.id}": "Z",
-            },
-            {
-                f"field_{field.id}": 20,
-                f"field_{field_2.id}": "K",
-            },
-            {
-                f"field_{field.id}": 30,
-                f"field_{field_2.id}": "L",
-            },
-            {
-                f"field_{field.id}": 10,
-                f"field_{field_2.id}": "A",
-            },
-            {
-                f"field_{field.id}": 60,
-                f"field_{field_2.id}": "H",
-            },
-            {
-                f"field_{field.id}": 50,
-                f"field_{field_2.id}": "M",
-            },
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {
+                    f"field_{field.id}": 40,
+                    f"field_{field_2.id}": "Z",
+                },
+                {
+                    f"field_{field.id}": 20,
+                    f"field_{field_2.id}": "K",
+                },
+                {
+                    f"field_{field.id}": 30,
+                    f"field_{field_2.id}": "L",
+                },
+                {
+                    f"field_{field.id}": 10,
+                    f"field_{field_2.id}": "A",
+                },
+                {
+                    f"field_{field.id}": 60,
+                    f"field_{field_2.id}": "H",
+                },
+                {
+                    f"field_{field.id}": 50,
+                    f"field_{field_2.id}": "M",
+                },
+            ],
+        )
+        .created_rows
     )
 
     dispatch_context = FakeDispatchContext()
diff --git a/enterprise/backend/tests/baserow_enterprise_tests/webhooks/test_webhook_event_types.py b/enterprise/backend/tests/baserow_enterprise_tests/webhooks/test_webhook_event_types.py
index 9bec30972..9aaf20bd0 100644
--- a/enterprise/backend/tests/baserow_enterprise_tests/webhooks/test_webhook_event_types.py
+++ b/enterprise/backend/tests/baserow_enterprise_tests/webhooks/test_webhook_event_types.py
@@ -392,7 +392,7 @@ def test_rows_enter_view_event_type_paginate_data(
     }
 
     with transaction.atomic():
-        webhook = WebhookHandler().create_table_webhook(
+        WebhookHandler().create_table_webhook(
             user=user,
             table=table,
             url="http://localhost/",
@@ -403,7 +403,7 @@ def test_rows_enter_view_event_type_paginate_data(
             use_user_field_names=True,
         )
 
-        rows = RowHandler().force_create_rows(
+        RowHandler().force_create_rows(
             user=user,
             table=table,
             rows_values=[
diff --git a/enterprise/web-frontend/modules/baserow_enterprise/assets/scss/default.scss b/enterprise/web-frontend/modules/baserow_enterprise/assets/scss/default.scss
index ce24697d8..6b3cba8ad 100644
--- a/enterprise/web-frontend/modules/baserow_enterprise/assets/scss/default.scss
+++ b/enterprise/web-frontend/modules/baserow_enterprise/assets/scss/default.scss
@@ -1,2 +1,2 @@
-@import "@baserow_premium/assets/scss/default";
-@import "components/all";
+@import '@baserow_premium/assets/scss/default';
+@import 'components/all';
diff --git a/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py b/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py
index cc77f2dd5..d0b7ebf6c 100644
--- a/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py
+++ b/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py
@@ -33,10 +33,14 @@ def test_generate_ai_field_value_without_license(premium_data_fixture, api_clien
     table = premium_data_fixture.create_database_table(name="table", database=database)
     field = premium_data_fixture.create_ai_field(table=table, name="ai")
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -71,10 +75,14 @@ def test_generate_ai_field_value_view_field_does_not_exist(
     table = premium_data_fixture.create_database_table(name="table", database=database)
     field = premium_data_fixture.create_ai_field(table=table, name="ai")
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -110,10 +118,14 @@ def test_generate_ai_field_value_view_row_does_not_exist(
     table = premium_data_fixture.create_database_table(name="table", database=database)
     field = premium_data_fixture.create_ai_field(table=table, name="ai")
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -155,10 +167,14 @@ def test_generate_ai_field_value_view_user_not_in_workspace(
     table = premium_data_fixture.create_database_table(name="table", database=database)
     field = premium_data_fixture.create_ai_field(table=table, name="ai")
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -196,10 +212,14 @@ def test_generate_ai_field_value_view_generative_ai_does_not_exist(
         table=table, name="ai", ai_generative_ai_type="does_not_exist"
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -237,12 +257,16 @@ def test_generate_ai_field_value_view_generative_ai_model_does_not_belong_to_typ
         table=table, name="ai", ai_generative_ai_model="does_not_exist"
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {},
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {},
+            ],
+        )
+        .created_rows
     )
 
     response = api_client.post(
@@ -281,10 +305,14 @@ def test_generate_ai_field_value_view_generative_ai(
         table=table, name="ai", ai_prompt="'Hello'"
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}],
+        )
+        .created_rows
     )
     assert patched_generate_ai_values_for_rows.call_count == 0
 
@@ -313,10 +341,14 @@ def test_batch_generate_ai_field_value_limit(api_client, premium_data_fixture):
     field = premium_data_fixture.create_ai_field(
         table=table, name="ai", ai_prompt="'Hello'"
     )
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{}] * (settings.BATCH_ROWS_SIZE_LIMIT + 1),
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{}] * (settings.BATCH_ROWS_SIZE_LIMIT + 1),
+        )
+        .created_rows
     )
 
     row_ids = [row.id for row in rows]
diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py
index 32621d9ed..d3cceb7aa 100644
--- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py
+++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py
@@ -29,7 +29,7 @@ def test_generate_ai_field_value_view_generative_ai(
         table=table, name="ai", ai_prompt="'Hello'"
     )
 
-    rows = RowHandler().create_rows(user, table, rows_values=[{}])
+    rows = RowHandler().create_rows(user, table, rows_values=[{}]).created_rows
 
     assert patched_rows_updated.call_count == 0
     generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
@@ -61,7 +61,7 @@ def test_generate_ai_field_value_view_generative_ai_with_temperature(
         table=table, name="ai", ai_prompt="'Hello'", ai_temperature=0.7
     )
 
-    rows = RowHandler().create_rows(user, table, rows_values=[{}])
+    rows = RowHandler().create_rows(user, table, rows_values=[{}]).created_rows
 
     generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
     updated_row = patched_rows_updated.call_args[1]["rows"][0]
@@ -92,12 +92,16 @@ def test_generate_ai_field_value_view_generative_ai_parse_formula(
         table=table, name="ai", ai_prompt=formula
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{firstname.id}": "Bram", f"field_{lastname.id}": "Wiepjes"},
-        ],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{firstname.id}": "Bram", f"field_{lastname.id}": "Wiepjes"},
+            ],
+        )
+        .created_rows
     )
 
     assert patched_rows_updated.call_count == 0
@@ -132,10 +136,14 @@ def test_generate_ai_field_value_view_generative_ai_invalid_field(
         table=table, name="ai", ai_prompt=formula
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{f"field_{firstname.id}": "Bram"}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{f"field_{firstname.id}": "Bram"}],
+        )
+        .created_rows
     )
     assert patched_rows_updated.call_count == 0
     generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
@@ -172,10 +180,14 @@ def test_generate_ai_field_value_view_generative_ai_invalid_prompt(
         ai_prompt=formula,
     )
 
-    rows = RowHandler().create_rows(
-        user,
-        table,
-        rows_values=[{f"field_{firstname.id}": "Bram"}],
+    rows = (
+        RowHandler()
+        .create_rows(
+            user,
+            table,
+            rows_values=[{f"field_{firstname.id}": "Bram"}],
+        )
+        .created_rows
     )
 
     assert patched_rows_ai_values_generation_error.call_count == 0
diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py
index e67c14f0b..caf4a5c8c 100644
--- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py
+++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py
@@ -1066,13 +1066,17 @@ def test_link_row_field_can_be_sorted_when_linking_an_ai_field(premium_data_fixt
         field=primary_b, value="b", color="green", order=0
     )
 
-    row_b1, row_b2 = RowHandler().force_create_rows(
-        user,
-        table_b,
-        [
-            {primary_b.db_column: opt_1.id},
-            {primary_b.db_column: opt_2.id},
-        ],
+    row_b1, row_b2 = (
+        RowHandler()
+        .force_create_rows(
+            user,
+            table_b,
+            [
+                {primary_b.db_column: opt_1.id},
+                {primary_b.db_column: opt_2.id},
+            ],
+        )
+        .created_rows
     )
 
     table_a, table_b, link_field = premium_data_fixture.create_two_linked_tables(
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/row_edit_modal_comments_notification_mode.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/row_edit_modal_comments_notification_mode.scss
index 936e1a9fc..a14ba2843 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/row_edit_modal_comments_notification_mode.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/row_edit_modal_comments_notification_mode.scss
@@ -6,4 +6,4 @@
   &:hover {
     color: $color-neutral-800;
   }
- }
\ No newline at end of file
+}
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/all.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/all.scss
index dca9222ff..285d56b22 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/all.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/all.scss
@@ -4,4 +4,4 @@
 @import 'kanban';
 @import 'decorators';
 @import 'view_date_selector';
-@import 'view_date_indicator';
\ No newline at end of file
+@import 'view_date_indicator';
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/all.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/all.scss
index ca40c0336..d564dbe25 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/all.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/all.scss
@@ -4,4 +4,4 @@
 @import 'timeline_date_settings_init_box';
 @import 'timeline_grid';
 @import 'timeline_grid_row';
-@import 'timeline_timescale_context';
\ No newline at end of file
+@import 'timeline_timescale_context';
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_grid_row.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_grid_row.scss
index a50855a7e..83609103b 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_grid_row.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_grid_row.scss
@@ -44,7 +44,7 @@
 
 .timeline-grid-row__label {
   @extend %ellipsis;
-  
+
   margin-right: 8px;
   font-size: 12px;
   line-height: 20px;
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_timescale_context.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_timescale_context.scss
index 9e53a49ef..1b5cc9e9a 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_timescale_context.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_timescale_context.scss
@@ -6,4 +6,4 @@
   display: flex;
   align-items: center;
   justify-content: space-between;
-}
\ No newline at end of file
+}
diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_view.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_view.scss
index 9aa640b54..6e1e507be 100644
--- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_view.scss
+++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/views/timeline/timeline_view.scss
@@ -2,4 +2,4 @@
   width: 100%;
   height: 100%;
   overflow-y: auto;
-}
\ No newline at end of file
+}
diff --git a/premium/web-frontend/modules/baserow_premium/pages/admin/license.vue b/premium/web-frontend/modules/baserow_premium/pages/admin/license.vue
index 4b64e0d4d..e811f6c3a 100644
--- a/premium/web-frontend/modules/baserow_premium/pages/admin/license.vue
+++ b/premium/web-frontend/modules/baserow_premium/pages/admin/license.vue
@@ -36,11 +36,9 @@
             <div class="license-detail__item-value">
               <Badge :color="licenseType.getLicenseBadgeColor()" bold>
                 {{ licenseType.getName() }}
-              </Badge
-              >
-              <Badge v-if="!license.is_active" color="red">{{
-                  $t('licenses.expired')
-                }}
+              </Badge>
+              <Badge v-if="!license.is_active" color="red"
+                >{{ $t('licenses.expired') }}
               </Badge>
             </div>
           </div>
@@ -105,7 +103,8 @@
               </div>
             </div>
             <div class="license-detail__item-value">
-              {{ license.application_users_taken }} / {{ license.application_users }}
+              {{ license.application_users_taken }} /
+              {{ license.application_users }}
             </div>
           </div>
           <div class="license-detail__item">
@@ -180,15 +179,14 @@
             <i18n path="license.disconnectDescription" tag="p">
               <template #contact>
                 <a href="https://baserow.io/contact" target="_blank"
-                >baserow.io/contact</a
+                  >baserow.io/contact</a
                 >
               </template>
             </i18n>
 
             <Button type="danger" @click="$refs.disconnectModal.show()">
               {{ $t('license.disconnectLicense') }}
-            </Button
-            >
+            </Button>
             <DisconnectLicenseModal
               ref="disconnectModal"
               :license="license"
@@ -204,18 +202,15 @@
 import moment from '@baserow/modules/core/moment'
 import { notifyIf } from '@baserow/modules/core/utils/error'
 import LicenseService from '@baserow_premium/services/license'
-import DisconnectLicenseModal
-  from '@baserow_premium/components/license/DisconnectLicenseModal'
-import ManualLicenseSeatsForm
-  from '@baserow_premium/components/license/ManualLicenseSeatForm'
-import AutomaticLicenseSeats
-  from '@baserow_premium/components/license/AutomaticLicenseSeats'
+import DisconnectLicenseModal from '@baserow_premium/components/license/DisconnectLicenseModal'
+import ManualLicenseSeatsForm from '@baserow_premium/components/license/ManualLicenseSeatForm'
+import AutomaticLicenseSeats from '@baserow_premium/components/license/AutomaticLicenseSeats'
 
 export default {
   components: {
     DisconnectLicenseModal,
     ManualLicenseSeatsForm,
-    AutomaticLicenseSeats
+    AutomaticLicenseSeats,
   },
   layout: 'app',
   middleware: 'staff',
@@ -226,14 +221,14 @@ export default {
     } catch {
       return error({
         statusCode: 404,
-        message: 'The license was not found.'
+        message: 'The license was not found.',
       })
     }
   },
   data() {
     return {
       user: null,
-      checkLoading: false
+      checkLoading: false,
     }
   },
   computed: {
@@ -269,7 +264,7 @@ export default {
       }
 
       this.checkLoading = false
-    }
-  }
+    },
+  },
 }
 </script>
diff --git a/premium/web-frontend/modules/baserow_premium/pages/admin/licenses.vue b/premium/web-frontend/modules/baserow_premium/pages/admin/licenses.vue
index 681af89ac..b44f61843 100644
--- a/premium/web-frontend/modules/baserow_premium/pages/admin/licenses.vue
+++ b/premium/web-frontend/modules/baserow_premium/pages/admin/licenses.vue
@@ -127,8 +127,12 @@
               {{ license.seats_taken }} / {{ license.seats }}
               {{ $t('licenses.seats') }}
             </li>
-            <li v-if="license.application_users" class="licenses__item-detail-item">
-              {{ license.application_users_taken }} / {{ license.application_users }}
+            <li
+              v-if="license.application_users"
+              class="licenses__item-detail-item"
+            >
+              {{ license.application_users_taken }} /
+              {{ license.application_users }}
               {{ $t('licenses.applicationUsers') }}
             </li>
           </ul>
@@ -148,7 +152,6 @@
               ></i>
             </li>
           </ul>
-
         </nuxt-link>
       </div>
     </div>
diff --git a/web-frontend/modules/database/components/table/ImportFileModal.vue b/web-frontend/modules/database/components/table/ImportFileModal.vue
index c23d3efdd..115fb90ed 100644
--- a/web-frontend/modules/database/components/table/ImportFileModal.vue
+++ b/web-frontend/modules/database/components/table/ImportFileModal.vue
@@ -55,7 +55,39 @@
           @header="onHeader($event)"
           @data="onData($event)"
           @getData="onGetData($event)"
-        />
+        >
+          <template #upsertMapping>
+            <div class="control margin-top-1">
+              <label class="control__label control__label--small">
+                {{ $t('importFileModal.useUpsertField') }}
+                <HelpIcon
+                  :icon="'info-empty'"
+                  :tooltip="$t('importFileModal.upsertTooltip')"
+                />
+              </label>
+              <div class="control__elements">
+                <Checkbox
+                  v-model="useUpsertField"
+                  :disabled="!mappingNotEmpty"
+                  >{{ $t('common.yes') }}</Checkbox
+                >
+              </div>
+
+              <Dropdown
+                v-model="upsertField"
+                :disabled="!useUpsertField"
+                class="margin-top-1"
+              >
+                <DropdownItem
+                  v-for="item in availableUpsertFields"
+                  :key="item.id"
+                  :name="item.name"
+                  :value="item.id"
+                />
+              </Dropdown>
+            </div>
+          </template>
+        </component>
       </div>
 
       <ImportErrorReport :job="job" :error="error"></ImportErrorReport>
@@ -204,6 +236,8 @@ export default {
       getData: null,
       previewData: [],
       dataLoaded: false,
+      useUpsertField: false,
+      upsertField: undefined,
     }
   },
   computed: {
@@ -213,12 +247,19 @@ export default {
       }
       return this.database.tables.some(({ id }) => id === this.job.table_id)
     },
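+    // True when at least one imported column is mapped to an existing field.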
+    mappingNotEmpty() {
+      return Object.values(this.mapping).some(
+        (value) => this.fieldIndexMap[value] !== undefined
+      )
+    },
     canBeSubmitted() {
       return (
         this.importer &&
         Object.values(this.mapping).some(
           (value) => this.fieldIndexMap[value] !== undefined
-        )
+        ) &&
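+        // When upserting, the selected upsert field must be one of the
+        // mapped columns.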
+        (!this.useUpsertField ||
+          Object.values(this.mapping).includes(this.upsertField))
       )
     },
     fieldTypes() {
@@ -307,6 +348,14 @@ export default {
     selectedFields() {
       return Object.values(this.mapping)
     },
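+    // Only mapped fields whose field type supports upserting can be used
+    // as the upsert key.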
+    availableUpsertFields() {
+      const selected = Object.values(this.mapping)
+      return this.fields.filter((field) => {
+        return (
+          selected.includes(field.id) && this.fieldTypes[field.type].canUpsert()
+        )
+      })
+    },
     progressPercentage() {
       switch (this.state) {
         case null:
@@ -417,6 +466,14 @@ export default {
       this.showProgressBar = false
       this.reset(false)
       let data = null
+      const importConfiguration = {}
+
+      if (this.upsertField) {
+        // At the moment only one field is used, but the upsert key may be
+        // composed of several fields in the future.
+        importConfiguration.upsert_fields = [this.upsertField]
+        importConfiguration.upsert_values = []
+      }
 
       if (typeof this.getData === 'function') {
         try {
@@ -425,6 +482,18 @@ export default {
           await this.$ensureRender()
 
           data = await this.getData()
+          const upsertFields = importConfiguration.upsert_fields || []
+          const upsertValues = importConfiguration.upsert_values || []
+          const upsertFieldIndexes = []
+
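+          // Collect the import column indexes that map to the selected
+          // upsert fields, so their values can be extracted for each row.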
+          Object.entries(this.mapping).forEach(
+            ([importIndex, targetFieldId]) => {
+              if (upsertFields.includes(targetFieldId)) {
+                upsertFieldIndexes.push(importIndex)
+              }
+            }
+          )
+
           const fieldMapping = Object.entries(this.mapping)
             .filter(
               ([, targetFieldId]) =>
@@ -456,22 +525,41 @@ export default {
 
           // Processes the data by chunk to avoid UI freezes
           const result = []
+
           for (const chunk of _.chunk(data, 1000)) {
             result.push(
               chunk.map((row) => {
                 const newRow = clone(defaultRow)
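+                // Values from this row's mapped upsert fields; together
+                // they form the row's upsert key.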
+                const upsertRow = []
                 fieldMapping.forEach(([importIndex, targetIndex]) => {
                   newRow[targetIndex] = prepareValueByField[targetIndex](
                     row[importIndex]
                   )
+                  if (upsertFieldIndexes.includes(importIndex)) {
+                    upsertRow.push(newRow[targetIndex])
+                  }
                 })
 
+                if (upsertFields.length > 0 && upsertRow.length > 0) {
+                  if (upsertFields.length !== upsertRow.length) {
+                    throw new Error(
+                      "upsert row length doesn't match required fields"
+                    )
+                  }
+                  upsertValues.push(upsertRow)
+                }
                 return newRow
               })
             )
             await this.$ensureRender()
           }
           data = result.flat()
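+          // The backend expects one upsert key per imported row, so abort
+          // if the counts diverge.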
+          if (upsertFields.length > 0) {
+            if (upsertValues.length !== data.length) {
+              throw new Error('upsert values length mismatch')
+            }
+            importConfiguration.upsert_values = upsertValues
+          }
         } catch (error) {
           this.reset()
           this.handleError(error, 'application')
@@ -493,7 +581,8 @@ export default {
           data,
           {
             onUploadProgress,
-          }
+          },
+          importConfiguration.upsert_fields ? importConfiguration : null
         )
         this.startJobPoller(job)
       } catch (error) {
diff --git a/web-frontend/modules/database/components/table/TableCSVImporter.vue b/web-frontend/modules/database/components/table/TableCSVImporter.vue
index 803212fb6..7f38393a9 100644
--- a/web-frontend/modules/database/components/table/TableCSVImporter.vue
+++ b/web-frontend/modules/database/components/table/TableCSVImporter.vue
@@ -106,6 +106,9 @@
         </div>
       </div>
     </div>
+    <div v-if="values.filename !== ''" class="row">
+      <div class="col col-8 margin-top-1"><slot name="upsertMapping" /></div>
+    </div>
     <Alert v-if="error !== ''" type="error">
       <template #title> {{ $t('common.wrong') }} </template>
       {{ error }}
diff --git a/web-frontend/modules/database/components/table/TableJSONImporter.vue b/web-frontend/modules/database/components/table/TableJSONImporter.vue
index b76e334b1..8a7cd8cab 100644
--- a/web-frontend/modules/database/components/table/TableJSONImporter.vue
+++ b/web-frontend/modules/database/components/table/TableJSONImporter.vue
@@ -75,6 +75,11 @@
         ></CharsetDropdown>
       </div>
     </div>
+
+    <div v-if="values.filename !== ''" class="control margin-top-2">
+      <slot name="upsertMapping" />
+    </div>
+
     <Alert v-if="error !== ''" type="error">
       <template #title> {{ $t('common.wrong') }} </template>
       {{ error }}
diff --git a/web-frontend/modules/database/components/table/TablePasteImporter.vue b/web-frontend/modules/database/components/table/TablePasteImporter.vue
index 1e7deaff5..c1e97807c 100644
--- a/web-frontend/modules/database/components/table/TablePasteImporter.vue
+++ b/web-frontend/modules/database/components/table/TablePasteImporter.vue
@@ -28,6 +28,10 @@
       </Checkbox>
     </FormGroup>
 
+    <div v-if="values.filename !== ''" class="control margin-top-0">
+      <slot name="upsertMapping" />
+    </div>
+
     <Alert v-if="error !== ''" type="error">
       <template #title> {{ $t('common.wrong') }} </template>
       {{ error }}
diff --git a/web-frontend/modules/database/components/table/TableXMLImporter.vue b/web-frontend/modules/database/components/table/TableXMLImporter.vue
index b2e5cf23d..a01562cf4 100644
--- a/web-frontend/modules/database/components/table/TableXMLImporter.vue
+++ b/web-frontend/modules/database/components/table/TableXMLImporter.vue
@@ -27,7 +27,7 @@
         </div>
       </template>
       <div class="control__elements">
-        <div class="file-upload">
+        <div class="file-upload margin-top-1">
           <input
             v-show="false"
             ref="file"
@@ -61,6 +61,10 @@
         <div v-if="v$.values.filename.$error" class="error">
           {{ v$.values.filename.$errors[0]?.$message }}
         </div>
+
+        <div v-if="values.filename !== ''" class="control margin-top-1">
+          <slot name="upsertMapping" />
+        </div>
       </div>
     </div>
     <Alert v-if="error !== ''" type="error">
diff --git a/web-frontend/modules/database/fieldTypes.js b/web-frontend/modules/database/fieldTypes.js
index 7ff3da909..1abd961da 100644
--- a/web-frontend/modules/database/fieldTypes.js
+++ b/web-frontend/modules/database/fieldTypes.js
@@ -542,10 +542,13 @@ export class FieldType extends Registerable {
   }
 
   /**
-   * This hook is called before the field's value is copied to the clipboard.
-   * Optionally formatting can be done here. By default the value is always
-   * converted to a string.
+   * Indicates whether a field of this type can be used during import to match
+   * existing rows, updating rows whose value equals the imported one instead
+   * of inserting new ones.
    */
+  canUpsert() {
+    return false
+  }
+
   /**
    * This hook is called before the field's value is copied to the clipboard.
    * Optionally formatting can be done here. By default the value is always
@@ -991,6 +994,10 @@ export class TextFieldType extends FieldType {
     return field.text_default
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -1102,6 +1109,10 @@ export class LongTextFieldType extends FieldType {
     return ''
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -1551,6 +1562,10 @@ export class NumberFieldType extends FieldType {
     return ['text', '1', '9']
   }
 
+  canUpsert() {
+    return true
+  }
+
   /**
    * When searching a cell's value, this should return the value to match the user's
    * search term against. We can't use `toHumanReadableString` here as it needs to be
@@ -1765,6 +1780,10 @@ export class RatingFieldType extends FieldType {
     return 0
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       if (a[name] === b[name]) {
@@ -1899,6 +1918,10 @@ export class BooleanFieldType extends FieldType {
     return ['icon', 'baserow-icon-circle-empty', 'baserow-icon-circle-checked']
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const intA = +a[name]
@@ -2252,6 +2275,10 @@ export class DateFieldType extends BaseDateFieldType {
     return true
   }
 
+  canUpsert() {
+    return true
+  }
+
   parseQueryParameter(field, value) {
     return this.formatValue(
       field.field,
@@ -2718,6 +2745,10 @@ export class DurationFieldType extends FieldType {
     return this.formatValue(field, value)
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const aValue = a[name]
@@ -2865,6 +2896,10 @@ export class URLFieldType extends FieldType {
     return isValidURL(value) ? value : ''
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -2964,6 +2999,10 @@ export class EmailFieldType extends FieldType {
     return isValidEmail(value) ? value : ''
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -3810,6 +3849,10 @@ export class PhoneNumberFieldType extends FieldType {
     return isSimplePhoneNumber(value) ? value : ''
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -4456,6 +4499,10 @@ export class UUIDFieldType extends FieldType {
     return RowCardFieldUUID
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       const stringA = a[name] === null ? '' : '' + a[name]
@@ -4535,6 +4582,10 @@ export class AutonumberFieldType extends FieldType {
     return RowCardFieldAutonumber
   }
 
+  canUpsert() {
+    return true
+  }
+
   getSort(name, order) {
     return (a, b) => {
       if (a[name] === b[name]) {
diff --git a/web-frontend/modules/database/locales/en.json b/web-frontend/modules/database/locales/en.json
index 8eeba08f7..13dd1e292 100644
--- a/web-frontend/modules/database/locales/en.json
+++ b/web-frontend/modules/database/locales/en.json
@@ -441,7 +441,9 @@
     "fieldMappingDescription": "We have automatically mapped the columns of the Baserow fields in your table. You can change them below. Any incompatible cell will remain empty after the import.",
     "selectImportMessage": "Please select data to import.",
     "filePreview": "File content preview",
-    "importPreview": "Import preview"
+    "importPreview": "Import preview",
+    "useUpsertField": "Update rows if they already exist",
+    "upsertTooltip": "Match existing rows using a unique field to overwrite data with imported values."
   },
   "formulaAdvancedEditContext": {
     "textAreaFormulaInputPlaceholder": "Click to edit the formula",
diff --git a/web-frontend/modules/database/mixins/importer.js b/web-frontend/modules/database/mixins/importer.js
index 25b90984e..dc64e7c2d 100644
--- a/web-frontend/modules/database/mixins/importer.js
+++ b/web-frontend/modules/database/mixins/importer.js
@@ -9,6 +9,16 @@ import {
 const IMPORT_PREVIEW_MAX_ROW_COUNT = 6
 
 export default {
+  props: {
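+    // Mapping of imported column indexes to target field ids, provided by
+    // the parent import modal.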
+    mapping: {
+      type: Object,
+      required: false,
+      default: () => {
+        return {}
+      },
+    },
+  },
+
   data() {
     return {
       fileLoadingProgress: 0,
diff --git a/web-frontend/modules/database/services/table.js b/web-frontend/modules/database/services/table.js
index 918680d9f..4053ba8d1 100644
--- a/web-frontend/modules/database/services/table.js
+++ b/web-frontend/modules/database/services/table.js
@@ -29,10 +29,15 @@ export default (client) => {
 
       return client.post(`/database/tables/database/${databaseId}/`, values)
     },
-    importData(tableId, data, config = null) {
+    importData(tableId, data, config = null, importConfiguration = null) {
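+      // Optionally include the upsert configuration (upsert_fields and
+      // upsert_values) so the backend can update matching rows.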
+      const payload = { data }
+      if (importConfiguration) {
+        payload.configuration = importConfiguration
+      }
+
       return client.post(
         `/database/tables/${tableId}/import/async/`,
-        { data },
+        payload,
         config
       )
     },