mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-06 05:55:28 +00:00

2213 upsert in table import

This commit is contained in:
Cezary Statkiewicz 2025-03-18 21:58:54 +01:00 committed by Bram Wiepjes
parent cac5aaf28e
commit a5145514a6
80 changed files with 2623 additions and 1136 deletions
backend
changelog/entries/unreleased/feature
enterprise
backend/tests/baserow_enterprise_tests
integrations/local_baserow/service_types
webhooks
web-frontend/modules/baserow_enterprise/assets/scss
premium
web-frontend/modules/database

View file

@ -1,9 +1,71 @@
from django.utils.functional import lazy
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from baserow.contrib.database.api.data_sync.serializers import DataSyncSerializer
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.models import Table
class TableImportConfiguration(serializers.Serializer):
"""
Additional table import configuration.
"""
upsert_fields = serializers.ListField(
child=serializers.IntegerField(min_value=1),
min_length=1,
allow_null=True,
allow_empty=True,
default=None,
help_text=lazy(
lambda: (
"A list of field IDs in the table used to generate a value for "
"identifying a row during the upsert process in file import. Each "
"field ID must reference an existing field in the table, which will "
"be used to match provided values against existing ones to determine "
"whether a row should be inserted or updated.\n "
"Field types that can be used in upsert fields: "
f"{','.join([f.type for f in field_type_registry.get_all() if f.can_upsert])}. "
"If specified, `upsert_values` should also be provided."
)
),
)
upsert_values = serializers.ListField(
allow_empty=True,
allow_null=True,
default=None,
child=serializers.ListField(
min_length=1,
),
help_text=(
"A list of values that are identifying rows in imported data.\n "
"The number of rows in `upsert_values` should be equal to the number of "
"rows in imported data. Each row in `upsert_values` should contain a "
"list of values that match the number and field types of fields selected "
"in `upsert_fields`. Based on `upsert_fields`, a similar upsert values "
"will be calculated for each row in the table.\n "
"There's no guarantee of uniqueness of row identification calculated from "
"`upsert_values` nor from the table. Repeated upsert values are compared "
"in order with matching values in the table. The imported data must be in "
"the same order as the table rows for correct matching."
),
)
def validate(self, attrs):
if attrs.get("upsert_fields") and not len(attrs.get("upsert_values") or []):
raise ValidationError(
{
"upsert_value": (
"upsert_values must not be empty "
"when upsert_fields are provided."
)
}
)
return attrs
class TableSerializer(serializers.ModelSerializer):
data_sync = DataSyncSerializer()
@ -74,10 +136,26 @@ class TableImportSerializer(serializers.Serializer):
"for adding two rows to a table with two writable fields." "for adding two rows to a table with two writable fields."
), ),
) )
configuration = TableImportConfiguration(required=False, default=None)
class Meta:
fields = ("data",)
def validate(self, attrs):
if attrs.get("configuration"):
if attrs["configuration"].get("upsert_values"):
if len(attrs["configuration"].get("upsert_values")) != len(
attrs["data"]
):
msg = (
"`data` and `configuration.upsert_values` "
"should have the same length."
)
raise ValidationError(
{"data": msg, "configuration": {"upsert_values": msg}}
)
return attrs
class TableUpdateSerializer(serializers.ModelSerializer):
class Meta:
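For reference, a request body for the asynchronous table import endpoint that uses the new configuration could look like the sketch below. The field IDs (10, 11) and the row values are illustrative only; `upsert_values` must have the same length as `data`, and each entry must contain one value per field listed in `upsert_fields`.

# Hypothetical example payload for the async table import endpoint.
payload = {
    "data": [
        ["John", "Von Neumann", "1903-12-28"],
        ["Blaise", "Pascal", "1623-06-19"],
    ],
    "configuration": {
        "upsert_fields": [10, 11],  # e.g. the "First name" and "Last name" field IDs
        "upsert_values": [
            ["John", "Von Neumann"],
            ["Blaise", "Pascal"],
        ],
    },
}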

View file

@ -489,14 +489,14 @@ class AsyncTableImportView(APIView):
workspace=table.database.workspace,
context=table,
)
configuration = data.get("configuration")
data = data["data"]
file_import_job = JobHandler().create_and_start_job(
request.user,
"file_import",
data=data,
table=table,
configuration=configuration,
)
serializer = job_type_registry.get_serializer(file_import_job, JobSerializer)

View file

@ -412,6 +412,8 @@ class TextFieldType(CollationSortMixin, FieldType):
serializer_field_names = ["text_default"] serializer_field_names = ["text_default"]
_can_group_by = True _can_group_by = True
can_upsert = True
def get_serializer_field(self, instance, **kwargs): def get_serializer_field(self, instance, **kwargs):
required = kwargs.get("required", False) required = kwargs.get("required", False)
return serializers.CharField( return serializers.CharField(
@ -456,6 +458,7 @@ class LongTextFieldType(CollationSortMixin, FieldType):
model_class = LongTextField model_class = LongTextField
allowed_fields = ["long_text_enable_rich_text"] allowed_fields = ["long_text_enable_rich_text"]
serializer_field_names = ["long_text_enable_rich_text"] serializer_field_names = ["long_text_enable_rich_text"]
can_upsert = True
def check_can_group_by(self, field: Field, sort_type: str) -> bool: def check_can_group_by(self, field: Field, sort_type: str) -> bool:
return not field.long_text_enable_rich_text return not field.long_text_enable_rich_text
@ -570,6 +573,7 @@ class NumberFieldType(FieldType):
} }
_can_group_by = True _can_group_by = True
_db_column_fields = ["number_decimal_places"] _db_column_fields = ["number_decimal_places"]
can_upsert = True
def prepare_value_for_db(self, instance: NumberField, value): def prepare_value_for_db(self, instance: NumberField, value):
if value is None: if value is None:
@ -811,6 +815,7 @@ class RatingFieldType(FieldType):
serializer_field_names = ["max_value", "color", "style"] serializer_field_names = ["max_value", "color", "style"]
_can_group_by = True _can_group_by = True
_db_column_fields = [] _db_column_fields = []
can_upsert = True
def prepare_value_for_db(self, instance, value): def prepare_value_for_db(self, instance, value):
if not value: if not value:
@ -936,6 +941,7 @@ class BooleanFieldType(FieldType):
type = "boolean" type = "boolean"
model_class = BooleanField model_class = BooleanField
_can_group_by = True _can_group_by = True
can_upsert = True
def get_alter_column_prepare_new_value(self, connection, from_field, to_field): def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
""" """
@ -1025,6 +1031,7 @@ class DateFieldType(FieldType):
} }
_can_group_by = True _can_group_by = True
_db_column_fields = ["date_include_time"] _db_column_fields = ["date_include_time"]
can_upsert = True
def can_represent_date(self, field): def can_represent_date(self, field):
return True return True
@ -1931,6 +1938,7 @@ class DurationFieldType(FieldType):
serializer_field_names = ["duration_format"] serializer_field_names = ["duration_format"]
_can_group_by = True _can_group_by = True
_db_column_fields = [] _db_column_fields = []
can_upsert = True
def get_model_field(self, instance: DurationField, **kwargs): def get_model_field(self, instance: DurationField, **kwargs):
return DurationModelField(instance.duration_format, null=True, **kwargs) return DurationModelField(instance.duration_format, null=True, **kwargs)
@ -3483,6 +3491,7 @@ class LinkRowFieldType(
class EmailFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType): class EmailFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):
type = "email" type = "email"
model_class = EmailField model_class = EmailField
can_upsert = True
@property @property
def regex(self): def regex(self):
@ -4742,6 +4751,7 @@ class PhoneNumberFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):
type = "phone_number" type = "phone_number"
model_class = PhoneNumberField model_class = PhoneNumberField
can_upsert = True
MAX_PHONE_NUMBER_LENGTH = 100 MAX_PHONE_NUMBER_LENGTH = 100

View file

@ -210,6 +210,12 @@ class FieldType(
some fields can depend on it like the `lookup` field.
"""
can_upsert = False
"""
A field of this type may be used to calculate a match value during import,
which allows existing rows to be updated with imported data instead of new
rows being added.
"""
@property
def db_column_fields(self) -> Set[str]:
if self._db_column_fields is not None:
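To illustrate how a field type opts in, a hypothetical subclass only needs to set the new flag; the class below is not part of this commit and the model class is only assumed for the sketch.

# Hypothetical field type whose values may be used to build the upsert match
# value during file import.
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.fields.registries import FieldType


class ExampleUpsertableFieldType(FieldType):
    type = "example_upsertable"
    model_class = TextField  # illustrative model class
    can_upsert = True  # values of this field can identify rows during import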

View file

@ -26,6 +26,7 @@ from baserow.contrib.database.fields.exceptions import (
)
from baserow.contrib.database.rows.actions import ImportRowsActionType
from baserow.contrib.database.rows.exceptions import ReportMaxErrorCountExceeded
from baserow.contrib.database.rows.types import FileImportDict
from baserow.contrib.database.table.actions import CreateTableActionType
from baserow.contrib.database.table.exceptions import (
InitialTableDataDuplicateName,
@ -91,6 +92,7 @@ class FileImportJobType(JobType):
filtered_dict = dict(**values)
filtered_dict.pop("data")
filtered_dict.pop("configuration", None)
return filtered_dict
def after_job_creation(self, job, values):
@ -99,7 +101,10 @@ class FileImportJobType(JobType):
"""
data_file = ContentFile(
json.dumps(
{"data": values["data"], "configuration": values.get("configuration")},
ensure_ascii=False,
).encode("utf8")
)
job.data_file.save(None, data_file)
@ -154,8 +159,7 @@ class FileImportJobType(JobType):
"""
with job.data_file.open("r") as fin:
data: FileImportDict = json.load(fin)
try:
if job.table is None:
new_table, error_report = action_type_registry.get_by_type(
@ -164,7 +168,7 @@ class FileImportJobType(JobType):
job.user,
job.database,
name=job.name,
data=data["data"],
first_row_header=job.first_row_header,
progress=progress,
)
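With this change the job's serialized data file contains both the rows and the import configuration instead of a bare list of rows. A sketch of what `after_job_creation` now writes, with illustrative values (`configuration` is simply `None` when no upsert configuration was provided):

# Hypothetical contents of job.data_file after after_job_creation runs.
stored_payload = {
    "data": [["1", 1], ["2", 1]],
    "configuration": {"upsert_fields": [10], "upsert_values": [["1"], ["2"]]},
}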

View file

@ -66,7 +66,7 @@ class DatabasePlugin(Plugin):
["John", "Von Neumann", "", True], ["John", "Von Neumann", "", True],
["Blaise", "Pascal", "", True], ["Blaise", "Pascal", "", True],
] ]
row_handler.import_rows(user, table, data, send_realtime_update=False) row_handler.import_rows(user, table, data=data, send_realtime_update=False)
# Creating the example projects table. # Creating the example projects table.
table = table_handler.create_table_and_fields( table = table_handler.create_table_and_fields(
@ -86,4 +86,4 @@ class DatabasePlugin(Plugin):
[_("Computer architecture"), str(date(1945, 1, 1)), False], [_("Computer architecture"), str(date(1945, 1, 1)), False],
[_("Cellular Automata"), str(date(1952, 6, 1)), False], [_("Cellular Automata"), str(date(1952, 6, 1)), False],
] ]
row_handler.import_rows(user, table, data, send_realtime_update=False) row_handler.import_rows(user, table, data=data, send_realtime_update=False)

View file

@ -95,7 +95,9 @@ def load_test_data():
("Rabbit", select_by_name["Meat"], fake.sentence(nb_words=10)), ("Rabbit", select_by_name["Meat"], fake.sentence(nb_words=10)),
] ]
RowHandler().import_rows(user, products_table, data, send_realtime_update=False) RowHandler().import_rows(
user, products_table, data=data, send_realtime_update=False
)
try: try:
suppliers_table = Table.objects.get(name="Suppliers", database=database) suppliers_table = Table.objects.get(name="Suppliers", database=database)
@ -195,7 +197,7 @@ def load_test_data():
] ]
RowHandler().import_rows( RowHandler().import_rows(
user, suppliers_table, data, send_realtime_update=False user, suppliers_table, data=data, send_realtime_update=False
) )
try: try:
@ -253,7 +255,7 @@ def load_test_data():
] ]
RowHandler().import_rows( RowHandler().import_rows(
user, retailers_table, data, send_realtime_update=False user, retailers_table, data=data, send_realtime_update=False
) )
try: try:
@ -358,5 +360,5 @@ def load_test_data():
] ]
RowHandler().import_rows( RowHandler().import_rows(
user, user_accounts_table, data, send_realtime_update=False user, user_accounts_table, data=data, send_realtime_update=False
) )

View file

@ -6,6 +6,8 @@ from typing import Any, Dict, List, Optional, Tuple, Type
from django.contrib.auth.models import AbstractUser
from django.utils.translation import gettext_lazy as _
from loguru import logger
from baserow.contrib.database.action.scopes import (
TABLE_ACTION_CONTEXT,
TableActionScopeType,
@ -18,6 +20,7 @@ from baserow.contrib.database.rows.handler import (
GeneratedTableModelForUpdate,
RowHandler,
)
from baserow.contrib.database.rows.types import FileImportDict
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.core.action.models import Action
@ -178,13 +181,17 @@ class CreateRowsActionType(UndoableActionType):
"Can't create rows because it has a data sync."
)
rows = (
RowHandler()
.create_rows(
user,
table,
rows_values,
before_row=before_row,
model=model,
send_webhook_events=send_webhook_events,
)
.created_rows
)
workspace = table.database.workspace
@ -244,7 +251,7 @@ class ImportRowsActionType(UndoableActionType):
cls,
user: AbstractUser,
table: Table,
data: FileImportDict,
progress: Optional[Progress] = None,
) -> Tuple[List[GeneratedTableModel], Dict[str, Any]]:
"""
@ -270,9 +277,14 @@ class ImportRowsActionType(UndoableActionType):
)
created_rows, error_report = RowHandler().import_rows(
user,
table,
data=data["data"],
configuration=data.get("configuration") or {},
progress=progress,
)
if error_report:
logger.warning(f"Errors during rows import: {error_report}")
workspace = table.database.workspace
params = cls.Params(
table.id,

View file

@ -36,3 +36,12 @@ class CannotDeleteRowsInTable(Exception):
""" """
Raised when it's not possible to delete rows in the table. Raised when it's not possible to delete rows in the table.
""" """
class InvalidRowLength(Exception):
"""
Row's length doesn't match expected length based on schema.
"""
def __init__(self, row_idx: int):
self.row_idx = row_idx

View file

@ -1,14 +1,13 @@
from collections import defaultdict
from copy import deepcopy
from decimal import Decimal
from functools import cached_property
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterable,
List,
NamedTuple,
NewType,
Optional,
Set,
Tuple,
@ -17,24 +16,37 @@ from typing import (
cast,
)
from django import db
from django.contrib.auth.models import AbstractUser
from django.core.exceptions import ValidationError
from django.db import connection, transaction
from django.db.models import Field as DjangoField
from django.db.models import Model, QuerySet, Window
from django.db.models.expressions import RawSQL
from django.db.models.fields.related import ForeignKey, ManyToManyField
from django.db.models.functions import RowNumber
from django.utils.encoding import force_str
from celery.utils import chunks
from opentelemetry import metrics, trace
from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
from baserow.contrib.database.fields.dependencies.update_collector import (
FieldUpdateCollector,
)
from baserow.contrib.database.fields.exceptions import (
FieldNotInTable,
IncompatibleField,
)
from baserow.contrib.database.fields.field_cache import FieldCache
from baserow.contrib.database.fields.registries import FieldType, field_type_registry
from baserow.contrib.database.fields.utils import get_field_id_from_field_key
from baserow.contrib.database.search.handler import SearchHandler
from baserow.contrib.database.table.constants import (
CREATED_BY_COLUMN_NAME,
LAST_MODIFIED_BY_COLUMN_NAME,
ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
)
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.contrib.database.table.operations import (
CreateRowDatabaseTableOperationType,
@ -49,20 +61,15 @@ from baserow.core.db import (
)
from baserow.core.exceptions import CannotCalculateIntermediateOrder
from baserow.core.handler import CoreHandler
from baserow.core.psycopg import sql
from baserow.core.telemetry.utils import baserow_trace_methods
from baserow.core.trash.handler import TrashHandler
from baserow.core.trash.registries import trash_item_type_registry
from baserow.core.utils import Progress, get_non_unique_values, grouper
from ..search.handler import SearchHandler
from ..table.constants import (
CREATED_BY_COLUMN_NAME,
LAST_MODIFIED_BY_COLUMN_NAME,
ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
)
from .constants import ROW_IMPORT_CREATION, ROW_IMPORT_VALIDATION
from .error_report import RowErrorReport
from .exceptions import InvalidRowLength, RowDoesNotExist, RowIdsNotUnique
from .operations import (
DeleteDatabaseRowOperationType,
MoveRowDatabaseRowOperationType,
@ -77,19 +84,23 @@ from .signals import (
rows_deleted,
rows_updated,
)
from .types import (
CreatedRowsData,
FieldsMetadata,
FileImportConfiguration,
GeneratedTableModelForUpdate,
RowId,
RowsForUpdate,
UpdatedRowsData,
)
if TYPE_CHECKING:
from django.db.backends.utils import CursorWrapper
from baserow.contrib.database.fields.models import Field
tracer = trace.get_tracer(__name__)
GeneratedTableModelForUpdate = NewType(
"GeneratedTableModelForUpdate", GeneratedTableModel
)
RowsForUpdate = NewType("RowsForUpdate", QuerySet)
BATCH_SIZE = 1024
meter = metrics.get_meter(__name__)
@ -139,29 +150,18 @@ def prepare_field_errors(field_errors):
}
FieldsMetadata = NewType("FieldsMetadata", Dict[str, Any])
RowValues = NewType("RowValues", Dict[str, Any])
RowId = NewType("RowId", int)
class UpdatedRowsWithOldValuesAndMetadata(NamedTuple):
updated_rows: List[GeneratedTableModelForUpdate]
original_rows_values_by_id: Dict[RowId, RowValues]
updated_fields_metadata_by_row_id: Dict[RowId, FieldsMetadata]
class RowM2MChangeTracker:
def __init__(self):
self._deleted_m2m_rels: Dict[
str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
self._created_m2m_rels: Dict[
str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
def track_m2m_update_for_field_and_row(
self,
field: "DjangoField",
field_name: str,
row: GeneratedTableModel,
new_values: Iterable[int],
@ -181,7 +181,7 @@ class RowM2MChangeTracker:
def track_m2m_created_for_new_row(
self,
row: GeneratedTableModel,
field: "DjangoField",
new_values: Iterable[Union[int, Model]],
):
field_type = field_type_registry.get_by_model(field)
@ -197,7 +197,7 @@ class RowM2MChangeTracker:
def get_created_m2m_rels_per_field_for_type(
self, field_type
) -> Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]:
return self._created_m2m_rels[field_type]
def get_deleted_link_row_rels_for_update_collector(
@ -1021,7 +1021,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
updated_field_ids: Set[int],
m2m_change_tracker: Optional[RowM2MChangeTracker] = None,
skip_search_updates: bool = False,
) -> List["DjangoField"]:
"""
Prepares a list of fields that are dependent on the updated fields and updates
them.
@ -1088,7 +1088,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_webhook_events: bool = True,
generate_error_report: bool = False,
skip_search_update: bool = False,
) -> CreatedRowsData:
"""
Creates new rows for a given table without checking permissions. It also calls
the rows_created signal.
@ -1223,9 +1223,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
dependant_fields=dependant_fields,
)
return CreatedRowsData(rows_to_return, report)
def create_rows(
self,
@ -1238,7 +1236,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_webhook_events: bool = True,
generate_error_report: bool = False,
skip_search_update: bool = False,
) -> CreatedRowsData:
"""
Creates new rows for a given table if the user
belongs to the related workspace. It also calls the rows_created signal.
@ -1289,7 +1287,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
self,
model: Type[GeneratedTableModel],
created_rows: List[GeneratedTableModel],
) -> List["DjangoField"]:
"""
Generates a list of dependant fields that need to be updated after the rows have
been created and updates them.
@ -1443,11 +1441,11 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
return report
def force_create_rows_by_batch(
self,
user: AbstractUser,
table: Table,
rows_values: List[Dict[str, Any]],
progress: Optional[Progress] = None,
model: Optional[Type[GeneratedTableModel]] = None,
) -> Tuple[List[GeneratedTableModel], Dict[str, Dict[str, Any]]]:
@ -1457,13 +1455,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
:param user: The user of whose behalf the rows are created.
:param table: The table for which the rows should be created.
:param rows_values: List of rows values for rows that need to be created.
:param progress: Give a progress instance to track the progress of the import.
:param model: Optional model to prevent recomputing table model.
:return: The created rows and the error report.
"""
if not rows_values:
return [], {}
if progress:
@ -1474,7 +1472,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
report = {}
all_created_rows = []
for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
row_start_index = count * BATCH_SIZE
created_rows, creation_report = self.create_rows(
user=user,
@ -1503,11 +1501,64 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
return all_created_rows, report
def force_update_rows_by_batch(
self,
user: AbstractUser,
table: Table,
rows_values: List[Dict[str, Any]],
progress: Progress,
model: Optional[Type[GeneratedTableModel]] = None,
) -> Tuple[List[Dict[str, Any] | None], Dict[str, Dict[str, Any]]]:
"""
Updates rows by batch and generates an error report instead of failing on the
first error.
:param user: The user on whose behalf the rows are updated.
:param table: The table in which the rows should be updated.
:param rows_values: List of row values for rows that need to be updated.
:param progress: Give a progress instance to track the progress of the import.
:param model: Optional model to prevent recomputing table model.
:return: The updated rows and the error report.
"""
if not rows_values:
return [], {}
progress.increment(state=ROW_IMPORT_CREATION)
if model is None:
model = table.get_model()
report = {}
all_updated_rows = []
for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
updated_rows = self.force_update_rows(
user=user,
table=table,
model=model,
rows_values=chunk,
send_realtime_update=False,
send_webhook_events=False,
# Don't trigger loads of search updates for every batch of rows we
# create but instead a single one for this entire table at the end.
skip_search_update=True,
generate_error_report=True,
)
if progress:
progress.increment(len(chunk))
report.update(updated_rows.errors)
all_updated_rows.extend(updated_rows.updated_rows)
SearchHandler.field_value_updated_or_created(table)
return all_updated_rows, report
def import_rows(
self,
user: AbstractUser,
table: Table,
data: list[list[Any]],
configuration: FileImportConfiguration | None = None,
validate: bool = True,
progress: Optional[Progress] = None,
send_realtime_update: bool = True,
@ -1523,12 +1574,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
:param user: The user of whose behalf the rows are created.
:param table: The table for which the rows should be created.
:param data: List of rows values for rows that need to be created.
:param configuration: Optional import configuration dict.
:param validate: If True the data are validated before the import.
:param progress: Give a progress instance to track the progress of the
import.
:param send_realtime_update: The parameter passed to the rows_created
signal indicating if a realtime update should be send.
:raises InvalidRowLength:
:return: The created row instances and the error report.
"""
@ -1541,6 +1595,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
)
error_report = RowErrorReport(data)
configuration = configuration or {}
update_handler = UpsertRowsMappingHandler(
table=table,
upsert_fields=configuration.get("upsert_fields") or [],
upsert_values=configuration.get("upsert_values") or [],
)
# Pre-run upsert configuration validation.
# Can raise InvalidRowLength
update_handler.validate()
model = table.get_model()
@ -1605,10 +1668,40 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
else None
)
# split rows to insert and update lists. If there's no upsert field selected,
# this will not populate rows_values_to_update.
update_map = update_handler.process_map
rows_values_to_create = []
rows_values_to_update = []
if update_map:
for current_idx, import_idx in original_row_index_mapping.items():
row = valid_rows[current_idx]
if update_idx := update_map.get(import_idx):
row["id"] = update_idx
rows_values_to_update.append(row)
else:
rows_values_to_create.append(row)
else:
rows_values_to_create = valid_rows
created_rows, creation_report = self.force_create_rows_by_batch(
user,
table,
rows_values_to_create,
progress=creation_sub_progress,
model=model,
)
if rows_values_to_update:
updated_rows, updated_report = self.force_update_rows_by_batch(
user,
table,
rows_values_to_update,
progress=creation_sub_progress,
model=model,
)
# Add errors to global report
for index, error in creation_report.items():
error_report.add_error(
@ -1616,6 +1709,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
error,
)
if rows_values_to_update:
for index, error in updated_report.items():
error_report.add_error(
original_row_index_mapping[int(index)],
error,
)
if send_realtime_update:
# Just send a single table_updated here as realtime update instead
# of rows_created because we might import a lot of rows.
@ -1626,7 +1726,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
def get_fields_metadata_for_row_history(
self,
row: GeneratedTableModelForUpdate,
updated_fields: List["DjangoField"],
metadata,
) -> FieldsMetadata:
"""
@ -1648,7 +1748,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
def get_fields_metadata_for_rows(
self,
rows: List[GeneratedTableModelForUpdate],
updated_fields: List["DjangoField"],
fields_metadata_by_row_id=None,
) -> Dict[RowId, FieldsMetadata]:
"""
@ -1684,7 +1784,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_realtime_update: bool = True,
send_webhook_events: bool = True,
skip_search_update: bool = False,
generate_error_report: bool = False,
) -> UpdatedRowsData:
"""
Updates field values in batch based on provided rows with the new
values.
@ -1704,6 +1805,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
:param skip_search_update: If you want to instead trigger the search handler
cells update later on after many create_rows calls then set this to True
but make sure you trigger it eventually.
:param generate_error_report: Generate error report if set to True.
:raises RowIdsNotUnique: When trying to update the same row multiple
times.
:raises RowDoesNotExist: When any of the rows don't exist.
@ -1716,9 +1818,12 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
user_id = user and user.id
prepared_rows_values, errors = self.prepare_rows_in_bulk(
model._field_objects,
rows_values,
generate_error_report=generate_error_report,
)
report = {index: err for index, err in errors.items()}
row_ids = [r["id"] for r in prepared_rows_values]
non_unique_ids = get_non_unique_values(row_ids)
@ -1924,13 +2029,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
fields_metadata_by_row_id = self.get_fields_metadata_for_rows(
updated_rows_to_return, updated_fields, fields_metadata_by_row_id
)
updated_rows = UpdatedRowsData(
updated_rows_to_return,
original_row_values_by_id,
fields_metadata_by_row_id,
report,
)
return updated_rows
def update_rows(
self,
user: AbstractUser,
@ -1941,7 +2048,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_realtime_update: bool = True,
send_webhook_events: bool = True,
skip_search_update: bool = False,
generate_error_report: bool = False,
) -> UpdatedRowsData:
"""
Updates field values in batch based on provided rows with the new
values.
@ -1984,6 +2092,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_realtime_update,
send_webhook_events,
skip_search_update,
generate_error_report=generate_error_report,
)
def get_rows(
@ -2436,3 +2545,233 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
self,
table=table,
)
def merge_values_expression(
row: list[str | int | float | None],
field_handlers: "list[UpsertFieldHandler]",
query_params: list,
) -> sql.Composable:
"""
Creates an SQL expression that produces a text value from a list of row values.
Any value that should be interpolated is appended to the provided `query_params`
list.
:param row: a list of values in a row
:param field_handlers: a list of field handlers for a row. The number of handlers
should equal the number of values in a row.
:param query_params: container for the query parameter values
:return: a composable SQL expression producing the concatenated match value
"""
fields = []
for val, field_handler in zip(row, field_handlers):
fields.append(field_handler.get_field_concat_expression())
query_params.append(field_handler.prepare_value(val))
return UpsertRowsMappingHandler.SEPARATOR.join(fields)
class UpsertFieldHandler:
"""
Helper class to handle field's upsert handling.
"""
def __init__(self, table: Table, field_id: int):
self.table = table
# TODO: here we are using field id, but it may be so the field_id
# is `'id'` string.
try:
self._field_def = field_def = next(
(
f
for f in table.get_model().get_field_objects()
if f["field"].id == field_id
)
)
except StopIteration:
raise FieldNotInTable(field_id)
self.field: Field = field_def["field"]
self.field_type: FieldType = field_def["type"]
if not self.field_type.can_upsert:
raise IncompatibleField(self.field.id)
self.field_name = self.field.db_column
def prepare_value(self, value: str) -> Any:
return self.field_type.prepare_value_for_db(self.field, value)
def get_field_concat_expression(self) -> sql.Composable:
column_type = sql.SQL(self.get_column_type() or "text")
return sql.SQL(" COALESCE(CAST({}::{} AS TEXT), '<NULL>')::TEXT ").format(
sql.Placeholder(), column_type
)
def get_column_type(self) -> str | None:
table_field: DjangoField = self.field_type.get_model_field(self.field)
return table_field.db_type(db.connection)
class UpsertRowsMappingHandler:
"""
Helper class for mapping new rows values to existing table rows during an upsert
operation.
This class processes upsert values from the provided data and matches them with
existing row IDs in the database. The resulting mapping helps determine which
imported rows should update existing ones.
### Usage:
>>> importrows = UpsertRowsMappingHandler(table, [1234], [['a'], ['b']])
# Returns a dictionary where:
# - Keys represent the index of the upsert values in the imported dataset.
# - Values represent the corresponding row ID in the database.
>>> importrows.process_map
{0: 1, 1: 2}
# In this example:
# - The first imported value ['a'] (index 0) corresponds to the row with ID 1.
# - The second imported value ['b'] (index 1) corresponds to the row with ID 2.
"""
SEPARATOR = sql.SQL(" || '__-__' || ")
PER_CHUNK = 100
def __init__(
self, table: Table, upsert_fields: list[int], upsert_values: list[list[Any]]
):
self.table = table
self.table_name = table.get_database_table_name()
self.import_fields = [UpsertFieldHandler(table, fidx) for fidx in upsert_fields]
self.upsert_values = upsert_values
def validate(self):
"""
Validates that the upsert configuration conforms to the formal requirements.
:raises InvalidRowLength:
"""
expected_length = len(self.import_fields)
for ridx, uval in enumerate(self.upsert_values):
if len(uval) != expected_length:
raise InvalidRowLength(ridx)
@cached_property
def process_map(self) -> dict[int, int]:
"""
Calculates a map between import row indexes and table row ids.
"""
# no upsert value fields, no need for mapping
if not self.import_fields:
return {}
script_template = sql.SQL(
"""
CREATE TEMP TABLE table_upsert_indexes (id INT, upsert_value TEXT, group_index INT);
CREATE TEMP TABLE table_import (id INT, upsert_value TEXT);
CREATE TEMP VIEW table_import_indexes AS
SELECT id, upsert_value, RANK()
OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
AS group_index
FROM table_import ORDER BY id ;
"""
)
self.execute(script_template)
self.insert_table_values()
self.insert_imported_values()
# this is just a list of pairs, not very usable.
calculated = self.calculate_map()
# map import row idx -> update row_id in table
return {r[1]: r[0] for r in calculated}
@cached_property
def connection(self):
return db.connection
@cached_property
def cursor(self):
return self.connection.cursor()
def execute(self, query, *args, **kwargs) -> "CursorWrapper":
self.cursor.execute(query, *args, **kwargs)
return self.cursor
def insert_table_values(self):
"""
Populates the temp upsert comparison table with values from the existing table.
Values from multiple source columns will be normalized to one text value.
"""
columns = self.SEPARATOR.join(
[
sql.SQL("COALESCE(CAST({} AS TEXT), '<NULL>')::TEXT").format(
sql.Identifier(field.field_name)
)
for field in self.import_fields
]
)
query = sql.SQL(
"""WITH subq AS (SELECT r.id, {} AS upsert_value FROM {} r WHERE NOT trashed)
INSERT INTO table_upsert_indexes (id, upsert_value, group_index)
SELECT id, upsert_value, RANK()
OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
AS group_index
FROM subq ORDER BY id """
).format(
columns, sql.Identifier(self.table_name)
) # nosec B608
self.execute(query)
def insert_imported_values(self):
"""
Builds and executes bulk insert queries for upsert comparison values
from import data.
"""
for _chunk in chunks(enumerate(self.upsert_values), self.PER_CHUNK):
# put all params (processed values) for the query into a container
query_params = []
rows_query = []
for rowidx, row in _chunk:
# per-row insert query
query_params.append(rowidx)
row_to_add = sql.SQL("({}, {})").format(
sql.Placeholder(),
merge_values_expression(row, self.import_fields, query_params),
)
rows_query.append(row_to_add)
rows_placeholder = sql.SQL(",\n").join(rows_query)
script_template = sql.SQL(
"INSERT INTO table_import (id, upsert_value) VALUES {};"
).format(
rows_placeholder
) # nosec B608
self.execute(script_template, query_params)
def calculate_map(self) -> list[tuple[int, int]]:
"""
Calculates a map between imported row index -> table row id
that can be used to determine whether an imported row should be updated
(a mapping exists) or inserted as a new one.
"""
q = sql.SQL(
"""
SELECT t.id, i.id
FROM table_upsert_indexes t
JOIN table_import_indexes i
ON (i.upsert_value = t.upsert_value
AND i.group_index = t.group_index);
"""
)
return self.execute(q).fetchall()
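Putting the pieces together, an upsert-style import through the row handler could look roughly like the sketch below. It assumes an existing `user`, a `table` with matching fields, and illustrative field IDs; it is not code from this commit.

# Hypothetical usage: rows whose upsert value matches an existing row are
# updated in place, all remaining rows are inserted as new rows.
from baserow.contrib.database.rows.handler import RowHandler

created_rows, error_report = RowHandler().import_rows(
    user,
    table,
    data=[["Ada", "Lovelace"], ["Alan", "Turing"]],
    configuration={
        "upsert_fields": [10, 11],  # illustrative field IDs
        "upsert_values": [["Ada", "Lovelace"], ["Alan", "Turing"]],
    },
    send_realtime_update=False,
)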

View file

@ -0,0 +1,39 @@
import typing
from typing import Any, NamedTuple, NewType
from django.db.models import QuerySet
from baserow.contrib.database.table.models import GeneratedTableModel
GeneratedTableModelForUpdate = NewType(
"GeneratedTableModelForUpdate", GeneratedTableModel
)
RowsForUpdate = NewType("RowsForUpdate", QuerySet)
class FileImportConfiguration(typing.TypedDict):
upsert_fields: list[int]
upsert_values: list[list[typing.Any]]
class FileImportDict(typing.TypedDict):
data: list[list[typing.Any]]
configuration: FileImportConfiguration | None
FieldsMetadata = NewType("FieldsMetadata", dict[str, Any])
RowValues = NewType("RowValues", dict[str, Any])
RowId = NewType("RowId", int)
class UpdatedRowsData(NamedTuple):
updated_rows: list[GeneratedTableModelForUpdate]
original_rows_values_by_id: dict[RowId, RowValues]
updated_fields_metadata_by_row_id: dict[RowId, FieldsMetadata]
errors: dict[int, dict[str, Any]] | None = None
class CreatedRowsData(NamedTuple):
created_rows: list[GeneratedTableModel]
errors: dict[int, dict[str, Any]] | None = None

View file

@ -486,7 +486,11 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
table = self.create_table_and_fields(user, database, name, fields)
_, error_report = RowHandler().import_rows(
user,
table,
data=data,
progress=progress,
send_realtime_update=False,
)
table_created.send(self, table=table, user=user)

View file

@ -64,7 +64,6 @@ def run_async_job(self, job_id: int):
job.set_state_failed(str(e), error)
job.save()
raise
finally:
# Delete the import job cached entry because the transaction has been committed

View file

@ -33,6 +33,7 @@ class FileImportFixtures:
for field_index in range(column_count):
row.append(f"data_{index}_{field_index}")
data.append(row)
data = {"data": data}
else:
data = kwargs.pop("data")

View file

@ -78,7 +78,7 @@ class RowFixture:
for row in rows
],
)
return created_rows.created_rows
def get_rows(self, fields: List[Field]) -> List[List[Any]]:
model = fields[0].table.get_model()

View file

@ -57,16 +57,20 @@ class TableFixtures:
)
)
if rows:
created_rows = (
RowHandler()
.force_create_rows(
user=user,
table=table,
rows_values=[
{
f"field_{field.id}": row[index]
for index, field in enumerate(fields)
}
for row in rows
],
)
.created_rows
)
else:
created_rows = []

View file

@ -318,7 +318,7 @@ def setup_interesting_test_table(
blank_row, row = row_handler.force_create_rows(
user, table, [{}, row_values], model=model
).created_rows
# Setup the link rows
linked_row_1, linked_row_2, linked_row_3 = row_handler.force_create_rows(
@ -337,7 +337,7 @@ def setup_interesting_test_table(
link_table_primary_text_field.db_column: "",
},
],
).created_rows
linked_row_4, linked_row_5, linked_row_6 = row_handler.force_create_rows(
user=user,
table=decimal_link_table,
@ -352,7 +352,7 @@ def setup_interesting_test_table(
decimal_table_primary_decimal_field.db_column: None,
},
],
).created_rows
with freeze_time("2020-01-01 12:00"):
user_file_1 = data_fixture.create_user_file(
original_name=f"name{file_suffix}.txt",
@ -372,7 +372,7 @@ def setup_interesting_test_table(
file_link_table_primary_file_field.db_column: None,
},
],
).created_rows
link_row_9, link_row_10 = row_handler.force_create_rows(
user=user,
table=multiple_collaborators_link_table,
@ -389,7 +389,7 @@ def setup_interesting_test_table(
],
},
],
).created_rows
link_row_field_id = name_to_field_id["link_row"]
link_row_field_without_related_id = name_to_field_id["link_row_without_related"]

View file

@ -712,17 +712,21 @@ def test_dispatch_local_baserow_upsert_row_workflow_action_with_unmatching_index
],
)
field = table.field_set.get()
rows = (
RowHandler()
.create_rows(
user,
table,
rows_values=[
{f"field_{field.id}": "Community Engagement"},
{f"field_{field.id}": "Construction"},
{f"field_{field.id}": "Complex Construction Design"},
{f"field_{field.id}": "Simple Construction Design"},
{f"field_{field.id}": "Landscape Design"},
{f"field_{field.id}": "Infrastructure Design"},
],
)
.created_rows
)
builder = data_fixture.create_builder_application(workspace=workspace)

View file

@ -3270,14 +3270,18 @@ def test_get_row_adjacent(api_client, data_fixture):
table = data_fixture.create_database_table(name="table", user=user)
field = data_fixture.create_text_field(name="some name", table=table)
[row_1, row_2, row_3] = (
RowHandler()
.create_rows(
user,
table,
rows_values=[
{f"field_{field.id}": "some value"},
{f"field_{field.id}": "some value"},
{f"field_{field.id}": "some value"},
],
)
.created_rows
)
# Get the next row
@ -3325,14 +3329,18 @@ def test_get_row_adjacent_view_id_provided(api_client, data_fixture):
user, field=field, view=view, type="contains", value="a"
)
[row_1, row_2, row_3] = (
RowHandler()
.create_rows(
user,
table,
rows_values=[
{f"field_{field.id}": "ab"},
{f"field_{field.id}": "b"},
{f"field_{field.id}": "a"},
],
)
.created_rows
)
response = api_client.get(
@ -3358,14 +3366,18 @@ def test_get_row_adjacent_view_id_no_adjacent_row(api_client, data_fixture):
table = data_fixture.create_database_table(name="table", user=user)
field = data_fixture.create_text_field(name="field", table=table)
[row_1, row_2, row_3] = (
RowHandler()
.create_rows(
user,
table,
rows_values=[
{f"field_{field.id}": "a"},
{f"field_{field.id}": "b"},
{f"field_{field.id}": "c"},
],
)
.created_rows
)
response = api_client.get(
@ -3469,14 +3481,18 @@ def test_get_row_adjacent_search(api_client, data_fixture, search_mode):
table = data_fixture.create_database_table(name="table", user=user)
field = data_fixture.create_text_field(name="field", table=table)
[row_1, row_2, row_3] = (
RowHandler()
.create_rows(
user,
table,
rows_values=[
{f"field_{field.id}": "a"},
{f"field_{field.id}": "ab"},
{f"field_{field.id}": "c"},
],
)
.created_rows
)
SearchHandler.update_tsvector_columns(
table, update_tsvectors_for_changed_rows_only=False
@ -4432,7 +4448,7 @@ def test_link_row_field_validate_input_data_for_read_only_primary_fields(
user=user, table_b=table_b
)
(row_b1,) = RowHandler().create_rows(user, table_b, [{}]).created_rows
row_b1_pk = str(getattr(row_b1, pk_field.db_column))
# using a valid value as reference to the row should work

View file

@ -17,6 +17,7 @@ from rest_framework.status import (
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.file_import.models import FileImportJob
from baserow.contrib.database.table.models import Table
from baserow.core.jobs.models import Job
from baserow.test_utils.helpers import (
assert_serialized_rows_contain_same_values,
independent_test_db_connection,
@ -248,7 +249,7 @@ def test_create_table_with_data(
with patch_filefield_storage():
with job.data_file.open("r") as fin:
data = json.load(fin)
assert data.get("data") == [
["A", "B", "C", "D"],
["1-1", "1-2", "1-3", "1-4", "1-5"],
["2-1", "2-2", "2-3"],
@ -647,3 +648,144 @@ def test_async_duplicate_interesting_table(api_client, data_fixture):
for original_row, duplicated_row in zip(original_rows, duplicated_rows):
assert_serialized_rows_contain_same_values(original_row, duplicated_row)
@pytest.mark.django_db
def test_import_table_call(api_client, data_fixture):
"""
A simple test to check import table validation
"""
user, token = data_fixture.create_user_and_token()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(database=database)
data_fixture.create_text_field(table=table, user=user)
data_fixture.create_number_field(table=table, user=user)
url = reverse("api:database:tables:import_async", kwargs={"table_id": table.id})
valid_data_no_configuration = {"data": [["1", 1], ["2", 1]]}
response = api_client.post(
url,
HTTP_AUTHORIZATION=f"JWT {token}",
data=valid_data_no_configuration,
format="json",
)
assert response.status_code == HTTP_200_OK
rdata = response.json()
assert isinstance(rdata.get("id"), int)
assert rdata.get("type") == "file_import"
Job.objects.all().delete()
valid_data_with_configuration = {"data": [["1", 1], ["2", 1]], "configuration": {}}
response = api_client.post(
url,
HTTP_AUTHORIZATION=f"JWT {token}",
data=valid_data_with_configuration,
format="json",
)
rdata = response.json()
assert response.status_code == HTTP_200_OK
assert isinstance(rdata.get("id"), int)
assert rdata.get("type") == "file_import"
Job.objects.all().delete()
invalid_data_with_configuration = {
"data": [["1", 1], ["2", 1]],
"configuration": {"upsert_fields": []},
}
response = api_client.post(
url,
HTTP_AUTHORIZATION=f"JWT {token}",
data=invalid_data_with_configuration,
format="json",
)
rdata = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert rdata == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {
"configuration": {
"upsert_fields": [
{
"error": "Ensure this field has at least 1 elements.",
"code": "min_length",
}
]
}
},
}
Job.objects.all().delete()
invalid_data = {}
response = api_client.post(
url, HTTP_AUTHORIZATION=f"JWT {token}", data=invalid_data
)
rdata = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert rdata == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {"data": [{"error": "This field is required.", "code": "required"}]},
}
invalid_data = {
"data": [["1", 1], ["2", 1]],
"configuration": {"upsert_fields": [1, 2]},
}
response = api_client.post(
url,
HTTP_AUTHORIZATION=f"JWT {token}",
data=invalid_data,
format="json",
)
assert response.status_code == HTTP_400_BAD_REQUEST
rdata = response.json()
assert rdata == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {
"configuration": {
"upsert_value": [
{
"error": "upsert_values must not be empty when upsert_fields are provided.",
"code": "invalid",
}
]
}
},
}
invalid_data = {
"data": [["1", 1], ["2", 1]],
"configuration": {"upsert_fields": [1, 2], "upsert_values": [["a"]]},
}
response = api_client.post(
url,
HTTP_AUTHORIZATION=f"JWT {token}",
data=invalid_data,
format="json",
)
assert response.status_code == HTTP_400_BAD_REQUEST
rdata = response.json()
assert rdata == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {
"data": [
{
"error": "`data` and `configuration.upsert_values` should have the same length.",
"code": "invalid",
}
],
"configuration": {
"upsert_values": {
"error": "`data` and `configuration.upsert_values` should have the same length.",
"code": "invalid",
}
},
},
}
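For reference, a request body that passes the validation exercised above could look like the following sketch. The field IDs are illustrative placeholders, not real IDs from an existing table, and upsert_values must have the same number of rows as data:

# Hypothetical payload for the import_async endpoint; IDs 1 and 2 stand in
# for the text and number field created in the test above.
valid_payload_with_upsert = {
    "data": [["1", 1], ["2", 1]],
    "configuration": {
        "upsert_fields": [1, 2],
        "upsert_values": [["1", 1], ["2", 1]],
    },
}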
@ -618,8 +618,10 @@ def test_autonumber_field_can_be_referenced_in_formula(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    data_fixture.create_autonumber_field(name="autonumber", table=table)
    row_1, row_2 = (
        RowHandler()
        .create_rows(user=user, table=table, rows_values=[{}, {}])
        .created_rows
    )
    formula_field = data_fixture.create_formula_field(
@ -633,8 +635,10 @@ def test_autonumber_field_can_be_referenced_in_formula(data_fixture):
        {"id": row_2.id, f"field_{formula_field.id}": 4},
    ]
    (row_3,) = (
        RowHandler()
        .create_rows(user=user, table=table, rows_values=[{}], model=model)
        .created_rows
    )
    row_values = model.objects.all().values("id", f"field_{formula_field.id}")
    assert list(row_values) == [
@ -660,12 +664,17 @@ def test_autonumber_field_can_be_looked_up(data_fixture):
    row_b_2 = model_b.objects.create()
    model_a = table_a.get_model()
    (row,) = (
        RowHandler()
        .create_rows(
            user=user,
            table=table_a,
            rows_values=[
                {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
            ],
            model=model_a,
        )
        .created_rows
    )
    assert getattr(row, f"field_{formula_field.id}") == 3
@ -138,7 +138,7 @@ def test_boolean_field_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_c.id, previous=True, view=grid_view
@ -132,7 +132,7 @@ def test_create_rows_created_by(data_fixture):
    rows = row_handler.create_rows(
        user=user, table=table, rows_values=[{}, {}], model=model
    ).created_rows
    assert getattr(rows[0], f"field_{field.id}") == user
@ -237,7 +237,7 @@ def test_created_on_field_adjacent_row(data_fixture):
            {},
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -661,7 +661,7 @@ def test_date_field_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -699,7 +699,7 @@ def test_get_group_by_metadata_in_rows_with_date_field(data_fixture):
                f"field_{date_field.id}": "2010-01-02 12:01:21",
            },
        ],
    ).created_rows
    model = table.get_model()
@ -98,7 +98,7 @@ def test_create_duration_field_rows(data_fixture):
            {f"field_{duration_field.id}": timedelta(seconds=3661)},
        ],
        model=model,
    ).created_rows
    assert len(rows) == 2
    assert getattr(rows[0], f"field_{duration_field.id}") == timedelta(seconds=3660)
@ -779,20 +779,24 @@ def test_duration_field_view_filters(data_fixture):
    )
    model = table.get_model()
    rows = (
        RowHandler()
        .create_rows(
            user,
            table,
            rows_values=[
                {field.db_column: None},
                {field.db_column: "0:1.123"},
                {field.db_column: 1.123},
                {field.db_column: 60},  # 1min
                {field.db_column: "24:0:0"},  # 1day
                {field.db_column: "1 0"},  # 1day
                {field.db_column: 3601},  # 1hour 1sec
                {field.db_column: "1:0:0"},  # 1 hour
            ],
            model=model,
        )
        .created_rows
    )
    #
@ -1105,14 +1109,18 @@ def test_duration_field_can_be_looked_up(data_fixture):
    )
    model_b = table_b.get_model()
    row_b_1, row_b_2 = (
        RowHandler()
        .create_rows(
            user=user,
            table=table_b,
            rows_values=[
                {duration_field.db_column: 24 * 3600},
                {duration_field.db_column: 60},
            ],
            model=model_b,
        )
        .created_rows
    )
    assert list(model_b.objects.values_list(duration_formula.db_column, flat=True)) == [
@ -1121,13 +1129,17 @@ def test_duration_field_can_be_looked_up(data_fixture):
    ]
    model_a = table_a.get_model()
    (row,) = (
        RowHandler()
        .create_rows(
            user=user,
            table=table_a,
            rows_values=[
                {f"field_{link_field.id}": [row_b_1.id, row_b_2.id]},
            ],
            model=model_a,
        )
        .created_rows
    )
    assert getattr(row, f"field_{lookup_field.id}") == [
        {"id": row_b_1.id, "value": "1 day"},
@ -79,7 +79,7 @@ def duration_formula_filter_proc(
        {src_field_name: 61, refname: "1m 1s"},
    ]
    t.row_handler.create_rows(
        user=t.user,
        table=t.table,
        rows_values=rows,
@ -40,12 +40,16 @@ def test_migration_rows_with_deleted_singleselect_options(
        field=single_select_field, value=f"Option B"
    )
    row_with_b = (
        RowHandler()
        .force_create_rows(
            user=user,
            table=table,
            rows_values=[
                {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
            ],
        )
        .created_rows[1]
    )
    single_select_field_type = field_type_registry.get_by_model(single_select_field)
@ -95,12 +99,16 @@ def test_single_select_ids_are_removed_from_rows_when_deleted(data_fixture):
    option_a = data_fixture.create_select_option(field=single_select_field, value=f"A")
    option_b = data_fixture.create_select_option(field=single_select_field, value=f"B")
    row_with_b = (
        RowHandler()
        .force_create_rows(
            user=user,
            table=table,
            rows_values=[
                {single_select_field.db_column: opt.id} for opt in (option_a, option_b)
            ],
        )
        .created_rows[1]
    )
    # Keep only A, and remove B
@ -481,14 +481,18 @@ def test_run_delete_mentions_marked_for_deletion(data_fixture):
    # Create a user mention
    with freeze_time("2023-02-27 9:00"):
        row_1, row_2 = (
            RowHandler()
            .create_rows(
                user=user,
                table=table,
                rows_values=[
                    {f"field_{rich_text_field.id}": f"Hello @{user.id}!"},
                    {f"field_{rich_text_field.id}": f"Hi @{user.id}!"},
                ],
                model=model,
            )
            .created_rows
        )
    mentions = RichTextFieldMention.objects.all()
@ -1091,13 +1091,17 @@ def test_inserting_a_row_with_lookup_field_immediately_populates_it_with_empty_l
    primary_a_field = table_a.field_set.get(primary=True)
    primary_b_field = table_b.field_set.get(primary=True)
    target_field = data_fixture.create_text_field(name="target", table=table_b)
    row_1, row_2 = (
        RowHandler()
        .create_rows(
            user,
            table_b,
            rows_values=[
                {primary_b_field.db_column: "1", target_field.db_column: "target 1"},
                {primary_b_field.db_column: "2", target_field.db_column: "target 2"},
            ],
        )
        .created_rows
    )
    RowHandler().create_rows(
        user,
@ -1373,7 +1377,7 @@ def test_formula_field_adjacent_row(data_fixture):
                f"field_{text_field.id}": "C",
            },
        ],
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -134,7 +134,7 @@ def test_create_rows_last_modified_by(data_fixture):
    rows = row_handler.create_rows(
        user=user, table=table, rows_values=[{}, {}], model=model
    ).created_rows
    assert getattr(rows[0], f"field_{field.id}") == user
@ -255,7 +255,7 @@ def test_last_modified_field_adjacent_row(data_fixture):
            {},
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -278,14 +278,16 @@ def test_last_modified_field_can_be_looked_up(data_fixture):
    row_handler = RowHandler()
    row_b1, _ = row_handler.create_rows(
        user=user, table=table_b, rows_values=[{}, {}]
    ).created_rows
    with freeze_time("2020-01-01 12:00"):
        row_a1, _ = row_handler.create_rows(
            user=user,
            table=table_a,
            rows_values=[{link_row.db_column: [row_b1.id]}, {}],
        ).created_rows
    updated_row_b1 = row_handler.get_row(user=user, table=table_b, row_id=row_b1.id)
    assert getattr(updated_row_b1, lookup_last_modified_field.db_column) == [
@ -2260,11 +2260,15 @@ def test_dont_export_deleted_relations(data_fixture):
    row_b2 = table_b_model.objects.create()
    table_a_model = table_a.get_model()
    (row_a1,) = (
        RowHandler()
        .force_create_rows(
            user,
            table_a,
            [{link_field.db_column: [row_b1.id, row_b2.id]}],
            model=table_a_model,
        )
        .created_rows
    )
    assert getattr(row_a1, link_field.db_column).count() == 2
@ -2336,7 +2340,7 @@ def setup_table_with_single_select_pk(user, data_fixture):
        for (char, opt) in zip(all_chars, options)
    ]
    rows = RowHandler().force_create_rows(user, table, rows_values).created_rows
    return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
@ -2363,7 +2367,7 @@ def setup_table_with_multiple_select_pk(user, data_fixture):
        for (i, char) in enumerate(all_chars)
    ]
    rows = RowHandler().force_create_rows(user, table, rows_values).created_rows
    return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
@ -2410,16 +2414,22 @@ def setup_table_with_collaborator_pk(user, data_fixture):
        ]
    )
    rows = (
        RowHandler()
        .force_create_rows(
            user,
            table,
            [
                {
                    f"{primary_field.db_column}": [
                        {"id": usr.id, "name": usr.first_name}
                    ],
                    f"{comparable_field.db_column}": usr.first_name,
                }
                for usr in users
            ],
        )
        .created_rows
    )
    return LinkRowOrderSetup(table, primary_field, rows, comparable_field)
@ -2611,10 +2621,14 @@ def test_get_group_by_metadata_in_rows_with_many_to_many_field(data_fixture):
    user = data_fixture.create_user()
    table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(user=user)
    row_b1, row_b2, row_b3 = (
        RowHandler()
        .force_create_rows(
            user=user,
            table=table_b,
            rows_values=[{}, {}, {}],
        )
        .created_rows
    )
    RowHandler().force_create_rows(
@ -2727,24 +2741,28 @@ def test_list_rows_with_group_by_link_row_to_multiple_select_field(
    grid = data_fixture.create_grid_view(table=table_a)
    data_fixture.create_view_group_by(view=grid, field=link_a_to_b)
    row_b1, row_b2 = (
        RowHandler()
        .force_create_rows(
            user=user,
            table=table_b,
            rows_values=[
                {
                    f"field_{multiple_select_field.id}": [
                        select_option_1.id,
                        select_option_2.id,
                        select_option_3.id,
                    ],
                },
                {
                    f"field_{multiple_select_field.id}": [
                        select_option_2.id,
                        select_option_3.id,
                    ],
                },
            ],
        )
        .created_rows
    )
    RowHandler().force_create_rows(
@ -62,14 +62,18 @@ def test_perm_deleting_rows_delete_rich_text_mentions(data_fixture):
        table=table, long_text_enable_rich_text=True
    )
    row_1, row_2, row_3 = (
        RowHandler()
        .create_rows(
            user=user,
            table=table,
            rows_values=[
                {field.db_column: f"Hello @{user.id}!"},
                {field.db_column: f"Ciao @{user.id}!"},
                {field.db_column: f"Hola @{user.id}!"},
            ],
        )
        .created_rows
    )
    mentions = RichTextFieldMention.objects.all()
@ -825,19 +825,23 @@ def test_can_modify_row_containing_lookup(
        link_row_table=table2,
    )
    a, b = (
        RowHandler()
        .create_rows(
            user,
            table2,
            [
                {
                    looked_up_field.db_column: f"2021-02-01",
                    table2_primary_field.db_column: "primary a",
                },
                {
                    looked_up_field.db_column: f"2022-02-03",
                    table2_primary_field.db_column: "primary b",
                },
            ],
        )
        .created_rows
    )
    table_row = RowHandler().create_row(
@ -1347,20 +1351,24 @@ def test_deleting_table_with_dependants_works(
    )
    table2_model = table2.get_model()
    a, b = (
        RowHandler()
        .create_rows(
            user,
            table2,
            rows_values=[
                {
                    looked_up_field.db_column: "2021-02-01",
                    table2_primary_field.db_column: "primary a",
                },
                {
                    looked_up_field.db_column: "2022-02-03",
                    table2_primary_field.db_column: "primary b",
                },
            ],
            model=table2_model,
        )
        .created_rows
    )
    table_model = table.get_model()
@ -1847,34 +1855,42 @@ def test_can_modify_row_containing_lookup_diamond_dep(
    starting_row = RowHandler().create_row(
        user, table1, {primary_table1.db_column: "table1_primary_row_1"}
    )
    table2_row1, table2_row2 = (
        RowHandler()
        .create_rows(
            user,
            table2,
            [
                {
                    primary_table2.db_column: "table2_row1",
                    table2_link_to_table1.db_column: [starting_row.id],
                },
                {
                    primary_table2.db_column: "table2_row2",
                    table2_link_to_table1.db_column: [starting_row.id],
                },
            ],
        )
        .created_rows
    )
    table3_row1, table3_row2 = (
        RowHandler()
        .create_rows(
            user,
            table3,
            [
                {
                    primary_table3.db_column: "table3_row1",
                    table3_link_to_table2_a.db_column: [table2_row1.id],
                },
                {
                    primary_table3.db_column: "table3_row2",
                    table3_link_to_table2_b.db_column: [table2_row2.id],
                },
            ],
        )
        .created_rows
    )
    FieldHandler().create_field(
@ -849,12 +849,12 @@ def test_multiple_collaborators_field_type_values_can_be_searched(data_fixture):
            {collaborator_field.db_column: [{"id": luigi.id}]},
            {collaborator_field.db_column: [{"id": mario.id}, {"id": luigi.id}]},
        ],
    ).created_rows
    rows_a_to_b = row_handler.force_create_rows(
        user=mario,
        table=table_a,
        rows_values=[{link_a_to_b.db_column: [row_b.id]} for row_b in rows_b],
    ).created_rows
    # search in B
    model_b = table_b.get_model()
@ -931,7 +931,7 @@ def test_multiple_collaborators_formula_field_cache_users_query(data_fixture):
            {field_id: [{"id": user_2.id}, {"id": user_3.id}]},
        ],
        model=table_model,
    ).created_rows
    # The number of queries should not increase as we export more rows
    with CaptureQueriesContext(connection) as queries_for_all_others:
@ -450,7 +450,7 @@ def test_multiple_select_field_type_multiple_rows(data_fixture):
    assert len(row_5_field) == 1
    assert getattr(row_5_field[0], "id") == select_options[0].id
    error_report = row_handler.create_rows(
        user,
        table,
        rows_values=[
@ -460,7 +460,7 @@ def test_multiple_select_field_type_multiple_rows(data_fixture):
            {f"field_{field.id}": [99999, "missing"]},
        ],
        generate_error_report=True,
    ).errors
    assert list(error_report.keys()) == [0, 2, 3]
    assert f"field_{field.id}" in error_report[0]
@ -2300,7 +2300,7 @@ def test_multiple_select_adjacent_row(data_fixture):
                f"field_{multiple_select_field.id}": [option_a.id],
            },
        ],
    ).created_rows
    base_queryset = ViewHandler().apply_sorting(
        grid_view, table.get_model().objects.all()
@ -2595,7 +2595,7 @@ def test_get_group_by_metadata_in_rows_with_many_to_many_field(data_fixture):
                ],
            },
        ],
    ).created_rows
    model = table.get_model()
@ -2792,7 +2792,7 @@ def test_get_group_by_metadata_in_rows_multiple_and_single_select_fields(data_fi
                ],
            },
        ],
    ).created_rows
    model = table.get_model()
@ -2992,11 +2992,15 @@ def setup_view_for_multiple_select_field(data_fixture, option_values):
            return {}
        return {multiple_select_field.db_column: [opt.id for opt in options]}
    rows = (
        RowHandler()
        .force_create_rows(
            user,
            table,
            [prep_row([option] if option is not None else None) for option in options],
            model=model,
        )
        .created_rows
    )
    fields = {
@ -274,7 +274,7 @@ def test_number_field_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -65,7 +65,7 @@ def number_lookup_filter_proc(
    linked_rows = t.row_handler.create_rows(
        user=t.user, table=t.other_table, rows_values=dict_rows
    ).created_rows
    # helper to get linked rows by indexes
    def get_linked_rows(*indexes) -> list[int]:
@ -320,7 +320,7 @@ def test_rating_field_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -1105,7 +1105,7 @@ def test_single_select_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    previous_row = handler.get_adjacent_row(
        table_model, row_b.id, previous=True, view=grid_view
@ -1141,7 +1141,7 @@ def test_single_select_adjacent_row_working_with_sorts_and_null_values(data_fixt
            {},
        ],
        model=table_model,
    ).created_rows
    next_row = handler.get_adjacent_row(table_model, row_a.id, view=grid_view)
    assert next_row.id == row_b.id
@ -1379,8 +1379,12 @@ def setup_view_for_single_select_field(data_fixture, option_values):
    def prep_row(option):
        return {single_select_field.db_column: option.id if option else None}
    rows = (
        RowHandler()
        .force_create_rows(
            user, table, [prep_row(option) for option in options], model=model
        )
        .created_rows
    )
    fields = {
@ -156,7 +156,7 @@ def test_create_uuid_row_in_bulk(data_fixture):
    rows = row_handler.create_rows(
        user=user, table=table, rows_values=[{}, {}], model=model
    ).created_rows
    assert isinstance(rows[0].uuid, UUID)
    assert isinstance(rows[1].uuid, UUID)
@ -9,6 +9,8 @@ from pyinstrument import Profiler
from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
from baserow.contrib.database.fields.exceptions import (
    FieldNotInTable,
    IncompatibleField,
    InvalidBaserowFieldName,
    MaxFieldLimitExceeded,
    MaxFieldNameLengthExceeded,
@ -16,7 +18,10 @@ from baserow.contrib.database.fields.exceptions import (
)
from baserow.contrib.database.fields.field_cache import FieldCache
from baserow.contrib.database.fields.models import SelectOption, TextField
from baserow.contrib.database.rows.exceptions import (
    InvalidRowLength,
    ReportMaxErrorCountExceeded,
)
from baserow.contrib.database.table.exceptions import (
    InitialTableDataDuplicateName,
    InitialTableDataLimitExceeded,
@ -43,23 +48,25 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
        run_async_job(job.id)
    with patch_filefield_storage(), pytest.raises(InvalidInitialTableData):
        job = data_fixture.create_file_import_job(data={"data": []})
        run_async_job(job.id)
    with patch_filefield_storage(), pytest.raises(InvalidInitialTableData):
        job = data_fixture.create_file_import_job(data={"data": [[]]})
        run_async_job(job.id)
    with override_settings(
        INITIAL_TABLE_DATA_LIMIT=2
    ), patch_filefield_storage(), pytest.raises(InitialTableDataLimitExceeded):
        job = data_fixture.create_file_import_job(data={"data": [[], [], []]})
        run_async_job(job.id)
    with override_settings(MAX_FIELD_LIMIT=2), patch_filefield_storage(), pytest.raises(
        MaxFieldLimitExceeded
    ):
        job = data_fixture.create_file_import_job(
            data={"data": [["fields"] * 3, ["rows"] * 3]}
        )
        run_async_job(job.id)
    too_long_field_name = "x" * 256
@ -73,35 +80,37 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
    ]
    with patch_filefield_storage(), pytest.raises(MaxFieldNameLengthExceeded):
        job = data_fixture.create_file_import_job(data={"data": data})
        run_async_job(job.id)
    data[0][0] = field_name_with_ok_length
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(data={"data": data})
        run_async_job(job.id)
    with patch_filefield_storage(), pytest.raises(ReservedBaserowFieldNameException):
        job = data_fixture.create_file_import_job(data={"data": [["id"]]})
        run_async_job(job.id)
    with patch_filefield_storage(), pytest.raises(InitialTableDataDuplicateName):
        job = data_fixture.create_file_import_job(data={"data": [["test", "test"]]})
        run_async_job(job.id)
    with patch_filefield_storage(), pytest.raises(InvalidBaserowFieldName):
        job = data_fixture.create_file_import_job(data={"data": [[" "]]})
        run_async_job(job.id)
    # Basic use
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
            data={
                "data": [
                    ["A", "B", "C", "D"],
                    ["1-1", "1-2", "1-3", "1-4", "1-5"],
                    ["2-1", "2-2", "2-3"],
                    ["3-1", "3-2"],
                ]
            }
        )
        run_async_job(job.id)
@ -130,11 +139,13 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
    # Without first row header
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
            data={
                "data": [
                    ["1-1"],
                    ["2-1", "2-2", "2-3"],
                    ["3-1", "3-2"],
                ]
            },
            first_row_header=False,
        )
        run_async_job(job.id)
@ -151,17 +162,19 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
    # Robust to strange field names
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
            data={
                "data": [
                    [
                        "TEst 1",
                        "10.00",
                        'Falsea"""',
                        'a"a"a"a"a,',
                        "a",
                        1.3,
                        "/w. r/awr",
                    ],
                ]
            },
        )
        run_async_job(job.id)
@ -196,7 +209,7 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
    model = table.get_model()
    # Import data to an existing table
    data = {"data": [["baz", 3, -3, "foo", None], ["bob", -4, 2.5, "bar", "a" * 255]]}
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
@ -212,13 +225,15 @@ def test_run_file_import_task(data_fixture, patch_filefield_storage):
    assert len(rows) == 2
    # Import data with different length
    data = {
        "data": [
            ["good", "test", "test", "Anything"],
            [],
            [None, None],
            ["good", 2.5, None, "Anything"],
            ["good", 2.5, None, "Anything", "too much", "values"],
        ]
    }
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
@ -331,6 +346,7 @@ def test_run_file_import_task_for_special_fields(data_fixture, patch_filefield_s
            [],
        ],
    ]
    data = {"data": data}
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
@ -397,6 +413,7 @@ def test_run_file_import_task_for_special_fields(data_fixture, patch_filefield_s
            "bug",
        ],
    ]
    data = {"data": data}
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
@ -454,8 +471,8 @@ def test_run_file_import_test_chunk(data_fixture, patch_filefield_storage):
    table, _, _ = data_fixture.build_table(
        columns=[
            ("col1", "text"),
            ("col2", "number"),
        ],
        rows=[],
        user=user,
@ -483,11 +500,16 @@ def test_run_file_import_test_chunk(data_fixture, patch_filefield_storage):
    data[1024] = ["test", 2, 99999]
    data[1027] = ["test", "bad", single_select_option_2.id]
    data = {"data": data}
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(table=table, data=data, user=user)
        run_async_job(job.id)
    job.refresh_from_db()
    assert job.finished
    assert not job.failed
    model = job.table.get_model()
    assert model.objects.count() == row_count - 5
@ -509,8 +531,8 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
    table, _, _ = data_fixture.build_table(
        columns=[
            ("col1", "text"),
            ("col2", "number"),
        ],
        rows=[],
        user=user,
@ -529,7 +551,9 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
    data += [["test", "bad", single_select_option_1.id]] * (max_error + 5)
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
            table=table, data={"data": data}, user=user
        )
    with pytest.raises(ReportMaxErrorCountExceeded):
        run_async_job(job.id)
@ -550,7 +574,9 @@ def test_run_file_import_limit(data_fixture, patch_filefield_storage):
    data += [["test", 1, 0]] * (max_error + 5)
    with patch_filefield_storage():
        job = data_fixture.create_file_import_job(
            table=table, data={"data": data}, user=user
        )
    with pytest.raises(ReportMaxErrorCountExceeded):
        run_async_job(job.id)
@ -646,3 +672,315 @@ def test_cleanup_file_import_job(data_fixture, settings, patch_filefield_storage
    job3.refresh_from_db()
    assert job3.state == JOB_FINISHED
    assert job3.updated_on == time_before_soft_limit
@pytest.mark.django_db(transaction=True)
def test_run_file_import_task_with_upsert_fields_not_in_table(
data_fixture, patch_filefield_storage
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(user=user, database=database)
data_fixture.create_text_field(table=table, order=1, name="text 1")
init_data = [["foo"], ["bar"]]
with pytest.raises(FieldNotInTable):
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={
"data": init_data,
"configuration": {"upsert_fields": [100, 120]},
},
table=table,
user=user,
)
run_async_job(job.id)
model = table.get_model()
assert len(model.objects.all()) == 0
@pytest.mark.django_db(transaction=True)
def test_run_file_import_task_with_upsert_fields_not_usable(
data_fixture, patch_filefield_storage
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(user=user, database=database)
f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
f2 = data_fixture.create_formula_field(table=table, order=2, name="formula field")
model = table.get_model()
# dummy data just to ensure later on the table wasn't modified.
init_data = [
[
"aa-",
],
[
"aa-",
],
]
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={"data": init_data},
table=table,
user=user,
)
run_async_job(job.id)
job.refresh_from_db()
assert job.state == JOB_FINISHED
assert job.progress_percentage == 100
with pytest.raises(IncompatibleField):
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={
"data": [["bbb"], ["ccc"], ["aaa"]],
"configuration": {
# we're trying to use formula field, which is not supported
"upsert_fields": [f2.id],
"upsert_values": [["aaa"], ["aaa"], ["aaa"]],
},
},
table=table,
user=user,
first_row_header=False,
)
run_async_job(job.id)
rows = model.objects.all()
assert len(rows) == 2
assert all([getattr(r, f1.db_column) == "aa-" for r in rows])
@pytest.mark.django_db(transaction=True)
def test_run_file_import_task_with_upsert_fields_invalid_length(
data_fixture, patch_filefield_storage
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(user=user, database=database)
f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
model = table.get_model()
with pytest.raises(InvalidRowLength):
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={
"data": [["bbb"], ["ccc"], ["aaa"]],
"configuration": {
# fields and values have different lengths
"upsert_fields": [f1.id],
"upsert_values": [
["aaa", "bbb"],
],
},
},
table=table,
user=user,
first_row_header=False,
)
run_async_job(job.id)
job.refresh_from_db()
assert job.failed
rows = model.objects.all()
assert len(rows) == 0
@pytest.mark.django_db(transaction=True)
def test_run_file_import_task_with_upsert(data_fixture, patch_filefield_storage):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(user=user, database=database)
f1 = data_fixture.create_text_field(table=table, order=1, name="text 1")
f2 = data_fixture.create_number_field(
table=table, order=2, name="number 1", number_negative=True
)
f3 = data_fixture.create_date_field(user=user, table=table, order=3, name="date 1")
f4 = data_fixture.create_date_field(
user=user, table=table, order=4, name="datetime 1", date_include_time=True
)
f5 = data_fixture.create_number_field(
table=table,
order=5,
name="value field",
number_negative=True,
number_decimal_places=10,
)
f6 = data_fixture.create_text_field(table=table, order=6, name="text 2")
model = table.get_model()
init_data = [
[
"aaa",
1,
"2024-01-01",
"2024-01-01T01:02:03.004+01:00",
0.1,
"aaa-1-1",
],
[
"aab",
1,
"2024-01-01",
"2024-01-01T01:02:03",
0.2,
"aab-1-1",
],
[
"aac",
1,
"2024-01-01",
"2024-01-01T01:02:03",
0.2,
"aac-1-1",
],
[
None,
None,
None,
None,
None,
None,
],
[
None,
None,
None,
None,
None,
None,
],
[
"aac",
1,
None,
"2024-01-01T01:02:03",
0.2,
"aac-1-2",
],
[
"aab",
1,
"2024-01-01",
None,
0.2,
"aac-1-2",
],
[
"aaa",
1,
"2024-01-01",
"2024-01-01T01:02:03.004+01:00",
0.1,
"aaa-1-1",
],
[
"aaa",
1,
"2024-01-02",
"2024-01-01 01:02:03.004 +01:00",
0.1,
"aaa-1-1",
],
]
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={"data": init_data},
table=table,
user=user,
)
run_async_job(job.id)
job.refresh_from_db()
assert job.state == JOB_FINISHED
assert job.progress_percentage == 100
rows = model.objects.all()
assert len(rows) == len(init_data)
update_with_duplicates = [
# first three are duplicates
[
"aab",
1,
"2024-01-01",
"2024-01-01T01:02:03",
0.3,
"aab-1-1-modified",
],
[
"aaa",
1,
"2024-01-01",
"2024-01-01T01:02:03.004+01:00",
0.2,
"aaa-1-1-modified",
],
[
"aab",
1,
"2024-01-01",
None,
0.33333,
"aac-1-2-modified",
],
# insert
[
"aab",
1,
None,
None,
125,
"aab-1-3-new",
],
[
"aab",
1,
"2024-01-01",
None,
0.33333,
"aab-1-4-new",
],
]
# Without first row header
with patch_filefield_storage():
job = data_fixture.create_file_import_job(
data={
"data": update_with_duplicates,
"configuration": {
"upsert_fields": [f1.id, f2.id, f3.id, f4.id],
"upsert_values": [i[:4] for i in update_with_duplicates],
},
},
table=table,
user=user,
first_row_header=False,
)
run_async_job(job.id)
job.refresh_from_db()
assert job.finished
assert not job.failed
rows = list(model.objects.all())
assert len(rows) == len(init_data) + 2
last = rows[-1]
assert getattr(last, f1.db_column) == "aab"
assert getattr(last, f6.db_column) == "aab-1-4-new"
last = rows[-2]
assert getattr(last, f1.db_column) == "aab"
assert getattr(last, f6.db_column) == "aab-1-3-new"
@ -1751,18 +1751,22 @@ def test_can_filter_in_aggregated_formulas(data_fixture):
        name="autonr",
    )
    rows_b = (
        RowHandler()
        .create_rows(
            user,
            table_b,
            [
                {boolean_field.db_column: True},
                {},
                {boolean_field.db_column: True},
                {},
                {},
                {boolean_field.db_column: True},
                {},
            ],
        )
        .created_rows
    )
    formula_field = data_fixture.create_formula_field(
@ -1771,14 +1775,18 @@ def test_can_filter_in_aggregated_formulas(data_fixture):
        formula=f"max(filter(lookup('link', 'autonr'), lookup('link', 'check')))",
    )
    row_a1, row_a2, row_a3 = (
        RowHandler()
        .create_rows(
            user,
            table_a,
            [
                {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
                {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
                {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
            ],
        )
        .created_rows
    )
    # autonr of row_b[0], because it's the only one with check=True
@ -1800,27 +1808,31 @@ def test_can_filter_in_aggregated_formulas_with_multipleselects(data_fixture):
    option_c = data_fixture.create_select_option(field=multiple_select_field, value="c")
    option_d = data_fixture.create_select_option(field=multiple_select_field, value="d")
    rows_b = (
        RowHandler()
        .create_rows(
            user,
            table_b,
            [
                {
                    boolean_field.db_column: True,
                    multiple_select_field.db_column: [option_a.id, option_b.id],
                },
                {multiple_select_field.db_column: [option_c.id]},
                {
                    boolean_field.db_column: True,
                    multiple_select_field.db_column: [option_d.id],
                },
                {multiple_select_field.db_column: [option_a.id, option_b.id]},
                {multiple_select_field.db_column: [option_c.id, option_d.id]},
                {
                    boolean_field.db_column: True,
                    multiple_select_field.db_column: [option_b.id],
                },
                {},
            ],
        )
        .created_rows
    )
    formula_field = data_fixture.create_formula_field(
@ -1829,14 +1841,18 @@ def test_can_filter_in_aggregated_formulas_with_multipleselects(data_fixture):
        formula=f"count(filter(lookup('link', 'mm'), lookup('link', 'check')))",
    )
    row_a1, row_a2, row_a3 = (
        RowHandler()
        .create_rows(
            user,
            table_a,
            [
                {link_field.db_column: [rows_b[0].id, rows_b[1].id]},
                {link_field.db_column: [rows_b[2].id, rows_b[3].id, rows_b[4].id]},
                {link_field.db_column: [rows_b[4].id, rows_b[5].id, rows_b[6].id]},
            ],
        )
        .created_rows
    )
    # autonr of row_b[0], because it's the only one with check=True
@ -1869,19 +1885,23 @@ def test_formulas_with_lookup_url_field_type(data_fixture):
        table=linked_table,
    )
    linked_row_1, linked_row_2 = (
        RowHandler()
        .create_rows(
            user,
            linked_table,
            [
                {
                    linked_table_primary_field.db_column: "URL #1",
                    linked_table_url_field.db_column: "https://baserow.io/1",
                },
                {
                    linked_table_primary_field.db_column: "URL #2",
                    linked_table_url_field.db_column: "https://baserow.io/2",
                },
            ],
        )
        .created_rows
    )
    link_field = FieldHandler().create_field(
@ -1981,8 +2001,10 @@ def test_lookup_arrays(data_fixture):
        rows=[["b1"], ["b2"]],
        fields=[table_b_primary_field],
    )
    (row_a1,) = (
        RowHandler()
        .create_rows(user, table_a, [{link_field.db_column: [row_b1.id, row_b2.id]}])
        .created_rows
    )
    lookup_field = FieldHandler().create_field(
        user,
@ -2038,17 +2060,21 @@ def test_formulas_with_lookup_to_uuid_primary_field(data_fixture):
        table=linked_table,
    )
    linked_row_1, linked_row_2 = (
        RowHandler()
        .create_rows(
            user,
            linked_table,
            [
                {
                    linked_table_text_field.db_column: "Linked row #1",
                },
                {
                    linked_table_text_field.db_column: "Linked row #2",
                },
            ],
        )
        .created_rows
    )
    link_field = FieldHandler().create_field(
@ -258,23 +258,25 @@ def test_can_undo_importing_rows(data_fixture):
    action_type_registry.get_by_type(ImportRowsActionType).do(
        user,
        table,
        data={
            "data": [
                [
                    "Tesla",
                    240,
                    59999.99,
                ],
                [
                    "Giulietta",
                    210,
                    34999.99,
                ],
                [
                    "Panda",
                    160,
                    8999.99,
                ],
            ]
        },
    )
    assert model.objects.all().count() == 3
@ -314,23 +316,25 @@ def test_can_undo_redo_importing_rows(row_send_mock, table_send_mock, data_fixtu
    action_type_registry.get_by_type(ImportRowsActionType).do(
        user,
        table,
        data={
            "data": [
                [
                    "Tesla",
                    240,
                    59999.99,
                ],
                [
                    "Giulietta",
                    210,
                    34999.99,
                ],
                [
                    "Panda",
                    160,
                    8999.99,
                ],
            ]
        },
    )
    table_send_mock.assert_called_once()
@ -363,14 +367,16 @@ def test_can_undo_redo_importing_rows(row_send_mock, table_send_mock, data_fixtu
    action_type_registry.get_by_type(ImportRowsActionType).do(
        user,
        table,
        data={
            "data": [
                [
                    "Tesla",
                    240,
                    59999.99,
                ],
            ]
            * 51
        },
    )
    row_send_mock.reset_mock()
@ -506,26 +512,30 @@ def test_can_undo_deleting_rows(data_fixture):
    )
    model = table.get_model()
    rows = (
        RowHandler()
        .create_rows(
            user,
            table,
            rows_values=[
                {
                    f"field_{name_field.id}": "Tesla",
                    f"field_{speed_field.id}": 240,
                    f"field_{price_field.id}": 59999.99,
                },
                {
                    f"field_{name_field.id}": "Giulietta",
                    f"field_{speed_field.id}": 210,
                    f"field_{price_field.id}": 34999.99,
                },
                {
                    f"field_{name_field.id}": "Panda",
                    f"field_{speed_field.id}": 160,
                    f"field_{price_field.id}": 8999.99,
                },
            ],
        )
        .created_rows
    )
    assert model.objects.all().count() == 3
@ -565,26 +575,30 @@ def test_can_undo_redo_deleting_rows(data_fixture):
    )
    model = table.get_model()
    rows = (
        RowHandler()
        .create_rows(
            user,
            table,
            rows_values=[
                {
                    f"field_{name_field.id}": "Tesla",
                    f"field_{speed_field.id}": 240,
                    f"field_{price_field.id}": 59999.99,
                },
                {
                    f"field_{name_field.id}": "Giulietta",
                    f"field_{speed_field.id}": 210,
                    f"field_{price_field.id}": 34999.99,
                },
                {
                    f"field_{name_field.id}": "Panda",
                    f"field_{speed_field.id}": 160,
                    f"field_{price_field.id}": 8999.99,
                },
            ],
        )
        .created_rows
    )
    assert model.objects.all().count() == 3
@ -339,7 +339,7 @@ def test_get_adjacent_row(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    next_row = handler.get_adjacent_row(table_model, rows[1].id)
    previous_row = handler.get_adjacent_row(table_model, rows[1].id, previous=True)
@ -373,7 +373,7 @@ def test_get_adjacent_row_with_custom_filters(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    base_queryset = (
        table.get_model()
@ -421,7 +421,7 @@ def test_get_adjacent_row_with_view_sort(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
    previous_row = handler.get_adjacent_row(
@ -460,7 +460,7 @@ def test_get_adjacent_row_with_view_group_by(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
    previous_row = handler.get_adjacent_row(
@ -497,7 +497,7 @@ def test_get_adjacent_row_with_search(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    search = "a"
    next_row = handler.get_adjacent_row(table_model, row_2.id, view=view, search=search)
@ -551,7 +551,7 @@ def test_get_adjacent_row_with_view_group_by_and_view_sort(data_fixture):
            },
        ],
        model=table_model,
    ).created_rows
    next_row = handler.get_adjacent_row(table_model, row_2.id, view=view)
    previous_row = handler.get_adjacent_row(
@ -582,7 +582,7 @@ def test_get_adjacent_row_performance_many_rows(data_fixture):
    table_model = table.get_model()
    rows = handler.create_rows(
        user=user, table=table, rows_values=row_values, model=table_model
    ).created_rows
    profiler = Profiler()
    profiler.start()
@ -621,7 +621,7 @@ def test_get_adjacent_row_performance_many_fields(data_fixture):
    table_model = table.get_model()
    rows = handler.create_rows(
        user=user, table=table, rows_values=row_values, model=table_model
    ).created_rows
    profiler = Profiler()
    profiler.start()
@ -747,7 +747,7 @@ def test_update_rows_return_original_values_and_fields_metadata(data_fixture):
user=user,
table=table,
rows_values=[{}, {}],
-)
+).created_rows
result = handler.update_rows(
user=user,
@ -842,7 +842,9 @@ def test_create_rows_created_on_and_last_modified(data_fixture):
handler = RowHandler()
with freeze_time("2020-01-01 12:00"):
-rows = handler.create_rows(user=user, table=table, rows_values=[{}])
+rows = handler.create_rows(
+user=user, table=table, rows_values=[{}]
+).created_rows
row = rows[0]
assert row.created_on == datetime(2020, 1, 1, 12, 0, tzinfo=timezone.utc)
assert row.updated_on == datetime(2020, 1, 1, 12, 0, tzinfo=timezone.utc)
@ -862,7 +864,7 @@ def test_create_rows_last_modified_by(data_fixture):
{f"field_{name_field.id}": "Test"}, {f"field_{name_field.id}": "Test"},
{f"field_{name_field.id}": "Test 2"}, {f"field_{name_field.id}": "Test 2"},
], ],
) ).created_rows
assert rows[0].last_modified_by == user assert rows[0].last_modified_by == user
assert rows[1].last_modified_by == user assert rows[1].last_modified_by == user
@ -1562,15 +1564,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
# An UPDATE query to set the formula field value + 1 query due
# to FormulaFieldType.after_rows_created
with django_assert_num_queries(len(captured.captured_queries) + 2):
-(r,) = RowHandler().force_create_rows(
+(r,) = (
+RowHandler()
+.force_create_rows(
user=user,
table=table,
rows_values=[
{
f"field_{name_field.id}": "Giulietta",
}
],
model=model,
+)
+.created_rows
)
assert getattr(r, f"field_{f1.id}") == "Giulietta-a"
@ -1584,15 +1590,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
model = table.get_model()
with django_assert_num_queries(len(captured.captured_queries) + 2):
-(r,) = RowHandler().force_create_rows(
+(r,) = (
+RowHandler()
+.force_create_rows(
user=user,
table=table,
rows_values=[
{
f"field_{name_field.id}": "Stelvio",
}
],
model=model,
+)
+.created_rows
)
assert getattr(r, f"field_{f1.id}") == "Stelvio-a"
assert getattr(r, f"field_{f2.id}") == "Stelvio-b"
@ -1609,15 +1619,19 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_created(
# Now a second UPDATE query is needed, so that F3 can use the result
# of F1 to correctly calculate its value
with django_assert_num_queries(len(captured.captured_queries) + 3):
-(r,) = RowHandler().force_create_rows(
+(r,) = (
+RowHandler()
+.force_create_rows(
user=user,
table=table,
rows_values=[
{
f"field_{name_field.id}": "Tonale",
}
],
model=model,
+)
+.created_rows
)
assert getattr(r, f"field_{f1.id}") == "Tonale-a"
assert getattr(r, f"field_{f2.id}") == "Tonale-b"
@ -1642,7 +1656,11 @@ def test_formula_referencing_fields_add_additional_queries_on_rows_updated(
# in the FieldDependencyHandler:
# link_row_field_content_type = ContentType.objects.get_for_model(LinkRowField)
# so let's create a row first to avoid counting that query
-(r,) = RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
+(r,) = (
+RowHandler()
+.force_create_rows(user=user, table=table, rows_values=[{}])
+.created_rows
+)
with CaptureQueriesContext(connection) as captured:
RowHandler().force_update_rows(
@ -1740,18 +1758,26 @@ def test_can_move_rows_and_formulas_are_updated_correctly(data_fixture):
table_a, table_b, link_a_b = data_fixture.create_two_linked_tables(user=user)
prim_b = data_fixture.create_text_field(table=table_b, primary=True, name="name")
-row_b1, row_b2 = RowHandler().create_rows(
-user, table_b, [{prim_b.db_column: "b1"}, {prim_b.db_column: "b2"}]
+row_b1, row_b2 = (
+RowHandler()
+.create_rows(
+user, table_b, [{prim_b.db_column: "b1"}, {prim_b.db_column: "b2"}]
+)
+.created_rows
)
lookup_a = data_fixture.create_formula_field(
table=table_a, formula="join(lookup('link', 'name'), '')"
)
-row_a1, row_a2 = RowHandler().create_rows(
-user,
-table_a,
-[{link_a_b.db_column: [row_b1.id]}, {link_a_b.db_column: [row_b2.id]}],
+row_a1, row_a2 = (
+RowHandler()
+.create_rows(
+user,
+table_a,
+[{link_a_b.db_column: [row_b1.id]}, {link_a_b.db_column: [row_b2.id]}],
+)
+.created_rows
)
assert getattr(row_a1, lookup_a.db_column) == "b1"

@ -482,39 +482,43 @@ def test_order_by_fields_string_queryset(data_fixture):
field=multiple_select_field, value="D", color="red" field=multiple_select_field, value="D", color="red"
) )
row_1, row_2, row_3, row_4 = RowHandler().force_create_rows( row_1, row_2, row_3, row_4 = (
user=None, RowHandler()
table=table, .force_create_rows(
rows_values=[ user=None,
{ table=table,
name_field.db_column: "BMW", rows_values=[
color_field.db_column: "Blue", {
price_field.db_column: 10000, name_field.db_column: "BMW",
description_field.db_column: "Sports car.", color_field.db_column: "Blue",
single_select_field.db_column: option_a.id, price_field.db_column: 10000,
multiple_select_field.db_column: [option_c.id], description_field.db_column: "Sports car.",
}, single_select_field.db_column: option_a.id,
{ multiple_select_field.db_column: [option_c.id],
name_field.db_column: "Audi", },
color_field.db_column: "Orange", {
price_field.db_column: 20000, name_field.db_column: "Audi",
description_field.db_column: "This is the most expensive car we have.", color_field.db_column: "Orange",
single_select_field.db_column: option_b.id, price_field.db_column: 20000,
multiple_select_field.db_column: [option_d.id], description_field.db_column: "This is the most expensive car we have.",
}, single_select_field.db_column: option_b.id,
{ multiple_select_field.db_column: [option_d.id],
name_field.db_column: "Volkswagen", },
color_field.db_column: "White", {
price_field.db_column: 5000, name_field.db_column: "Volkswagen",
description_field.db_column: "A very old car.", color_field.db_column: "White",
}, price_field.db_column: 5000,
{ description_field.db_column: "A very old car.",
name_field.db_column: "Volkswagen", },
color_field.db_column: "Green", {
price_field.db_column: 4000, name_field.db_column: "Volkswagen",
description_field.db_column: "Strange color.", color_field.db_column: "Green",
}, price_field.db_column: 4000,
], description_field.db_column: "Strange color.",
},
],
)
.created_rows
) )
model = table.get_model() model = table.get_model()
@ -704,19 +708,23 @@ def test_order_by_fields_string_queryset_with_type(data_fixture):
field=single_select_field, value="B", color="red", order=1 field=single_select_field, value="B", color="red", order=1
) )
row_1, row_2 = RowHandler().force_create_rows( row_1, row_2 = (
user=None, RowHandler()
table=table, .force_create_rows(
rows_values=[ user=None,
{ table=table,
name_field.db_column: "BMW", rows_values=[
single_select_field.db_column: option_a.id, {
}, name_field.db_column: "BMW",
{ single_select_field.db_column: option_a.id,
name_field.db_column: "Audi", },
single_select_field.db_column: option_b.id, {
}, name_field.db_column: "Audi",
], single_select_field.db_column: option_b.id,
},
],
)
.created_rows
) )
model = table.get_model() model = table.get_model()

@ -105,7 +105,7 @@ if settings.CACHALOT_ENABLED:
{f"field_{field.id}": [select_options[0].id, select_options[1].value]}, {f"field_{field.id}": [select_options[0].id, select_options[1].value]},
{f"field_{field.id}": [select_options[2].value, select_options[0].id]}, {f"field_{field.id}": [select_options[2].value, select_options[0].id]},
], ],
) ).created_rows
url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id}) url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id})
response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"}) response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})

@ -43,17 +43,27 @@ def test_import_export_database(data_fixture):
data_fixture.create_view_sort(view=view, field=text_field)
with freeze_time("2021-01-01 12:30"):
-row, _ = RowHandler().force_create_rows(
-user,
-table,
-[{f"field_{text_field.id}": "Test"}, {f"field_{text_field.id}": "Test 2"}],
+row = (
+RowHandler()
+.force_create_rows(
+user,
+table,
+[
+{f"field_{text_field.id}": "Test"},
+{f"field_{text_field.id}": "Test 2"},
+],
+)
+.created_rows[0]
)
with freeze_time("2021-01-02 13:30"):
-res = RowHandler().force_update_rows(
-user, table, [{"id": row.id, f"field_{text_field.id}": "Test"}]
+row = (
+RowHandler()
+.force_update_rows(
+user, table, [{"id": row.id, f"field_{text_field.id}": "Test"}]
+)
+.updated_rows[0]
)
-row = res.updated_rows[0]
database_type = application_type_registry.get("database")
config = ImportExportConfig(include_permission_data=True)

@ -92,7 +92,7 @@ def boolean_lookup_filter_proc(
linked_rows = test_setup.row_handler.create_rows(
user=test_setup.user, table=test_setup.other_table, rows_values=dict_rows
-)
+).created_rows
rows = [
# mixed
{
@ -126,7 +126,7 @@ def boolean_lookup_filter_proc(
]
r_mixed, r_false, r_true, r_none = test_setup.row_handler.create_rows(
user=test_setup.user, table=test_setup.table, rows_values=rows
-)
+).created_rows
rows = [r_mixed, r_false, r_true, r_none]
selected = [rows[idx] for idx in expected_rows]
@ -2423,7 +2423,7 @@ def setup_multiple_select_rows(data_fixture):
{f"field_{test_setup.target_field.id}": row_B_value}, {f"field_{test_setup.target_field.id}": row_B_value},
{f"field_{test_setup.target_field.id}": row_empty_value}, {f"field_{test_setup.target_field.id}": row_empty_value},
], ],
) ).created_rows
row_1 = test_setup.row_handler.create_row( row_1 = test_setup.row_handler.create_row(
user=test_setup.user, user=test_setup.user,
table=test_setup.table, table=test_setup.table,
@ -2629,7 +2629,7 @@ def setup_date_rows(data_fixture, field_factory):
{},
],
model=test_setup.other_table_model,
-)
+).created_rows
row_1, row_2, empty_row = test_setup.row_handler.force_create_rows(
user,
test_setup.table,
@ -2639,7 +2639,7 @@ def setup_date_rows(data_fixture, field_factory):
{test_setup.link_row_field.db_column: [other_row_3.id]},
],
model=test_setup.model,
-)
+).created_rows
return test_setup, [row_1, row_2, empty_row]
@ -2745,16 +2745,20 @@ def table_view_fields_rows(data_fixture):
datetime_field = data_fixture.create_date_field( datetime_field = data_fixture.create_date_field(
table=orig_table, date_include_time=True table=orig_table, date_include_time=True
) )
orig_rows = RowHandler().force_create_rows( orig_rows = (
user, RowHandler()
orig_table, .force_create_rows(
[ user,
{ orig_table,
date_field.db_column: date_value, [
datetime_field.db_column: date_value, {
} date_field.db_column: date_value,
for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES datetime_field.db_column: date_value,
], }
for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
],
)
.created_rows
) )
table = data_fixture.create_database_table(database=orig_table.database) table = data_fixture.create_database_table(database=orig_table.database)
@ -2777,10 +2781,14 @@ def table_view_fields_rows(data_fixture):
through_field_name=link_field.name, through_field_name=link_field.name,
target_field_name=datetime_field.name, target_field_name=datetime_field.name,
) )
rows = RowHandler().force_create_rows( rows = (
user, RowHandler()
table, .force_create_rows(
[{link_field.db_column: [r.id]} for r in orig_rows], user,
table,
[{link_field.db_column: [r.id]} for r in orig_rows],
)
.created_rows
) )
grid_view = data_fixture.create_grid_view(table=table) grid_view = data_fixture.create_grid_view(table=table)

@ -89,33 +89,37 @@ def test_equal_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "Test", rows_values=[
f"field_{long_text_field.id}": "Long", {
f"field_{integer_field.id}": 10, f"field_{text_field.id}": "Test",
f"field_{decimal_field.id}": 20.20, f"field_{long_text_field.id}": "Long",
f"field_{boolean_field.id}": True, f"field_{integer_field.id}": 10,
}, f"field_{decimal_field.id}": 20.20,
{ f"field_{boolean_field.id}": True,
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{integer_field.id}": None, f"field_{text_field.id}": "",
f"field_{decimal_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{boolean_field.id}": False, f"field_{integer_field.id}": None,
}, f"field_{decimal_field.id}": None,
{ f"field_{boolean_field.id}": False,
f"field_{text_field.id}": "NOT", },
f"field_{long_text_field.id}": "NOT2", {
f"field_{integer_field.id}": 99, f"field_{text_field.id}": "NOT",
f"field_{decimal_field.id}": 99.99, f"field_{long_text_field.id}": "NOT2",
f"field_{boolean_field.id}": False, f"field_{integer_field.id}": 99,
}, f"field_{decimal_field.id}": 99.99,
], f"field_{boolean_field.id}": False,
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -225,33 +229,37 @@ def test_not_equal_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "Test", rows_values=[
f"field_{long_text_field.id}": "Long", {
f"field_{integer_field.id}": 10, f"field_{text_field.id}": "Test",
f"field_{decimal_field.id}": 20.20, f"field_{long_text_field.id}": "Long",
f"field_{boolean_field.id}": True, f"field_{integer_field.id}": 10,
}, f"field_{decimal_field.id}": 20.20,
{ f"field_{boolean_field.id}": True,
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{integer_field.id}": None, f"field_{text_field.id}": "",
f"field_{decimal_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{boolean_field.id}": False, f"field_{integer_field.id}": None,
}, f"field_{decimal_field.id}": None,
{ f"field_{boolean_field.id}": False,
f"field_{text_field.id}": "NOT", },
f"field_{long_text_field.id}": "NOT2", {
f"field_{integer_field.id}": 99, f"field_{text_field.id}": "NOT",
f"field_{decimal_field.id}": 99.99, f"field_{long_text_field.id}": "NOT2",
f"field_{boolean_field.id}": False, f"field_{integer_field.id}": 99,
}, f"field_{decimal_field.id}": 99.99,
], f"field_{boolean_field.id}": False,
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -394,36 +402,40 @@ def test_contains_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, _, row_3 = RowHandler().create_rows( row, _, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "My name is John Doe.", rows_values=[
f"field_{long_text_field.id}": "Long text that is not empty.", {
f"field_{date_field.id}": "2020-02-01 01:23", f"field_{text_field.id}": "My name is John Doe.",
f"field_{number_field.id}": "98989898", f"field_{long_text_field.id}": "Long text that is not empty.",
f"field_{single_select_field.id}": option_a, f"field_{date_field.id}": "2020-02-01 01:23",
f"field_{multiple_select_field.id}": [option_c.id, option_d.id], f"field_{number_field.id}": "98989898",
}, f"field_{single_select_field.id}": option_a,
{ f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{date_field.id}": None, f"field_{text_field.id}": "",
f"field_{number_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{single_select_field.id}": None, f"field_{date_field.id}": None,
}, f"field_{number_field.id}": None,
{ f"field_{single_select_field.id}": None,
f"field_{text_field.id}": "This is a test field.", },
f"field_{long_text_field.id}": "This text is a bit longer, but it also " {
"contains.\n A multiline approach.", f"field_{text_field.id}": "This is a test field.",
f"field_{date_field.id}": "0001-01-02 00:12", f"field_{long_text_field.id}": "This text is a bit longer, but it also "
f"field_{number_field.id}": "10000", "contains.\n A multiline approach.",
f"field_{single_select_field.id}": option_b, f"field_{date_field.id}": "0001-01-02 00:12",
f"field_{multiple_select_field.id}": [option_c.id], f"field_{number_field.id}": "10000",
}, f"field_{single_select_field.id}": option_b,
], f"field_{multiple_select_field.id}": [option_c.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -603,36 +615,40 @@ def test_contains_not_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "My name is John Doe.", rows_values=[
f"field_{long_text_field.id}": "Long text that is not empty.", {
f"field_{date_field.id}": "2020-02-01 01:23", f"field_{text_field.id}": "My name is John Doe.",
f"field_{number_field.id}": "98989898", f"field_{long_text_field.id}": "Long text that is not empty.",
f"field_{single_select_field.id}": option_a, f"field_{date_field.id}": "2020-02-01 01:23",
f"field_{multiple_select_field.id}": [option_c.id, option_d.id], f"field_{number_field.id}": "98989898",
}, f"field_{single_select_field.id}": option_a,
{ f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{date_field.id}": None, f"field_{text_field.id}": "",
f"field_{number_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{single_select_field.id}": None, f"field_{date_field.id}": None,
}, f"field_{number_field.id}": None,
{ f"field_{single_select_field.id}": None,
f"field_{text_field.id}": "This is a test field.", },
f"field_{long_text_field.id}": "This text is a bit longer, but it also " {
"contains.\n A multiline approach.", f"field_{text_field.id}": "This is a test field.",
f"field_{date_field.id}": "0001-01-02 00:12", f"field_{long_text_field.id}": "This text is a bit longer, but it also "
f"field_{number_field.id}": "10000", "contains.\n A multiline approach.",
f"field_{single_select_field.id}": option_b, f"field_{date_field.id}": "0001-01-02 00:12",
f"field_{multiple_select_field.id}": [option_d.id], f"field_{number_field.id}": "10000",
}, f"field_{single_select_field.id}": option_b,
], f"field_{multiple_select_field.id}": [option_d.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -818,36 +834,40 @@ def test_contains_word_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "My name is John Doe.", rows_values=[
f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.", {
f"field_{url_field.id}": "https://www.example.com", f"field_{text_field.id}": "My name is John Doe.",
f"field_{email_field.id}": "test.user@example.com", f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
f"field_{single_select_field.id}": option_a, f"field_{url_field.id}": "https://www.example.com",
f"field_{multiple_select_field.id}": [option_c.id, option_d.id], f"field_{email_field.id}": "test.user@example.com",
}, f"field_{single_select_field.id}": option_a,
{ f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{url_field.id}": "", f"field_{text_field.id}": "",
f"field_{email_field.id}": "", f"field_{long_text_field.id}": "",
f"field_{single_select_field.id}": None, f"field_{url_field.id}": "",
}, f"field_{email_field.id}": "",
{ f"field_{single_select_field.id}": None,
f"field_{text_field.id}": "This is a test field with the word Johny.", },
f"field_{long_text_field.id}": "This text is a bit longer, but it also " {
"contains.\n A multiline approach.", f"field_{text_field.id}": "This is a test field with the word Johny.",
f"field_{url_field.id}": "https://www.examplewebsite.com", f"field_{long_text_field.id}": "This text is a bit longer, but it also "
f"field_{email_field.id}": "test.user@examplewebsite.com", "contains.\n A multiline approach.",
f"field_{single_select_field.id}": option_b, f"field_{url_field.id}": "https://www.examplewebsite.com",
f"field_{multiple_select_field.id}": [option_c.id], f"field_{email_field.id}": "test.user@examplewebsite.com",
}, f"field_{single_select_field.id}": option_b,
], f"field_{multiple_select_field.id}": [option_c.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -1011,36 +1031,40 @@ def test_doesnt_contain_word_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "My name is John Doe.", rows_values=[
f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.", {
f"field_{url_field.id}": "https://www.example.com", f"field_{text_field.id}": "My name is John Doe.",
f"field_{email_field.id}": "test.user@example.com", f"field_{long_text_field.id}": "Long text that is not empty, but also not multilined.",
f"field_{single_select_field.id}": option_a, f"field_{url_field.id}": "https://www.example.com",
f"field_{multiple_select_field.id}": [option_c.id, option_d.id], f"field_{email_field.id}": "test.user@example.com",
}, f"field_{single_select_field.id}": option_a,
{ f"field_{multiple_select_field.id}": [option_c.id, option_d.id],
f"field_{text_field.id}": "", },
f"field_{long_text_field.id}": "", {
f"field_{url_field.id}": "", f"field_{text_field.id}": "",
f"field_{email_field.id}": "", f"field_{long_text_field.id}": "",
f"field_{single_select_field.id}": None, f"field_{url_field.id}": "",
}, f"field_{email_field.id}": "",
{ f"field_{single_select_field.id}": None,
f"field_{text_field.id}": "This is a test field with the word Johny.", },
f"field_{long_text_field.id}": "This text is a bit longer, but it also " {
"contains.\n A multiline approach.", f"field_{text_field.id}": "This is a test field with the word Johny.",
f"field_{url_field.id}": "https://www.examplewebsite.com", f"field_{long_text_field.id}": "This text is a bit longer, but it also "
f"field_{email_field.id}": "test.user@examplewebsite.com", "contains.\n A multiline approach.",
f"field_{single_select_field.id}": option_b, f"field_{url_field.id}": "https://www.examplewebsite.com",
f"field_{multiple_select_field.id}": [option_c.id], f"field_{email_field.id}": "test.user@examplewebsite.com",
}, f"field_{single_select_field.id}": option_b,
], f"field_{multiple_select_field.id}": [option_c.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -3275,56 +3299,60 @@ def test_empty_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
row, row_2, row_3 = RowHandler().create_rows( row, row_2, row_3 = (
user, RowHandler()
table, .create_rows(
[ user,
{ table,
f"field_{text_field.id}": "", [
f"field_{long_text_field.id}": "", {
f"field_{integer_field.id}": None, f"field_{text_field.id}": "",
f"field_{decimal_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{date_field.id}": None, f"field_{integer_field.id}": None,
f"field_{date_time_field.id}": None, f"field_{decimal_field.id}": None,
f"field_{boolean_field.id}": False, f"field_{date_field.id}": None,
f"field_{file_field.id}": [], f"field_{date_time_field.id}": None,
f"field_{single_select_field.id}_id": None, f"field_{boolean_field.id}": False,
}, f"field_{file_field.id}": [],
{ f"field_{single_select_field.id}_id": None,
f"field_{text_field.id}": "Value", },
f"field_{long_text_field.id}": "Value", {
f"field_{integer_field.id}": 10, f"field_{text_field.id}": "Value",
f"field_{decimal_field.id}": 1022, f"field_{long_text_field.id}": "Value",
f"field_{date_field.id}": date(2020, 6, 17), f"field_{integer_field.id}": 10,
f"field_{date_time_field.id}": datetime( f"field_{decimal_field.id}": 1022,
2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc f"field_{date_field.id}": date(2020, 6, 17),
), f"field_{date_time_field.id}": datetime(
f"field_{boolean_field.id}": True, 2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
f"field_{file_field.id}": [{"name": file_a.name}], ),
f"field_{single_select_field.id}_id": option_1.id, f"field_{boolean_field.id}": True,
f"field_{link_row_field.id}": [tmp_row.id], f"field_{file_field.id}": [{"name": file_a.name}],
f"field_{multiple_select_field.id}": [option_2.id], f"field_{single_select_field.id}_id": option_1.id,
}, f"field_{link_row_field.id}": [tmp_row.id],
{ f"field_{multiple_select_field.id}": [option_2.id],
f"field_{text_field.id}": "other value", },
f"field_{long_text_field.id}": " ", {
f"field_{integer_field.id}": 0, f"field_{text_field.id}": "other value",
f"field_{decimal_field.id}": 0.00, f"field_{long_text_field.id}": " ",
f"field_{date_field.id}": date(1970, 1, 1), f"field_{integer_field.id}": 0,
f"field_{date_time_field.id}": datetime( f"field_{decimal_field.id}": 0.00,
1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc f"field_{date_field.id}": date(1970, 1, 1),
), f"field_{date_time_field.id}": datetime(
f"field_{boolean_field.id}": True, 1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc
f"field_{file_field.id}": [ ),
{"name": file_a.name}, f"field_{boolean_field.id}": True,
{"name": file_b.name}, f"field_{file_field.id}": [
], {"name": file_a.name},
f"field_{single_select_field.id}_id": option_1.id, {"name": file_b.name},
f"field_{link_row_field.id}": [tmp_row.id], ],
f"field_{multiple_select_field.id}": [option_2.id, option_3.id], f"field_{single_select_field.id}_id": option_1.id,
}, f"field_{link_row_field.id}": [tmp_row.id],
], f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -3434,38 +3462,42 @@ def test_not_empty_filter_type(data_fixture):
handler = ViewHandler() handler = ViewHandler()
model = table.get_model() model = table.get_model()
_, row_2 = RowHandler().create_rows( _, row_2 = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{text_field.id}": "", rows_values=[
f"field_{long_text_field.id}": "", {
f"field_{integer_field.id}": None, f"field_{text_field.id}": "",
f"field_{decimal_field.id}": None, f"field_{long_text_field.id}": "",
f"field_{date_field.id}": None, f"field_{integer_field.id}": None,
f"field_{date_time_field.id}": None, f"field_{decimal_field.id}": None,
f"field_{boolean_field.id}": False, f"field_{date_field.id}": None,
f"field_{file_field.id}": [], f"field_{date_time_field.id}": None,
f"field_{single_select_field.id}": None, f"field_{boolean_field.id}": False,
}, f"field_{file_field.id}": [],
{ f"field_{single_select_field.id}": None,
f"field_{text_field.id}": "Value", },
f"field_{long_text_field.id}": "Value", {
f"field_{integer_field.id}": 10, f"field_{text_field.id}": "Value",
f"field_{decimal_field.id}": 1022, f"field_{long_text_field.id}": "Value",
f"field_{date_field.id}": date(2020, 6, 17), f"field_{integer_field.id}": 10,
f"field_{date_time_field.id}": datetime( f"field_{decimal_field.id}": 1022,
2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc f"field_{date_field.id}": date(2020, 6, 17),
), f"field_{date_time_field.id}": datetime(
f"field_{boolean_field.id}": True, 2020, 6, 17, 1, 30, 0, tzinfo=timezone.utc
f"field_{file_field.id}": [{"name": file_a.name}], ),
f"field_{single_select_field.id}_id": option_1.id, f"field_{boolean_field.id}": True,
f"field_{link_row_field.id}": [tmp_row.id], f"field_{file_field.id}": [{"name": file_a.name}],
f"field_{multiple_select_field.id}": [option_2.id, option_3.id], f"field_{single_select_field.id}_id": option_1.id,
}, f"field_{link_row_field.id}": [tmp_row.id],
], f"field_{multiple_select_field.id}": [option_2.id, option_3.id],
model=model, },
],
model=model,
)
.created_rows
) )
view_filter = data_fixture.create_view_filter( view_filter = data_fixture.create_view_filter(
@ -5729,7 +5761,7 @@ def test_multiple_collaborators_empty_filter_type(data_fixture):
multiple_collaborators_field.db_column: [],
},
],
-)
+).created_rows
handler = ViewHandler()
for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
grid_view = data_fixture.create_grid_view(table=table)
@ -5786,7 +5818,7 @@ def test_multiple_collaborators_not_empty_filter_type(data_fixture):
multiple_collaborators_field.db_column: [],
},
],
-)
+).created_rows
handler = ViewHandler()
for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
grid_view = data_fixture.create_grid_view(table=table)
@ -5852,7 +5884,7 @@ def test_multiple_collaborators_has_filter_type(data_fixture):
],
},
],
-)
+).created_rows
handler = ViewHandler()
for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
@ -5980,7 +6012,7 @@ def test_multiple_collaborators_has_not_filter_type(data_fixture):
],
},
],
-)
+).created_rows
handler = ViewHandler()
for field in [multiple_collaborators_field, ref_multiple_collaborators_field]:
@ -6668,16 +6700,20 @@ def table_view_fields_rows(data_fixture):
grid_view = data_fixture.create_grid_view(table=table)
date_field = data_fixture.create_date_field(table=table)
datetime_field = data_fixture.create_date_field(table=table, date_include_time=True)
-rows = RowHandler().create_rows(
+rows = (
+RowHandler()
+.create_rows(
user,
table,
[
{
date_field.db_column: date_value,
datetime_field.db_column: date_value,
}
for date_value in TEST_MULTI_STEP_DATE_OPERATORS_DATETIMES
],
+)
+.created_rows
)
return table, grid_view, date_field, datetime_field, rows

@ -4422,14 +4422,18 @@ def test_can_duplicate_views_with_multiple_collaborator_has_filter(data_fixture)
view=grid, field=field, type="multiple_collaborators_has", value=user_1.id view=grid, field=field, type="multiple_collaborators_has", value=user_1.id
) )
rows = RowHandler().force_create_rows( rows = (
user_1, RowHandler()
table, .force_create_rows(
[ user_1,
{field.db_column: []}, table,
{field.db_column: [{"id": user_1.id, "name": user_1.first_name}]}, [
{field.db_column: [{"id": user_2.id, "name": user_2.first_name}]}, {field.db_column: []},
], {field.db_column: [{"id": user_1.id, "name": user_1.first_name}]},
{field.db_column: [{"id": user_2.id, "name": user_2.first_name}]},
],
)
.created_rows
) )
results = ViewHandler().get_queryset(grid) results = ViewHandler().get_queryset(grid)

@ -156,7 +156,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows( (new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
) ).created_rows
p.assert_not_called() p.assert_not_called()
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
@ -169,7 +169,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows( (new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
) ).created_rows
p.assert_called_once() p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [new_row.id] assert p.call_args[1]["row_ids"] == [new_row.id]
@ -188,7 +188,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows( (new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
) ).created_rows
assert p.call_count == 2 assert p.call_count == 2
assert p.call_args_list[0][1]["view"].id == view_a.id assert p.call_args_list[0][1]["view"].id == view_a.id
assert p.call_args_list[0][1]["row_ids"] == [new_row.id] assert p.call_args_list[0][1]["row_ids"] == [new_row.id]
@ -209,7 +209,7 @@ def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows( (new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
) ).created_rows
p.assert_not_called() p.assert_not_called()
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p: with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
@ -498,10 +498,10 @@ def test_rows_enter_and_exit_view_when_data_changes_in_looked_up_tables(
model_b = table_b.get_model()
(row_b1,) = row_handler.force_create_rows(
user, table_b, [{text_field_b.db_column: ""}], model=model_b
-)
+).created_rows
_, row_a2 = row_handler.force_create_rows(
user, table_a, [{}, {link_a_to_b.db_column: [row_b1.id]}], model=model_a
-)
+).created_rows
view_a = data_fixture.create_grid_view(table=table_a)
view_filter = data_fixture.create_view_filter(
@ -519,7 +519,7 @@ def test_rows_enter_and_exit_view_when_data_changes_in_looked_up_tables(
(row_a3,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b1.id]}], model=model_a
-)
+).created_rows
assert p.call_count == 2
assert p.call_args_list[1][1]["view"].id == view_a.id

@ -203,10 +203,14 @@ def test_batch_rows_created_public_views_receive_restricted_row_created_ws_event
{f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"}, {f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"},
] ]
rows = RowHandler().create_rows( rows = (
user=user, RowHandler()
table=table, .create_rows(
rows_values=rows_to_create, user=user,
table=table,
rows_values=rows_to_create,
)
.created_rows
) )
assert mock_broadcast_to_channel_group.delay.mock_calls == ( assert mock_broadcast_to_channel_group.delay.mock_calls == (
@ -316,10 +320,14 @@ def test_batch_rows_created_public_views_receive_row_created_when_filters_match(
{f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"}, {f"field_{visible_field.id}": "Visible", f"field_{hidden_field.id}": "Hidden"},
] ]
rows = RowHandler().create_rows( rows = (
user=user, RowHandler()
table=table, .create_rows(
rows_values=rows_to_create, user=user,
table=table,
rows_values=rows_to_create,
)
.created_rows
) )
assert mock_broadcast_to_channel_group.delay.mock_calls == ( assert mock_broadcast_to_channel_group.delay.mock_calls == (

@ -322,14 +322,18 @@ def test_local_baserow_list_rows_service_dispatch_data_with_view_and_service_fil
], ],
) )
field = table.field_set.get(name="Ingredient") field = table.field_set.get(name="Ingredient")
[row_1, row_2, _] = RowHandler().create_rows( [row_1, row_2, _] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{field.id}": "Cheese"}, table,
{f"field_{field.id}": "Chicken"}, rows_values=[
{f"field_{field.id}": "Milk"}, {f"field_{field.id}": "Cheese"},
], {f"field_{field.id}": "Chicken"},
{f"field_{field.id}": "Milk"},
],
)
.created_rows
) )
view = data_fixture.create_grid_view(user, table=table, owned_by=user) view = data_fixture.create_grid_view(user, table=table, owned_by=user)
@ -385,15 +389,19 @@ def test_local_baserow_list_rows_service_dispatch_data_with_varying_filter_types
) )
ingredient = table.field_set.get(name="Ingredient") ingredient = table.field_set.get(name="Ingredient")
cost = table.field_set.get(name="Cost") cost = table.field_set.get(name="Cost")
[row_1, row_2, row_3, _] = RowHandler().create_rows( [row_1, row_2, row_3, _] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{ingredient.id}": "Duck", f"field_{cost.id}": 50}, table,
{f"field_{ingredient.id}": "Duckling", f"field_{cost.id}": 25}, rows_values=[
{f"field_{ingredient.id}": "Goose", f"field_{cost.id}": 150}, {f"field_{ingredient.id}": "Duck", f"field_{cost.id}": 50},
{f"field_{ingredient.id}": "Beef", f"field_{cost.id}": 250}, {f"field_{ingredient.id}": "Duckling", f"field_{cost.id}": 25},
], {f"field_{ingredient.id}": "Goose", f"field_{cost.id}": 150},
{f"field_{ingredient.id}": "Beef", f"field_{cost.id}": 250},
],
)
.created_rows
) )
view = data_fixture.create_grid_view( view = data_fixture.create_grid_view(
@ -470,14 +478,18 @@ def test_local_baserow_list_rows_service_dispatch_data_with_view_and_service_sor
) )
ingredients = table.field_set.get(name="Ingredient") ingredients = table.field_set.get(name="Ingredient")
cost = table.field_set.get(name="Cost") cost = table.field_set.get(name="Cost")
[row_1, row_2, row_3] = RowHandler().create_rows( [row_1, row_2, row_3] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{ingredients.id}": "Duck", f"field_{cost.id}": 50}, table,
{f"field_{ingredients.id}": "Goose", f"field_{cost.id}": 150}, rows_values=[
{f"field_{ingredients.id}": "Beef", f"field_{cost.id}": 250}, {f"field_{ingredients.id}": "Duck", f"field_{cost.id}": 50},
], {f"field_{ingredients.id}": "Goose", f"field_{cost.id}": 150},
{f"field_{ingredients.id}": "Beef", f"field_{cost.id}": 250},
],
)
.created_rows
) )
view = data_fixture.create_grid_view(user, table=table, owned_by=user) view = data_fixture.create_grid_view(user, table=table, owned_by=user)
service_type = LocalBaserowListRowsUserServiceType() service_type = LocalBaserowListRowsUserServiceType()

View file

@ -44,15 +44,19 @@ def test_local_baserow_table_service_filterable_mixin_get_table_queryset(
table_model = table.get_model() table_model = table.get_model()
service = data_fixture.create_local_baserow_list_rows_service(table=table) service = data_fixture.create_local_baserow_list_rows_service(table=table)
[alessia, alex, alastair, alexandra] = RowHandler().create_rows( [alessia, alex, alastair, alexandra] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{field.id}": "Alessia"}, table,
{f"field_{field.id}": "Alex"}, rows_values=[
{f"field_{field.id}": "Alastair"}, {f"field_{field.id}": "Alessia"},
{f"field_{field.id}": "Alexandra"}, {f"field_{field.id}": "Alex"},
], {f"field_{field.id}": "Alastair"},
{f"field_{field.id}": "Alexandra"},
],
)
.created_rows
) )
dispatch_context = FakeDispatchContext() dispatch_context = FakeDispatchContext()
@ -254,15 +258,19 @@ def test_local_baserow_table_service_sortable_mixin_get_table_queryset(
table_model = table.get_model() table_model = table.get_model()
service = data_fixture.create_local_baserow_list_rows_service(table=table) service = data_fixture.create_local_baserow_list_rows_service(table=table)
[aardvark, badger, crow, dragonfly] = RowHandler().create_rows( [aardvark, badger, crow, dragonfly] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{field.id}": "Aardvark"}, table,
{f"field_{field.id}": "Badger"}, rows_values=[
{f"field_{field.id}": "Crow"}, {f"field_{field.id}": "Aardvark"},
{f"field_{field.id}": "Dragonfly"}, {f"field_{field.id}": "Badger"},
], {f"field_{field.id}": "Crow"},
{f"field_{field.id}": "Dragonfly"},
],
)
.created_rows
) )
dispatch_context = FakeDispatchContext() dispatch_context = FakeDispatchContext()
@ -357,15 +365,19 @@ def test_local_baserow_table_service_searchable_mixin_get_table_queryset(
table = data_fixture.create_database_table(user=user) table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(name="Names", table=table) field = data_fixture.create_text_field(name="Names", table=table)
service = data_fixture.create_local_baserow_list_rows_service(table=table) service = data_fixture.create_local_baserow_list_rows_service(table=table)
[alessia, alex, alastair, alexandra] = RowHandler().create_rows( [alessia, alex, alastair, alexandra] = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{field.id}": "Alessia"}, table,
{f"field_{field.id}": "Alex"}, rows_values=[
{f"field_{field.id}": "Alastair"}, {f"field_{field.id}": "Alessia"},
{f"field_{field.id}": "Alexandra"}, {f"field_{field.id}": "Alex"},
], {f"field_{field.id}": "Alastair"},
{f"field_{field.id}": "Alexandra"},
],
)
.created_rows
) )
table_model = table.get_model() table_model = table.get_model()

@ -0,0 +1,8 @@
{
"type": "feature",
"message": "Introduce row update functionality during table import",
"domain": "database",
"issue_number": 2213,
"bullet_points": [],
"created_at": "2025-03-13"
}

@ -2778,35 +2778,39 @@ def test_grouped_aggregate_rows_service_dispatch_max_buckets_sort_on_primary_fie
direction="ASC", direction="ASC",
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{ table,
f"field_{field.id}": 40, rows_values=[
f"field_{field_2.id}": "Z", {
}, f"field_{field.id}": 40,
{ f"field_{field_2.id}": "Z",
f"field_{field.id}": 20, },
f"field_{field_2.id}": "K", {
}, f"field_{field.id}": 20,
{ f"field_{field_2.id}": "K",
f"field_{field.id}": 30, },
f"field_{field_2.id}": "L", {
}, f"field_{field.id}": 30,
{ f"field_{field_2.id}": "L",
f"field_{field.id}": 10, },
f"field_{field_2.id}": "A", {
}, f"field_{field.id}": 10,
{ f"field_{field_2.id}": "A",
f"field_{field.id}": 60, },
f"field_{field_2.id}": "H", {
}, f"field_{field.id}": 60,
{ f"field_{field_2.id}": "H",
f"field_{field.id}": 50, },
f"field_{field_2.id}": "M", {
}, f"field_{field.id}": 50,
], f"field_{field_2.id}": "M",
},
],
)
.created_rows
) )
dispatch_context = FakeDispatchContext() dispatch_context = FakeDispatchContext()

@ -392,7 +392,7 @@ def test_rows_enter_view_event_type_paginate_data(
}
with transaction.atomic():
-webhook = WebhookHandler().create_table_webhook(
+WebhookHandler().create_table_webhook(
user=user,
table=table,
url="http://localhost/",
@ -403,7 +403,7 @@ def test_rows_enter_view_event_type_paginate_data(
use_user_field_names=True,
)
-rows = RowHandler().force_create_rows(
+RowHandler().force_create_rows(
user=user,
table=table,
rows_values=[

@ -1,2 +1,2 @@
@import "@baserow_premium/assets/scss/default"; @import '@baserow_premium/assets/scss/default';
@import "components/all"; @import 'components/all';

@ -33,10 +33,14 @@ def test_generate_ai_field_value_without_license(premium_data_fixture, api_clien
table = premium_data_fixture.create_database_table(name="table", database=database) table = premium_data_fixture.create_database_table(name="table", database=database)
field = premium_data_fixture.create_ai_field(table=table, name="ai") field = premium_data_fixture.create_ai_field(table=table, name="ai")
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -71,10 +75,14 @@ def test_generate_ai_field_value_view_field_does_not_exist(
table = premium_data_fixture.create_database_table(name="table", database=database) table = premium_data_fixture.create_database_table(name="table", database=database)
field = premium_data_fixture.create_ai_field(table=table, name="ai") field = premium_data_fixture.create_ai_field(table=table, name="ai")
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -110,10 +118,14 @@ def test_generate_ai_field_value_view_row_does_not_exist(
table = premium_data_fixture.create_database_table(name="table", database=database) table = premium_data_fixture.create_database_table(name="table", database=database)
field = premium_data_fixture.create_ai_field(table=table, name="ai") field = premium_data_fixture.create_ai_field(table=table, name="ai")
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -155,10 +167,14 @@ def test_generate_ai_field_value_view_user_not_in_workspace(
table = premium_data_fixture.create_database_table(name="table", database=database) table = premium_data_fixture.create_database_table(name="table", database=database)
field = premium_data_fixture.create_ai_field(table=table, name="ai") field = premium_data_fixture.create_ai_field(table=table, name="ai")
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -196,10 +212,14 @@ def test_generate_ai_field_value_view_generative_ai_does_not_exist(
table=table, name="ai", ai_generative_ai_type="does_not_exist" table=table, name="ai", ai_generative_ai_type="does_not_exist"
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -237,12 +257,16 @@ def test_generate_ai_field_value_view_generative_ai_model_does_not_belong_to_typ
table=table, name="ai", ai_generative_ai_model="does_not_exist" table=table, name="ai", ai_generative_ai_model="does_not_exist"
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{}, table,
], rows_values=[
{},
],
)
.created_rows
) )
response = api_client.post( response = api_client.post(
@ -281,10 +305,14 @@ def test_generate_ai_field_value_view_generative_ai(
table=table, name="ai", ai_prompt="'Hello'" table=table, name="ai", ai_prompt="'Hello'"
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}], user,
table,
rows_values=[{}],
)
.created_rows
) )
assert patched_generate_ai_values_for_rows.call_count == 0 assert patched_generate_ai_values_for_rows.call_count == 0
@ -313,10 +341,14 @@ def test_batch_generate_ai_field_value_limit(api_client, premium_data_fixture):
field = premium_data_fixture.create_ai_field( field = premium_data_fixture.create_ai_field(
table=table, name="ai", ai_prompt="'Hello'" table=table, name="ai", ai_prompt="'Hello'"
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{}] * (settings.BATCH_ROWS_SIZE_LIMIT + 1), user,
table,
rows_values=[{}] * (settings.BATCH_ROWS_SIZE_LIMIT + 1),
)
.created_rows
) )
row_ids = [row.id for row in rows] row_ids = [row.id for row in rows]

View file

@ -29,7 +29,7 @@ def test_generate_ai_field_value_view_generative_ai(
table=table, name="ai", ai_prompt="'Hello'" table=table, name="ai", ai_prompt="'Hello'"
) )
rows = RowHandler().create_rows(user, table, rows_values=[{}]) rows = RowHandler().create_rows(user, table, rows_values=[{}]).created_rows
assert patched_rows_updated.call_count == 0 assert patched_rows_updated.call_count == 0
generate_ai_values_for_rows(user.id, field.id, [rows[0].id]) generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
@ -61,7 +61,7 @@ def test_generate_ai_field_value_view_generative_ai_with_temperature(
table=table, name="ai", ai_prompt="'Hello'", ai_temperature=0.7 table=table, name="ai", ai_prompt="'Hello'", ai_temperature=0.7
) )
rows = RowHandler().create_rows(user, table, rows_values=[{}]) rows = RowHandler().create_rows(user, table, rows_values=[{}]).created_rows
generate_ai_values_for_rows(user.id, field.id, [rows[0].id]) generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
updated_row = patched_rows_updated.call_args[1]["rows"][0] updated_row = patched_rows_updated.call_args[1]["rows"][0]
@ -92,12 +92,16 @@ def test_generate_ai_field_value_view_generative_ai_parse_formula(
table=table, name="ai", ai_prompt=formula table=table, name="ai", ai_prompt=formula
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[ user,
{f"field_{firstname.id}": "Bram", f"field_{lastname.id}": "Wiepjes"}, table,
], rows_values=[
{f"field_{firstname.id}": "Bram", f"field_{lastname.id}": "Wiepjes"},
],
)
.created_rows
) )
assert patched_rows_updated.call_count == 0 assert patched_rows_updated.call_count == 0
@ -132,10 +136,14 @@ def test_generate_ai_field_value_view_generative_ai_invalid_field(
table=table, name="ai", ai_prompt=formula table=table, name="ai", ai_prompt=formula
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{f"field_{firstname.id}": "Bram"}], user,
table,
rows_values=[{f"field_{firstname.id}": "Bram"}],
)
.created_rows
) )
assert patched_rows_updated.call_count == 0 assert patched_rows_updated.call_count == 0
generate_ai_values_for_rows(user.id, field.id, [rows[0].id]) generate_ai_values_for_rows(user.id, field.id, [rows[0].id])
@ -172,10 +180,14 @@ def test_generate_ai_field_value_view_generative_ai_invalid_prompt(
ai_prompt=formula, ai_prompt=formula,
) )
rows = RowHandler().create_rows( rows = (
user, RowHandler()
table, .create_rows(
rows_values=[{f"field_{firstname.id}": "Bram"}], user,
table,
rows_values=[{f"field_{firstname.id}": "Bram"}],
)
.created_rows
) )
assert patched_rows_ai_values_generation_error.call_count == 0 assert patched_rows_ai_values_generation_error.call_count == 0

@ -1066,13 +1066,17 @@ def test_link_row_field_can_be_sorted_when_linking_an_ai_field(premium_data_fixt
field=primary_b, value="b", color="green", order=0 field=primary_b, value="b", color="green", order=0
) )
row_b1, row_b2 = RowHandler().force_create_rows( row_b1, row_b2 = (
user, RowHandler()
table_b, .force_create_rows(
[ user,
{primary_b.db_column: opt_1.id}, table_b,
{primary_b.db_column: opt_2.id}, [
], {primary_b.db_column: opt_1.id},
{primary_b.db_column: opt_2.id},
],
)
.created_rows
) )
table_a, table_b, link_field = premium_data_fixture.create_two_linked_tables( table_a, table_b, link_field = premium_data_fixture.create_two_linked_tables(

@ -6,4 +6,4 @@
&:hover {
color: $color-neutral-800;
}
}

@ -4,4 +4,4 @@
@import 'kanban';
@import 'decorators';
@import 'view_date_selector';
@import 'view_date_indicator';

@ -4,4 +4,4 @@
@import 'timeline_date_settings_init_box';
@import 'timeline_grid';
@import 'timeline_grid_row';
@import 'timeline_timescale_context';

@ -44,7 +44,7 @@
.timeline-grid-row__label {
@extend %ellipsis;
margin-right: 8px;
font-size: 12px;
line-height: 20px;

@ -6,4 +6,4 @@
display: flex;
align-items: center;
justify-content: space-between;
}

@ -2,4 +2,4 @@
width: 100%;
height: 100%;
overflow-y: auto;
}

@ -36,11 +36,9 @@
<div class="license-detail__item-value"> <div class="license-detail__item-value">
<Badge :color="licenseType.getLicenseBadgeColor()" bold> <Badge :color="licenseType.getLicenseBadgeColor()" bold>
{{ licenseType.getName() }} {{ licenseType.getName() }}
</Badge </Badge>
> <Badge v-if="!license.is_active" color="red"
<Badge v-if="!license.is_active" color="red">{{ >{{ $t('licenses.expired') }}
$t('licenses.expired')
}}
</Badge> </Badge>
</div> </div>
</div> </div>
@ -105,7 +103,8 @@
</div> </div>
</div> </div>
<div class="license-detail__item-value"> <div class="license-detail__item-value">
{{ license.application_users_taken }} / {{ license.application_users }} {{ license.application_users_taken }} /
{{ license.application_users }}
</div> </div>
</div> </div>
<div class="license-detail__item"> <div class="license-detail__item">
@ -180,15 +179,14 @@
<i18n path="license.disconnectDescription" tag="p"> <i18n path="license.disconnectDescription" tag="p">
<template #contact> <template #contact>
<a href="https://baserow.io/contact" target="_blank" <a href="https://baserow.io/contact" target="_blank"
>baserow.io/contact</a >baserow.io/contact</a
> >
</template> </template>
</i18n> </i18n>
<Button type="danger" @click="$refs.disconnectModal.show()"> <Button type="danger" @click="$refs.disconnectModal.show()">
{{ $t('license.disconnectLicense') }} {{ $t('license.disconnectLicense') }}
</Button </Button>
>
<DisconnectLicenseModal <DisconnectLicenseModal
ref="disconnectModal" ref="disconnectModal"
:license="license" :license="license"
@ -204,18 +202,15 @@
import moment from '@baserow/modules/core/moment' import moment from '@baserow/modules/core/moment'
import { notifyIf } from '@baserow/modules/core/utils/error' import { notifyIf } from '@baserow/modules/core/utils/error'
import LicenseService from '@baserow_premium/services/license' import LicenseService from '@baserow_premium/services/license'
import DisconnectLicenseModal import DisconnectLicenseModal from '@baserow_premium/components/license/DisconnectLicenseModal'
from '@baserow_premium/components/license/DisconnectLicenseModal' import ManualLicenseSeatsForm from '@baserow_premium/components/license/ManualLicenseSeatForm'
import ManualLicenseSeatsForm import AutomaticLicenseSeats from '@baserow_premium/components/license/AutomaticLicenseSeats'
from '@baserow_premium/components/license/ManualLicenseSeatForm'
import AutomaticLicenseSeats
from '@baserow_premium/components/license/AutomaticLicenseSeats'
export default { export default {
components: { components: {
DisconnectLicenseModal, DisconnectLicenseModal,
ManualLicenseSeatsForm, ManualLicenseSeatsForm,
AutomaticLicenseSeats AutomaticLicenseSeats,
}, },
layout: 'app', layout: 'app',
middleware: 'staff', middleware: 'staff',
@ -226,14 +221,14 @@ export default {
} catch { } catch {
return error({ return error({
statusCode: 404, statusCode: 404,
message: 'The license was not found.' message: 'The license was not found.',
}) })
} }
}, },
data() { data() {
return { return {
user: null, user: null,
checkLoading: false checkLoading: false,
} }
}, },
computed: { computed: {
@ -269,7 +264,7 @@ export default {
} }
this.checkLoading = false this.checkLoading = false
} },
} },
} }
</script> </script>
View file
@ -127,8 +127,12 @@
{{ license.seats_taken }} / {{ license.seats }} {{ license.seats_taken }} / {{ license.seats }}
{{ $t('licenses.seats') }} {{ $t('licenses.seats') }}
</li> </li>
<li v-if="license.application_users" class="licenses__item-detail-item"> <li
{{ license.application_users_taken }} / {{ license.application_users }} v-if="license.application_users"
class="licenses__item-detail-item"
>
{{ license.application_users_taken }} /
{{ license.application_users }}
{{ $t('licenses.applicationUsers') }} {{ $t('licenses.applicationUsers') }}
</li> </li>
</ul> </ul>
@ -148,7 +152,6 @@
></i> ></i>
</li> </li>
</ul> </ul>
</nuxt-link> </nuxt-link>
</div> </div>
</div> </div>
View file
@ -55,7 +55,39 @@
@header="onHeader($event)" @header="onHeader($event)"
@data="onData($event)" @data="onData($event)"
@getData="onGetData($event)" @getData="onGetData($event)"
/> >
<template #upsertMapping>
<div class="control margin-top-1">
<label class="control__label control__label--small">
{{ $t('importFileModal.useUpsertField') }}
<HelpIcon
:icon="'info-empty'"
:tooltip="$t('importFileModal.upsertTooltip')"
/>
</label>
<div class="control__elements">
<Checkbox
v-model="useUpsertField"
:disabled="!mappingNotEmpty"
>{{ $t('common.yes') }}</Checkbox
>
</div>
<Dropdown
v-model="upsertField"
:disabled="!useUpsertField"
class="margin-top-1"
>
<DropdownItem
v-for="item in availableUpsertFields"
:key="item.id"
:name="item.name"
:value="item.id"
/>
</Dropdown>
</div>
</template>
</component>
</div> </div>
<ImportErrorReport :job="job" :error="error"></ImportErrorReport> <ImportErrorReport :job="job" :error="error"></ImportErrorReport>
@ -204,6 +236,8 @@ export default {
getData: null, getData: null,
previewData: [], previewData: [],
dataLoaded: false, dataLoaded: false,
useUpsertField: false,
upsertField: undefined,
} }
}, },
computed: { computed: {
@ -213,12 +247,19 @@ export default {
} }
return this.database.tables.some(({ id }) => id === this.job.table_id) return this.database.tables.some(({ id }) => id === this.job.table_id)
}, },
mappingNotEmpty() {
return Object.values(this.mapping).some(
(value) => this.fieldIndexMap[value] !== undefined
)
},
canBeSubmitted() { canBeSubmitted() {
return ( return (
this.importer && this.importer &&
Object.values(this.mapping).some( Object.values(this.mapping).some(
(value) => this.fieldIndexMap[value] !== undefined (value) => this.fieldIndexMap[value] !== undefined
) ) &&
(!this.useUpsertField ||
Object.values(this.mapping).includes(this.upsertField))
) )
}, },
fieldTypes() { fieldTypes() {
@ -307,6 +348,14 @@ export default {
selectedFields() { selectedFields() {
return Object.values(this.mapping) return Object.values(this.mapping)
}, },
availableUpsertFields() {
const selected = Object.values(this.mapping)
return this.fields.filter((field) => {
return (
selected.includes(field.id) && this.fieldTypes[field.type].canUpsert()
)
})
},
progressPercentage() { progressPercentage() {
switch (this.state) { switch (this.state) {
case null: case null:
@ -417,6 +466,14 @@ export default {
this.showProgressBar = false this.showProgressBar = false
this.reset(false) this.reset(false)
let data = null let data = null
const importConfiguration = {}
if (this.upsertField) {
            // At the moment we use only one field, but the upsert key may be
            // composed of several fields.
importConfiguration.upsert_fields = [this.upsertField]
importConfiguration.upsert_values = []
}
if (typeof this.getData === 'function') { if (typeof this.getData === 'function') {
try { try {
@ -425,6 +482,18 @@ export default {
await this.$ensureRender() await this.$ensureRender()
data = await this.getData() data = await this.getData()
const upsertFields = importConfiguration.upsert_fields || []
const upsertValues = importConfiguration.upsert_values || []
const upsertFieldIndexes = []
Object.entries(this.mapping).forEach(
([importIndex, targetFieldId]) => {
if (upsertFields.includes(targetFieldId)) {
upsertFieldIndexes.push(importIndex)
}
}
)
const fieldMapping = Object.entries(this.mapping) const fieldMapping = Object.entries(this.mapping)
.filter( .filter(
([, targetFieldId]) => ([, targetFieldId]) =>
@ -456,22 +525,41 @@ export default {
// Processes the data by chunk to avoid UI freezes // Processes the data by chunk to avoid UI freezes
const result = [] const result = []
for (const chunk of _.chunk(data, 1000)) { for (const chunk of _.chunk(data, 1000)) {
result.push( result.push(
chunk.map((row) => { chunk.map((row) => {
const newRow = clone(defaultRow) const newRow = clone(defaultRow)
const upsertRow = []
fieldMapping.forEach(([importIndex, targetIndex]) => { fieldMapping.forEach(([importIndex, targetIndex]) => {
newRow[targetIndex] = prepareValueByField[targetIndex]( newRow[targetIndex] = prepareValueByField[targetIndex](
row[importIndex] row[importIndex]
) )
if (upsertFieldIndexes.includes(importIndex)) {
upsertRow.push(newRow[targetIndex])
}
}) })
if (upsertFields.length > 0 && upsertRow.length > 0) {
if (upsertFields.length !== upsertRow.length) {
                  throw new Error(
                    "upsert row length doesn't match the number of upsert fields"
                  )
}
upsertValues.push(upsertRow)
}
return newRow return newRow
}) })
) )
await this.$ensureRender() await this.$ensureRender()
} }
data = result.flat() data = result.flat()
if (upsertFields.length > 0) {
if (upsertValues.length !== data.length) {
              throw new Error('upsert values length mismatch')
}
importConfiguration.upsert_values = upsertValues
}
} catch (error) { } catch (error) {
this.reset() this.reset()
this.handleError(error, 'application') this.handleError(error, 'application')
@ -493,7 +581,8 @@ export default {
data, data,
{ {
onUploadProgress, onUploadProgress,
} },
importConfiguration.upsert_fields ? importConfiguration : null
) )
this.startJobPoller(job) this.startJobPoller(job)
} catch (error) { } catch (error) {
View file
@ -106,6 +106,9 @@
</div> </div>
</div> </div>
</div> </div>
<div v-if="values.filename !== ''" class="row">
<div class="col col-8 margin-top-1"><slot name="upsertMapping" /></div>
</div>
<Alert v-if="error !== ''" type="error"> <Alert v-if="error !== ''" type="error">
<template #title> {{ $t('common.wrong') }} </template> <template #title> {{ $t('common.wrong') }} </template>
{{ error }} {{ error }}
View file
@ -75,6 +75,11 @@
></CharsetDropdown> ></CharsetDropdown>
</div> </div>
</div> </div>
<div v-if="values.filename !== ''" class="control margin-top-2">
<slot name="upsertMapping" />
</div>
<Alert v-if="error !== ''" type="error"> <Alert v-if="error !== ''" type="error">
<template #title> {{ $t('common.wrong') }} </template> <template #title> {{ $t('common.wrong') }} </template>
{{ error }} {{ error }}
View file
@ -28,6 +28,10 @@
</Checkbox> </Checkbox>
</FormGroup> </FormGroup>
<div v-if="values.filename !== ''" class="control margin-top-0">
<slot name="upsertMapping" />
</div>
<Alert v-if="error !== ''" type="error"> <Alert v-if="error !== ''" type="error">
<template #title> {{ $t('common.wrong') }} </template> <template #title> {{ $t('common.wrong') }} </template>
{{ error }} {{ error }}
View file
@ -27,7 +27,7 @@
</div> </div>
</template> </template>
<div class="control__elements"> <div class="control__elements">
<div class="file-upload"> <div class="file-upload margin-top-1">
<input <input
v-show="false" v-show="false"
ref="file" ref="file"
@ -61,6 +61,10 @@
<div v-if="v$.values.filename.$error" class="error"> <div v-if="v$.values.filename.$error" class="error">
{{ v$.values.filename.$errors[0]?.$message }} {{ v$.values.filename.$errors[0]?.$message }}
</div> </div>
<div v-if="values.filename !== ''" class="control margin-top-1">
<slot name="upsertMapping" />
</div>
</div> </div>
</div> </div>
<Alert v-if="error !== ''" type="error"> <Alert v-if="error !== ''" type="error">
View file
@ -542,10 +542,13 @@ export class FieldType extends Registerable {
} }
/** /**
* This hook is called before the field's value is copied to the clipboard. * Can a field of this type be used to perform an update during import on rows that
   * Optionally formatting can be done here. By default the value is always * contain the same value as the imported one.
* converted to a string.
*/ */
canUpsert() {
return false
}
/** /**
* This hook is called before the field's value is copied to the clipboard. * This hook is called before the field's value is copied to the clipboard.
* Optionally formatting can be done here. By default the value is always * Optionally formatting can be done here. By default the value is always
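For context, a minimal sketch (not part of this diff) of how a custom field type could opt into the `canUpsert()` hook introduced above. The `SerialNumberFieldType` name is purely illustrative, and the import path assumes `FieldType` is exported from the database fieldTypes module:

import { FieldType } from '@baserow/modules/database/fieldTypes'

export class SerialNumberFieldType extends FieldType {
  static getType() {
    // Illustrative type name; a real field type must also implement the other
    // abstract methods of FieldType (form/grid components, sorting, etc.).
    return 'serial_number'
  }

  canUpsert() {
    // Values of this type can be compared reliably against imported values,
    // so the import modal may offer the field as an upsert key.
    return true
  }
}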
@ -991,6 +994,10 @@ export class TextFieldType extends FieldType {
return field.text_default return field.text_default
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -1102,6 +1109,10 @@ export class LongTextFieldType extends FieldType {
return '' return ''
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -1551,6 +1562,10 @@ export class NumberFieldType extends FieldType {
return ['text', '1', '9'] return ['text', '1', '9']
} }
canUpsert() {
return true
}
/** /**
* When searching a cell's value, this should return the value to match the user's * When searching a cell's value, this should return the value to match the user's
* search term against. We can't use `toHumanReadableString` here as it needs to be * search term against. We can't use `toHumanReadableString` here as it needs to be
@ -1765,6 +1780,10 @@ export class RatingFieldType extends FieldType {
return 0 return 0
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
if (a[name] === b[name]) { if (a[name] === b[name]) {
@ -1899,6 +1918,10 @@ export class BooleanFieldType extends FieldType {
return ['icon', 'baserow-icon-circle-empty', 'baserow-icon-circle-checked'] return ['icon', 'baserow-icon-circle-empty', 'baserow-icon-circle-checked']
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const intA = +a[name] const intA = +a[name]
@ -2252,6 +2275,10 @@ export class DateFieldType extends BaseDateFieldType {
return true return true
} }
canUpsert() {
return true
}
parseQueryParameter(field, value) { parseQueryParameter(field, value) {
return this.formatValue( return this.formatValue(
field.field, field.field,
@ -2718,6 +2745,10 @@ export class DurationFieldType extends FieldType {
return this.formatValue(field, value) return this.formatValue(field, value)
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const aValue = a[name] const aValue = a[name]
@ -2865,6 +2896,10 @@ export class URLFieldType extends FieldType {
return isValidURL(value) ? value : '' return isValidURL(value) ? value : ''
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -2964,6 +2999,10 @@ export class EmailFieldType extends FieldType {
return isValidEmail(value) ? value : '' return isValidEmail(value) ? value : ''
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -3810,6 +3849,10 @@ export class PhoneNumberFieldType extends FieldType {
return isSimplePhoneNumber(value) ? value : '' return isSimplePhoneNumber(value) ? value : ''
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -4456,6 +4499,10 @@ export class UUIDFieldType extends FieldType {
return RowCardFieldUUID return RowCardFieldUUID
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
const stringA = a[name] === null ? '' : '' + a[name] const stringA = a[name] === null ? '' : '' + a[name]
@ -4535,6 +4582,10 @@ export class AutonumberFieldType extends FieldType {
return RowCardFieldAutonumber return RowCardFieldAutonumber
} }
canUpsert() {
return true
}
getSort(name, order) { getSort(name, order) {
return (a, b) => { return (a, b) => {
if (a[name] === b[name]) { if (a[name] === b[name]) {
View file
@ -441,7 +441,9 @@
"fieldMappingDescription": "We have automatically mapped the columns of the Baserow fields in your table. You can change them below. Any incompatible cell will remain empty after the import.", "fieldMappingDescription": "We have automatically mapped the columns of the Baserow fields in your table. You can change them below. Any incompatible cell will remain empty after the import.",
"selectImportMessage": "Please select data to import.", "selectImportMessage": "Please select data to import.",
"filePreview": "File content preview", "filePreview": "File content preview",
"importPreview": "Import preview" "importPreview": "Import preview",
"useUpsertField": "Update rows if they already exist",
"upsertTooltip": "Match existing rows using a unique field to overwrite data with imported values."
}, },
"formulaAdvancedEditContext": { "formulaAdvancedEditContext": {
"textAreaFormulaInputPlaceholder": "Click to edit the formula", "textAreaFormulaInputPlaceholder": "Click to edit the formula",
View file
@ -9,6 +9,16 @@ import {
const IMPORT_PREVIEW_MAX_ROW_COUNT = 6 const IMPORT_PREVIEW_MAX_ROW_COUNT = 6
export default { export default {
props: {
mapping: {
type: Object,
required: false,
default: () => {
return {}
},
},
},
data() { data() {
return { return {
fileLoadingProgress: 0, fileLoadingProgress: 0,
View file
@ -29,10 +29,15 @@ export default (client) => {
return client.post(`/database/tables/database/${databaseId}/`, values) return client.post(`/database/tables/database/${databaseId}/`, values)
}, },
importData(tableId, data, config = null) { importData(tableId, data, config = null, importConfiguration = null) {
const payload = { data }
if (importConfiguration) {
payload.configuration = importConfiguration
}
return client.post( return client.post(
`/database/tables/${tableId}/import/async/`, `/database/tables/${tableId}/import/async/`,
{ data }, payload,
config config
) )
}, },
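A usage sketch of the extended service method. The import path, the `client` argument, and the field id and values are assumptions for illustration and are not shown in this diff:

import TableService from '@baserow/modules/database/services/table'

// `client` is the configured axios instance used elsewhere in the web-frontend.
async function importWithUpsert(client, tableId, data, importConfiguration) {
  // The fourth argument is optional; when omitted the request body stays
  // `{ data }` exactly as before, so existing callers are unaffected.
  const { data: job } = await TableService(client).importData(
    tableId,
    data,
    { onUploadProgress: () => {} },
    importConfiguration // e.g. { upsert_fields: [42], upsert_values: [['Bram'], ['Cezary']] }
  )
  return job
}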