1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-07 22:35:36 +00:00

[1/3] Import airtable grid view filters

This commit is contained in:
Bram Wiepjes 2025-03-14 10:34:55 +00:00
parent f870926221
commit a0a7770ef2
17 changed files with 2084 additions and 31 deletions

View file

@ -0,0 +1,423 @@
from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.core.utils import get_value_at_path
from .exceptions import AirtableSkipFilter
from .helpers import to_import_select_option_id
from .registry import AirtableFilterOperator
from .utils import (
airtable_date_filter_value_to_baserow,
skip_filter_if_type_duration_and_value_too_high,
)
class AirtableContainsOperator(AirtableFilterOperator):
    """Maps Airtable's `contains` operator onto the matching Baserow filter."""

    type = "contains"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Link row fields need the dedicated link row filter; every other column
        # type can use the generic `contains` filter with the raw value.
        if raw_airtable_column["type"] in ["foreignKey"]:
            return view_filter_type_registry.get("link_row_contains"), value

        return view_filter_type_registry.get("contains"), value
class AirtableDoesNotContainOperator(AirtableFilterOperator):
    """Maps Airtable's `doesNotContain` operator onto the matching Baserow filter."""

    type = "doesNotContain"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Link row fields need the dedicated "not contains" link row filter.
        if raw_airtable_column["type"] in ["foreignKey"]:
            return view_filter_type_registry.get("link_row_not_contains"), value

        if raw_airtable_column["type"] in ["multiSelect"]:
            # Prefix each choice id with the column id because that's how the
            # select option ids are imported (see `to_import_select_option_id`).
            value = [f"{raw_airtable_column['id']}_{v}" for v in value]
            value = ",".join(value)
            return view_filter_type_registry.get("multiple_select_has_not"), value

        return view_filter_type_registry.get("contains_not"), value
class AirtableEqualOperator(AirtableFilterOperator):
    """Maps Airtable's `=` operator onto the matching Baserow filter per column type."""

    type = "="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Plain text and numeric columns map to the generic `equal` filter.
        if raw_airtable_column["type"] in [
            "text",
            "multilineText",
            "number",
            "rating",
            "phone",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("equal"), str(value)

        if raw_airtable_column["type"] in ["checkbox"]:
            return (
                view_filter_type_registry.get("boolean"),
                "true" if value else "false",
            )

        if raw_airtable_column["type"] in ["select"]:
            # Remap to the imported select option id (`{column_id}_{choice_id}`).
            value = to_import_select_option_id(raw_airtable_column["id"], value)
            return view_filter_type_registry.get("single_select_equal"), value

        if raw_airtable_column["type"] in ["multiSelect"]:
            value = [f"{raw_airtable_column['id']}_{v}" for v in value]
            value = ",".join(value)
            return view_filter_type_registry.get("multiple_select_has"), value

        if raw_airtable_column["type"] in ["collaborator"]:
            return view_filter_type_registry.get("multiple_collaborators_has"), value

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is"), value

        if raw_airtable_column["type"] in ["foreignKey"]:
            if isinstance(value, list):
                # The `link_row_has` filter supports only a single row reference.
                if len(value) > 1:
                    raise AirtableSkipFilter

                foreign_table_id = get_value_at_path(
                    raw_airtable_column, "typeOptions.foreignTableId"
                )
                table_row_id_mapping = row_id_mapping.get(foreign_table_id, {})
                # Remap the Airtable row ids to the imported Baserow row ids,
                # dropping ids that were not imported.
                value = [
                    str(table_row_id_mapping.get(v))
                    for v in value
                    if v in table_row_id_mapping
                ]
                value = ",".join(value)
                return view_filter_type_registry.get("link_row_has"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableNotEqualOperator(AirtableFilterOperator):
    """Maps Airtable's `!=` operator onto the matching Baserow filter per column type."""

    type = "!="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Plain text and numeric columns map to the generic `not_equal` filter.
        if raw_airtable_column["type"] in [
            "text",
            "multilineText",
            "number",
            "rating",
            "phone",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("not_equal"), str(value)

        if raw_airtable_column["type"] in ["select"]:
            # Remap to the imported select option id (`{column_id}_{choice_id}`).
            value = to_import_select_option_id(raw_airtable_column["id"], value)
            return view_filter_type_registry.get("single_select_not_equal"), value

        if raw_airtable_column["type"] in ["collaborator"]:
            return (
                view_filter_type_registry.get("multiple_collaborators_has_not"),
                value,
            )

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_not"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableIsEmptyOperator(AirtableFilterOperator):
    """Maps Airtable's `isEmpty` operator onto Baserow's `empty` filter."""

    type = "isEmpty"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # The `empty` filter doesn't use a value, so an empty string is returned
        # regardless of what Airtable provided.
        empty_filter_type = view_filter_type_registry.get("empty")
        return empty_filter_type, ""
class AirtableIsNotEmptyOperator(AirtableFilterOperator):
    """Maps Airtable's `isNotEmpty` operator onto Baserow's `not_empty` filter."""

    type = "isNotEmpty"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # The `not_empty` filter doesn't use a value, so an empty string is
        # returned regardless of what Airtable provided.
        not_empty_filter_type = view_filter_type_registry.get("not_empty")
        return not_empty_filter_type, ""
class AirtableFilenameOperator(AirtableFilterOperator):
    """Maps Airtable's `filename` operator onto Baserow's `filename_contains` filter."""

    type = "filename"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # The value maps one-to-one onto the Baserow filename filter value.
        return view_filter_type_registry.get("filename_contains"), value
class AirtableFiletypeOperator(AirtableFilterOperator):
    """Maps Airtable's `filetype` operator onto Baserow's `has_file_type` filter."""

    type = "filetype"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Translate the Airtable file type to the Baserow equivalent. Any other
        # value has no compatible Baserow file type and must be skipped.
        filetype_translation = {"image": "image", "text": "document"}

        if value not in filetype_translation:
            raise AirtableSkipFilter

        return (
            view_filter_type_registry.get("has_file_type"),
            filetype_translation[value],
        )
class AirtableIsAnyOfOperator(AirtableFilterOperator):
    """Maps Airtable's `isAnyOf` operator onto Baserow's single select "any of" filter."""

    type = "isAnyOf"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["select"]:
            # Remap every choice to the imported select option id
            # (`{column_id}_{choice_id}`).
            value = [
                to_import_select_option_id(raw_airtable_column["id"], v) for v in value
            ]
            value = ",".join(value)
            return view_filter_type_registry.get("single_select_is_any_of"), value

        # Only single select columns are supported by this operator.
        raise AirtableSkipFilter
class AirtableIsNoneOfOperator(AirtableFilterOperator):
    """Maps Airtable's `isNoneOf` operator onto Baserow's single select "none of" filter."""

    type = "isNoneOf"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["select"]:
            # Remap every choice to the imported select option id
            # (`{column_id}_{choice_id}`).
            value = [
                to_import_select_option_id(raw_airtable_column["id"], v) for v in value
            ]
            value = ",".join(value)
            return view_filter_type_registry.get("single_select_is_none_of"), value

        # Only single select columns are supported by this operator.
        raise AirtableSkipFilter
class AirtableHasAnyOfOperator(AirtableFilterOperator):
    """
    Airtable's "has any of" (`|`) operator. There is no compatible Baserow
    filter, so the filter is always skipped and reported.
    """

    type = "|"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        raise AirtableSkipFilter
class AirtableHasAllOfOperator(AirtableFilterOperator):
    """
    Airtable's "has all of" (`&`) operator. There is no compatible Baserow
    filter, so the filter is always skipped and reported.
    """

    type = "&"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        raise AirtableSkipFilter
class AirtableLessThanOperator(AirtableFilterOperator):
    """Maps Airtable's `<` operator onto the matching Baserow filter per column type."""

    type = "<"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Numeric columns map to `lower_than`; duration values that exceed the
        # processable maximum are skipped.
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("lower_than"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_before"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableMoreThanOperator(AirtableFilterOperator):
    """Maps Airtable's `>` operator onto the matching Baserow filter per column type."""

    type = ">"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Numeric columns map to `higher_than`; duration values that exceed the
        # processable maximum are skipped.
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("higher_than"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_after"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableLessThanOrEqualOperator(AirtableFilterOperator):
    """Maps Airtable's `<=` operator onto the matching Baserow filter per column type."""

    type = "<="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Numeric columns map to `lower_than_or_equal`; duration values that
        # exceed the processable maximum are skipped.
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("lower_than_or_equal"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_on_or_before"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableMoreThanOrEqualOperator(AirtableFilterOperator):
    """Maps Airtable's `>=` operator onto the matching Baserow filter per column type."""

    type = ">="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Numeric columns map to `higher_than_or_equal`; duration values that
        # exceed the processable maximum are skipped.
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("higher_than_or_equal"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_on_or_after"), value

        # No compatible Baserow filter exists for the remaining column types.
        raise AirtableSkipFilter
class AirtableIsWithinOperator(AirtableFilterOperator):
    """Maps Airtable's `isWithin` operator onto Baserow's `date_is_within` filter."""

    type = "isWithin"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        # Only date columns are supported; the Airtable date value dict is
        # converted to the Baserow date filter value string.
        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_within"), value

        raise AirtableSkipFilter

View file

@ -58,3 +58,23 @@ AIRTABLE_ASCENDING_MAP = {
"ascending": True,
"descending": False,
}
# Maps the Airtable date filter `mode` to the Baserow date filter value template.
# Templates have the shape `{timeZone}?{number or date}?{baserow_mode}`, e.g.
# `Europe/Amsterdam?2025-01-01?exact_date`, and placeholders are filled via
# `str.format(**value)` in `airtable_date_filter_value_to_baserow`.
AIRTABLE_DATE_FILTER_VALUE_MAP = {
    "daysAgo": "{timeZone}?{numberOfDays}?nr_days_ago",
    "daysFromNow": "{timeZone}?{numberOfDays}?nr_days_from_now",
    "exactDate": "{timeZone}?{exactDate}?exact_date",
    "nextMonth": "{timeZone}??next_month",
    "nextNumberOfDays": "{timeZone}?{numberOfDays}?nr_days_from_now",
    "nextWeek": "{timeZone}??next_week",
    "oneMonthAgo": "{timeZone}??one_month_ago",
    "oneWeekAgo": "{timeZone}?1?nr_weeks_ago",
    "oneMonthFromNow": "{timeZone}?1?nr_months_from_now",
    "oneWeekFromNow": "{timeZone}?1?nr_weeks_from_now",
    "pastMonth": "{timeZone}?1?nr_months_ago",
    "pastNumberOfDays": "{timeZone}?{numberOfDays}?nr_days_ago",
    "pastWeek": "{timeZone}?1?nr_weeks_ago",
    "pastYear": "{timeZone}?1?nr_years_ago",
    # NOTE(review): maps to `0?nr_years_ago` — presumably "this calendar year";
    # confirm against the Baserow date filter semantics.
    "thisCalendarYear": "{timeZone}?0?nr_years_ago",
    "today": "{timeZone}??today",
    "tomorrow": "{timeZone}??tomorrow",
    "yesterday": "{timeZone}??yesterday",
}

View file

@ -15,3 +15,9 @@ class AirtableSkipCellValue(Exception):
Raised when an Airtable cell value must be skipped, and be omitted from the
export.
"""
class AirtableSkipFilter(Exception):
    """
    Raised when an Airtable filter is not compatible and must be skipped. The
    skipped filter is then reported via the import report instead of imported.
    """

View file

@ -650,7 +650,13 @@ class AirtableHandler:
)
serialized_view = (
airtable_view_type_registry.from_airtable_view_to_serialized(
field_mapping, table, view, view_data, config, import_report
field_mapping,
row_id_mapping,
table,
view,
view_data,
config,
import_report,
)
)

View file

@ -17,6 +17,10 @@ def import_airtable_date_type_options(type_options) -> dict:
}
def to_import_select_option_id(field_id, choice_id):
return f"{field_id}_{choice_id}"
def import_airtable_choices(field_id: str, type_options: dict) -> List[SelectOption]:
order = type_options.get("choiceOrder", [])
choices = type_options.get("choices", [])
@ -24,7 +28,7 @@ def import_airtable_choices(field_id: str, type_options: dict) -> List[SelectOpt
SelectOption(
# Combine select id with choice id as choice id is not guaranteed to be
# unique across table
id=f"{field_id}_{choice['id']}",
id=to_import_select_option_id(field_id, choice["id"]),
value=choice["name"],
color=AIRTABLE_BASEROW_COLOR_MAPPING.get(
# The color isn't always provided, hence the fallback to an empty

View file

@ -22,17 +22,20 @@ SCOPE_VIEW_SORT = SelectOption(
SCOPE_VIEW_GROUP_BY = SelectOption(
id="scope_view_group_by", value="View group by", color="light-brown", order=5
)
SCOPE_VIEW_FILTER = SelectOption(
id="scope_view_filter", value="View filter", color="light-pink", order=6
)
SCOPE_VIEW_FIELD_OPTIONS = SelectOption(
id="scope_view_field_options",
value="View field options",
color="light-purple",
order=6,
order=7,
)
SCOPE_AUTOMATIONS = SelectOption(
id="scope_automations", value="Automations", color="light-orange", order=7
id="scope_automations", value="Automations", color="light-orange", order=8
)
SCOPE_INTERFACES = SelectOption(
id="scope_interfaces", value="Interfaces", color="light-yellow", order=8
id="scope_interfaces", value="Interfaces", color="light-yellow", order=9
)
ALL_SCOPES = [
SCOPE_FIELD,
@ -40,6 +43,7 @@ ALL_SCOPES = [
SCOPE_VIEW,
SCOPE_VIEW_SORT,
SCOPE_VIEW_GROUP_BY,
SCOPE_VIEW_FILTER,
SCOPE_AUTOMATIONS,
SCOPE_INTERFACES,
]

View file

@ -2,24 +2,41 @@ from typing import Any, Dict, List, Optional, Tuple, Union
from baserow.contrib.database.airtable.config import AirtableImportConfig
from baserow.contrib.database.airtable.constants import AIRTABLE_ASCENDING_MAP
from baserow.contrib.database.airtable.exceptions import AirtableSkipCellValue
from baserow.contrib.database.airtable.exceptions import (
AirtableSkipCellValue,
AirtableSkipFilter,
)
from baserow.contrib.database.airtable.import_report import (
ERROR_TYPE_UNSUPPORTED_FEATURE,
SCOPE_FIELD,
SCOPE_VIEW_FILTER,
SCOPE_VIEW_GROUP_BY,
SCOPE_VIEW_SORT,
AirtableImportReport,
)
from baserow.contrib.database.airtable.utils import get_airtable_column_name
from baserow.contrib.database.airtable.utils import (
get_airtable_column_name,
unknown_value_to_human_readable,
)
from baserow.contrib.database.fields.field_filters import (
FILTER_TYPE_AND,
FILTER_TYPE_OR,
)
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.views.models import (
SORT_ORDER_ASC,
SORT_ORDER_DESC,
View,
ViewFilter,
ViewFilterGroup,
ViewGroupBy,
ViewSort,
)
from baserow.contrib.database.views.registries import ViewType, view_type_registry
from baserow.contrib.database.views.registries import (
ViewFilterType,
ViewType,
view_type_registry,
)
from baserow.core.registry import Instance, Registry
@ -307,9 +324,178 @@ class AirtableViewType(Instance):
return view_group_by
def get_filter(
self,
field_mapping: dict,
row_id_mapping: Dict[str, Dict[str, int]],
raw_airtable_view: dict,
raw_airtable_table: dict,
import_report: AirtableImportReport,
filter_object: dict,
parent_group: Optional[ViewFilterGroup] = None,
):
"""
This method converts a raw airtable filter object into a Baserow filter object
that's ready for the export system.
"""
# If it's not a group, then it's an individual filter, and it must be
# parsed accordingly.
if filter_object["columnId"] not in field_mapping:
column_name = get_airtable_column_name(
raw_airtable_table, filter_object["columnId"]
)
filter_value = unknown_value_to_human_readable(filter_object["value"])
import_report.add_failed(
f'View "{raw_airtable_view["name"]}", Field ID "{column_name}"',
SCOPE_VIEW_FILTER,
raw_airtable_table["name"],
ERROR_TYPE_UNSUPPORTED_FEATURE,
f'The "{filter_object["operator"]}" filter with value '
f'"{filter_value}" on field "{column_name}" was ignored '
f'in view {raw_airtable_view["name"]} because the field was not '
f"imported.",
)
return None
mapping_entry = field_mapping[filter_object["columnId"]]
baserow_field_type = mapping_entry["baserow_field_type"]
baserow_field = mapping_entry["baserow_field"]
raw_airtable_column = mapping_entry["raw_airtable_column"]
can_filter_by = baserow_field_type.check_can_filter_by(baserow_field)
if not can_filter_by:
filter_value = unknown_value_to_human_readable(filter_object["value"])
import_report.add_failed(
f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
SCOPE_VIEW_FILTER,
raw_airtable_table["name"],
ERROR_TYPE_UNSUPPORTED_FEATURE,
f'The "{filter_object["operator"]}" filter with value '
f'"{filter_value}" on field "{baserow_field.name}" was '
f'ignored in view {raw_airtable_view["name"]} because it\'s not '
f"possible to filter by that field type.",
)
return None
try:
filter_operator = airtable_filter_operator_registry.get(
filter_object["operator"]
)
filter_type, value = filter_operator.to_baserow_filter_and_value(
row_id_mapping,
raw_airtable_table,
raw_airtable_column,
baserow_field,
import_report,
filter_object["value"],
)
if not filter_type.field_is_compatible(baserow_field):
raise AirtableSkipFilter
except (
airtable_filter_operator_registry.does_not_exist_exception_class,
# If the `AirtableSkipFilter` exception is raised, then the Airtable
# filter existing, but is not compatible with the Baserow filters. This
# can be raised in the `to_baserow_filter_and_value`, but also if it
# appears to not be compatible afterward.
AirtableSkipFilter,
):
filter_value = unknown_value_to_human_readable(filter_object["value"])
import_report.add_failed(
f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
SCOPE_VIEW_FILTER,
raw_airtable_table["name"],
ERROR_TYPE_UNSUPPORTED_FEATURE,
f'The "{filter_object["operator"]}" filter with value '
f'"{filter_value}" on field "{baserow_field.name}" was '
f'ignored in view {raw_airtable_view["name"]} because not no '
f"compatible filter exists.",
)
return None
return ViewFilter(
id=filter_object["id"],
type=filter_type.type,
value=value,
field_id=filter_object["columnId"],
view_id=raw_airtable_view["id"],
group_id=parent_group.id if parent_group else None,
)
def get_filters(
self,
field_mapping: dict,
row_id_mapping: Dict[str, Dict[str, int]],
raw_airtable_view: dict,
raw_airtable_table: dict,
import_report: AirtableImportReport,
filter_object: dict,
filter_groups: Optional[List[ViewFilterGroup]] = None,
parent_group: Optional[ViewFilterGroup] = None,
) -> Union[List[ViewFilter], List[ViewFilterGroup]]:
"""
Recursive method that either loops over the filters in the `filter_object`, and
converts it to two flat lists containing the Baserow ViewFilter and
ViewFilterGroup objects.
"""
if filter_groups is None:
filter_groups = []
filters = []
conjunction = filter_object.get("conjunction", None)
filter_set = filter_object.get("filterSet", None)
if conjunction and filter_set:
# The filter_object is a nested structure, where if the `conjunction` and
# `filterSet` are in the object, it means that it's a filter group.
view_group = ViewFilterGroup(
# Specifically keep the id `None` for the root group because that
# doesn't exist in Baserow.
id=filter_object.get("id", None),
parent_group=parent_group,
filter_type=FILTER_TYPE_OR if conjunction == "or" else FILTER_TYPE_AND,
view_id=raw_airtable_view["id"],
)
if view_group not in filter_groups:
filter_groups.append(view_group)
for child_filter in filter_set:
child_filters, _ = self.get_filters(
field_mapping,
row_id_mapping,
raw_airtable_view,
raw_airtable_table,
import_report,
child_filter,
filter_groups,
view_group,
)
filters.extend(child_filters)
return filters, filter_groups
else:
baserow_filter = self.get_filter(
field_mapping,
row_id_mapping,
raw_airtable_view,
raw_airtable_table,
import_report,
filter_object,
parent_group,
)
if baserow_filter is None:
return [], []
else:
return [baserow_filter], []
def to_serialized_baserow_view(
self,
field_mapping,
row_id_mapping,
raw_airtable_table,
raw_airtable_view,
raw_airtable_view_data,
@ -329,6 +515,22 @@ class AirtableViewType(Instance):
order=raw_airtable_table["viewOrder"].index(raw_airtable_view["id"]) + 1,
)
filters_object = raw_airtable_view_data.get("filters", None)
filters = []
filter_groups = []
if view_type.can_filter and filters_object is not None:
filters, filter_groups = self.get_filters(
field_mapping,
row_id_mapping,
raw_airtable_view,
raw_airtable_table,
import_report,
filters_object,
)
# Pop the first group because that group shouldn't exist in Baserow; its
# filter type is defined on the view itself.
view.filter_type = filter_groups.pop(0).filter_type
sorts = self.get_sorts(
field_mapping,
view_type,
@ -348,8 +550,8 @@ class AirtableViewType(Instance):
view.get_field_options = lambda *args, **kwargs: []
view._prefetched_objects_cache = {
"viewfilter_set": [],
"filter_groups": [],
"viewfilter_set": filters,
"filter_groups": filter_groups,
"viewsort_set": sorts,
"viewgroupby_set": group_bys,
"viewdecoration_set": [],
@ -410,6 +612,7 @@ class AirtableViewTypeRegistry(Registry):
def from_airtable_view_to_serialized(
self,
field_mapping: dict,
row_id_mapping: Dict[str, Dict[str, int]],
raw_airtable_table: dict,
raw_airtable_view: dict,
raw_airtable_view_data: dict,
@ -421,6 +624,8 @@ class AirtableViewTypeRegistry(Registry):
None is returned, the view is not compatible with Baserow and must be ignored.
:param field_mapping: A dict containing all the imported fields.
:param row_id_mapping: A dict mapping the Airtable row IDs to Baserow row IDs
per table ID.
:param raw_airtable_table: The raw Airtable table data related to the column.
:param raw_airtable_view: The raw Airtable column data that must be imported.
:param raw_airtable_view_data: The raw Airtable view data containing filters,
@ -437,6 +642,7 @@ class AirtableViewTypeRegistry(Registry):
airtable_view_type = self.get(type_name)
serialized_view = airtable_view_type.to_serialized_baserow_view(
field_mapping,
row_id_mapping,
raw_airtable_table,
raw_airtable_view,
raw_airtable_view_data,
@ -451,7 +657,43 @@ class AirtableViewTypeRegistry(Registry):
return None
class AirtableFilterOperator(Instance):
    """
    Base class for converting a raw Airtable filter operator into the matching
    Baserow filter type and value. Registered in the
    `airtable_filter_operator_registry` keyed by the Airtable operator string.
    """

    def to_baserow_filter_and_value(
        self,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_column: dict,
        baserow_field: Field,
        import_report: AirtableImportReport,
        value: str,
    ) -> Tuple[ViewFilterType, str]:
        """
        Converts the given Airtable value into the matching Baserow filter type and
        correct value.

        :param row_id_mapping: A dict mapping the Airtable row IDs to Baserow row
            IDs per table ID.
        :param raw_airtable_table: The raw Airtable table data related to the
            filter.
        :param raw_airtable_column: The raw Airtable column data related to the
            filter.
        :param baserow_field: The Baserow field related to the filter.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :param value: The value that must be converted.
        :raises AirtableSkipFilter: If no compatible Baserow filter can be found.
        :return: The matching Baserow filter type and value.
        """

        # Reference the actual method name so subclass authors get an accurate
        # error (previously pointed at a non-existent `to_baserow_filter`).
        raise NotImplementedError(
            f"The `to_baserow_filter_and_value` must be implemented for {self.type}."
        )
class AirtableFilterOperatorRegistry(Registry):
    """
    Holds the registered `AirtableFilterOperator` instances, keyed by the raw
    Airtable operator string (e.g. `contains`, `=`, `isEmpty`).
    """

    name = "airtable_filter_operator"
# The default registry instances are created here; these are the ones used
# throughout the whole Baserow application to register new Airtable column
# types, view types, and filter operators.
airtable_column_type_registry = AirtableColumnTypeRegistry()
airtable_view_type_registry = AirtableViewTypeRegistry()
airtable_filter_operator_registry = AirtableFilterOperatorRegistry()

View file

@ -1,9 +1,15 @@
import json
import re
from typing import Any, Optional
from requests import Response
from baserow.core.utils import remove_invalid_surrogate_characters
from baserow.contrib.database.airtable.constants import (
AIRTABLE_DATE_FILTER_VALUE_MAP,
AIRTABLE_MAX_DURATION_VALUE,
)
from baserow.contrib.database.airtable.exceptions import AirtableSkipFilter
from baserow.core.utils import get_value_at_path, remove_invalid_surrogate_characters
def extract_share_id_from_url(public_base_url: str) -> str:
@ -62,6 +68,25 @@ def get_airtable_column_name(raw_airtable_table, column_id) -> str:
return column_id
def unknown_value_to_human_readable(value: Any) -> str:
    """
    If a value can't be converted to human-readable value, then this function can
    be used to generate something user-friendly.

    :param value: The value that must be converted.
    :return: The human-readable string value.
    """

    if value is None:
        return ""

    # Lists are summarized by their length rather than their contents.
    if isinstance(value, list):
        count = len(value)
        return "1 item" if count == 1 else f"{count} items"

    # Collaborator ids ("usr..." strings) read as a single item.
    is_collaborator_id = isinstance(value, str) and value.startswith("usr")
    return "1 item" if is_collaborator_id else str(value)
def parse_json_and_remove_invalid_surrogate_characters(response: Response) -> dict:
"""
The response from Airtable can sometimes contain invalid surrogate characters. This
@ -218,3 +243,48 @@ def quill_to_markdown(ops: list) -> str:
flush_line()
return "".join(md_output).strip()
def airtable_date_filter_value_to_baserow(value: Optional[dict]) -> str:
    """
    Converts the provided Airtable filter date value to the Baserow compatible date
    value string.

    :param value: A dict containing the Airtable date value. NOTE: the dict is
        mutated in place when it contains an `exactDate` key.
    :raises KeyError: If `mode` is missing or unknown, or if a placeholder of the
        matching template is not present in `value`.
    :return: e.g. Europe/Amsterdam?2025-01-01?exact_date
    """

    if value is None:
        return ""

    mode = value["mode"]

    if "exactDate" in value:
        # By default, Airtable adds the time, but that is not needed in Baserow,
        # so only the `YYYY-MM-DD` prefix is kept.
        value["exactDate"] = value["exactDate"][:10]

    # Look up the `{timeZone}?{number}?{mode}` template and fill its placeholders
    # with the keys of the Airtable value dict.
    date_string = AIRTABLE_DATE_FILTER_VALUE_MAP[mode]
    return date_string.format(**value)
def skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value):
    """
    If the provided Airtable column is a number with duration formatting, and if
    the value exceeds the maximum we can process, then the `AirtableSkipFilter` is
    raised.

    :param raw_airtable_column: The related raw Airtable column.
    :param value: The value that must be checked.
    :raises: AirtableSkipFilter
    """

    is_duration = (
        get_value_at_path(raw_airtable_column, "typeOptions.format") == "duration"
    )

    if not is_duration:
        return

    try:
        value = int(value)
        if abs(value) > AIRTABLE_MAX_DURATION_VALUE:
            raise AirtableSkipFilter
    except ValueError:
        # Non-numeric strings can't exceed the maximum, so they're ignored here.
        # NOTE(review): a `TypeError` (e.g. `None` or a list) is not caught and
        # would propagate — confirm callers only pass str/int values.
        pass

View file

@ -159,6 +159,7 @@ class DatabaseConfig(AppConfig):
from .airtable.registry import (
airtable_column_type_registry,
airtable_filter_operator_registry,
airtable_view_type_registry,
)
from .data_sync.registries import data_sync_type_registry
@ -655,6 +656,44 @@ class DatabaseConfig(AppConfig):
airtable_view_type_registry.register(GridAirtableViewType())
from .airtable.airtable_filter_operators import (
AirtableContainsOperator,
AirtableDoesNotContainOperator,
AirtableEqualOperator,
AirtableFilenameOperator,
AirtableFiletypeOperator,
AirtableHasAllOfOperator,
AirtableHasAnyOfOperator,
AirtableIsAnyOfOperator,
AirtableIsEmptyOperator,
AirtableIsNoneOfOperator,
AirtableIsNotEmptyOperator,
AirtableIsWithinOperator,
AirtableLessThanOperator,
AirtableLessThanOrEqualOperator,
AirtableMoreThanOperator,
AirtableMoreThanOrEqualOperator,
AirtableNotEqualOperator,
)
airtable_filter_operator_registry.register(AirtableContainsOperator())
airtable_filter_operator_registry.register(AirtableDoesNotContainOperator())
airtable_filter_operator_registry.register(AirtableEqualOperator())
airtable_filter_operator_registry.register(AirtableNotEqualOperator())
airtable_filter_operator_registry.register(AirtableIsEmptyOperator())
airtable_filter_operator_registry.register(AirtableIsNotEmptyOperator())
airtable_filter_operator_registry.register(AirtableFilenameOperator())
airtable_filter_operator_registry.register(AirtableFiletypeOperator())
airtable_filter_operator_registry.register(AirtableIsAnyOfOperator())
airtable_filter_operator_registry.register(AirtableIsNoneOfOperator())
airtable_filter_operator_registry.register(AirtableHasAnyOfOperator())
airtable_filter_operator_registry.register(AirtableHasAllOfOperator())
airtable_filter_operator_registry.register(AirtableLessThanOperator())
airtable_filter_operator_registry.register(AirtableMoreThanOperator())
airtable_filter_operator_registry.register(AirtableLessThanOrEqualOperator())
airtable_filter_operator_registry.register(AirtableMoreThanOrEqualOperator())
airtable_filter_operator_registry.register(AirtableIsWithinOperator())
from .data_sync.data_sync_types import (
ICalCalendarDataSyncType,
PostgreSQLDataSyncType,

View file

@ -38,6 +38,42 @@ def parse_ids_from_csv_string(value: str) -> list[int]:
return []
def map_ids_from_csv_string(
    value_string: str, mapping: Optional[dict] = None
) -> list[str]:
    """
    Parses the provided value if needed and returns a list of mapped ids.

    :param value_string: The value that has been provided by the user.
    :param mapping: Key is the given option id, and the value is the target option
        id. If not provided, no ids can be mapped and an empty list is returned.
    :return: A list of stringified target ids.
    """

    # Guard the documented `None` default; without this, the membership checks
    # below would raise a `TypeError`.
    if mapping is None:
        mapping = {}

    # There is a small chance the value is an int in case a raw ID was provided in
    # the row coloring, where the filters are stored as JSON. Cast it to a string to
    # make it compatible.
    if not isinstance(value_string, str):
        value_string = str(value_string)

    parsed_values = []
    for value in value_string.split(","):
        # In some cases, the select option ID is a string, like with the Airtable
        # import. If the value can be found in the mapping, then we'll directly use
        # that value.
        if value in mapping:
            parsed_values.append(str(mapping[value]))
            continue

        if value.strip().isdigit():
            # Convert to int because the serialized value can be a string, but the
            # key in the mapping is an int.
            value = int(value)
            if value in mapping:
                parsed_values.append(str(mapping[value]))

    return parsed_values
class AnnotatedQ:
"""
A simple wrapper class combining a params for a Queryset.annotate call with a

View file

@ -21,6 +21,7 @@ from baserow.contrib.database.fields.field_filters import (
FilterBuilder,
OptionallyAnnotatedQ,
filename_contains_filter,
map_ids_from_csv_string,
parse_ids_from_csv_string,
)
from baserow.contrib.database.fields.field_types import (
@ -1102,13 +1103,12 @@ class SingleSelectEqualViewFilterType(ViewFilterType):
return filter_function(field_name, value, model_field, field)
def set_import_serialized_value(self, value, id_mapping):
mapping = id_mapping["database_field_select_options"]
try:
value = int(value)
except ValueError:
return map_ids_from_csv_string(value, mapping)[0]
except IndexError:
return ""
return str(id_mapping["database_field_select_options"].get(value, ""))
class SingleSelectNotEqualViewFilterType(
NotViewFilterTypeMixin, SingleSelectEqualViewFilterType
@ -1159,13 +1159,8 @@ class SingleSelectIsAnyOfViewFilterType(ViewFilterType):
return filter_function(field_name, option_ids, model_field, field)
def set_import_serialized_value(self, value: str | None, id_mapping: dict) -> str:
# Parses the old option ids and remaps them to the new option ids.
old_options_ids = parse_ids_from_csv_string(value or "")
select_option_map = id_mapping["database_field_select_options"]
new_values = []
for old_id in old_options_ids:
if new_id := select_option_map.get(old_id):
new_values.append(str(new_id))
new_values = map_ids_from_csv_string(value or "", select_option_map)
return ",".join(new_values)
@ -1414,15 +1409,8 @@ class MultipleSelectHasViewFilterType(ManyToManyHasBaseViewFilter):
return filter_function(field_name, option_ids, model_field, field)
def set_import_serialized_value(self, value: str | None, id_mapping: dict) -> str:
# Parses the old option ids and remaps them to the new option ids.
old_options_ids = parse_ids_from_csv_string(value or "")
select_option_map = id_mapping["database_field_select_options"]
new_values = []
for old_id in old_options_ids:
if new_id := select_option_map.get(old_id):
new_values.append(str(new_id))
new_values = map_ids_from_csv_string(value or "", select_option_map)
return ",".join(new_values)

View file

@ -897,6 +897,9 @@ class MirrorDict(defaultdict):
    def __missing__(self, key):
        # Unknown keys resolve to themselves, making the dict an identity mapping
        # for anything not explicitly stored.
        return key
    def __contains__(self, key):
        # Every key is considered present because `__missing__` can resolve any
        # key.
        return True
    def get(self, key, default=None):
        # The default is intentionally ignored; `self[key]` always resolves, via
        # `__missing__` when the key is not stored.
        return self[key]

File diff suppressed because it is too large Load diff

View file

@ -1,10 +1,12 @@
import pytest
from baserow.contrib.database.airtable.utils import (
airtable_date_filter_value_to_baserow,
extract_share_id_from_url,
get_airtable_column_name,
get_airtable_row_primary_value,
quill_to_markdown,
unknown_value_to_human_readable,
)
@ -271,3 +273,37 @@ def test_quill_to_markdown_airtable_example_two_lists():
- ~Item~
- [link](https://airtable.com)"""
)
def test_airtable_date_filter_value_to_baserow():
    # An Airtable exact date filter value must be converted to Baserow's
    # `timezone?date?mode` string format.
    airtable_value = {
        "mode": "exactDate",
        "exactDate": "2025-02-05T00:00:00.000Z",
        "timeZone": "Europe/Amsterdam",
        "shouldUseCorrectTimeZoneForFormulaicColumn": True,
    }
    result = airtable_date_filter_value_to_baserow(airtable_value)
    assert result == "Europe/Amsterdam?2025-02-05?exact_date"
def test_airtable_invalid_date_filter_value_to_baserow():
    # An unknown `mode` has no matching date mode mapping, so the
    # conversion is expected to raise a KeyError.
    invalid_value = {
        "mode": "not_found",
        "exactDate": "2025-02-05T00:00:00.000Z",
        "timeZone": "Europe/Amsterdam",
        "shouldUseCorrectTimeZoneForFormulaicColumn": True,
    }
    with pytest.raises(KeyError):
        airtable_date_filter_value_to_baserow(invalid_value)
def test_unknown_value_to_human_readable():
    # (input value, expected human readable representation)
    cases = [
        (None, ""),
        (["1", "2"], "2 items"),
        (["1"], "1 item"),
        ("usrGUN1234", "1 item"),
        ("random", "random"),
    ]
    for value, expected in cases:
        assert unknown_value_to_human_readable(value) == expected

View file

@ -1,5 +1,9 @@
from copy import deepcopy
from django.contrib.contenttypes.models import ContentType
import pytest
from baserow.contrib.database.airtable.config import AirtableImportConfig
from baserow.contrib.database.airtable.import_report import (
SCOPE_VIEW_GROUP_BY,
@ -38,6 +42,7 @@ RAW_AIRTABLE_TABLE = {
"viewSectionsById": {},
"schemaChecksum": "46f523a43433afe37d63e00d1a0f36c64310f06e4e0af2c32b6e99f26ab0e51a",
}
ROW_ID_MAPPING = {}
FIELD_MAPPING = {
"fldwSc9PqedIhTSqhi1": {
"baserow_field": TextField(
@ -90,7 +95,7 @@ RAW_VIEW_DATA_FILTERS = {
{
"id": "flthuYL0uubbDF2Xy",
"type": "nested",
"conjunction": "or",
"conjunction": "and",
"filterSet": [
{
"id": "flt70g1l245672xRi",
@ -107,7 +112,7 @@ RAW_VIEW_DATA_FILTERS = {
],
},
],
"conjunction": "and",
"conjunction": "or",
}
RAW_VIEW_DATA_SORTS = {
"sortSet": [
@ -134,6 +139,7 @@ def test_import_grid_view():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
RAW_AIRTABLE_VIEW_DATA,
@ -188,6 +194,7 @@ def test_import_grid_view_xlarge_row_height():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -205,6 +212,7 @@ def test_import_grid_view_unknown_row_height():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -221,6 +229,7 @@ def test_import_grid_view_sorts():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -235,6 +244,7 @@ def test_import_grid_view_sorts():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -253,6 +263,7 @@ def test_import_grid_view_sort_field_not_found():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
{},
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -279,6 +290,7 @@ def test_import_grid_view_sort_field_unsupported():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
field_mapping,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -301,6 +313,7 @@ def test_import_grid_view_group_bys():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -315,6 +328,7 @@ def test_import_grid_view_group_bys():
airtable_view_type = airtable_view_type_registry.get("grid")
serialized_view = airtable_view_type.to_serialized_baserow_view(
FIELD_MAPPING,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -333,6 +347,7 @@ def test_import_grid_view_group_by_field_not_found():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
{},
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -359,6 +374,7 @@ def test_import_grid_view_group_by_field_unsupported():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
field_mapping,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -385,6 +401,7 @@ def test_import_grid_view_group_by_order_unsupported():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
field_mapping,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -409,6 +426,7 @@ def test_import_grid_view_field_order_and_visibility():
import_report = AirtableImportReport()
serialized_view = airtable_view_type.to_serialized_baserow_view(
field_mapping,
ROW_ID_MAPPING,
RAW_AIRTABLE_TABLE,
RAW_AIRTABLE_VIEW,
view_data,
@ -436,3 +454,57 @@ def test_import_grid_view_field_order_and_visibility():
"aggregation_raw_type": "",
},
]
@pytest.mark.django_db
def test_import_grid_view_filters_and_groups():
    # Work on copies so the module level fixtures stay untouched.
    data = deepcopy(RAW_AIRTABLE_VIEW_DATA)
    data["filters"] = RAW_VIEW_DATA_FILTERS
    mapping = deepcopy(FIELD_MAPPING)
    # The view serializer needs a content type on every Baserow field.
    for entry in mapping.values():
        baserow_field = entry["baserow_field"]
        baserow_field.content_type = ContentType.objects.get_for_model(
            baserow_field
        )

    import_report = AirtableImportReport()
    grid_view_type = airtable_view_type_registry.get("grid")
    serialized_view = grid_view_type.to_serialized_baserow_view(
        mapping,
        ROW_ID_MAPPING,
        RAW_AIRTABLE_TABLE,
        RAW_AIRTABLE_VIEW,
        data,
        AirtableImportConfig(),
        import_report,
    )

    # Root conjunction is "or"; the nested filter set becomes an "AND" group.
    assert serialized_view["filter_type"] == "OR"
    assert serialized_view["filters_disabled"] is False
    expected_filters = [
        {
            "id": "fltp2gabc8P91234f",
            "field_id": "fldwSc9PqedIhTSqhi1",
            "type": "not_empty",
            "value": "",
            "group": None,
        },
        {
            "id": "flt70g1l245672xRi",
            "field_id": "fldwSc9PqedIhTSqhi1",
            "type": "not_equal",
            "value": "test",
            "group": "flthuYL0uubbDF2Xy",
        },
        {
            "id": "fltVg238719fbIKqC",
            "field_id": "fldwSc9PqedIhTSqhi2",
            "type": "not_equal",
            "value": "test2",
            "group": "flthuYL0uubbDF2Xy",
        },
    ]
    assert serialized_view["filters"] == expected_filters
    assert serialized_view["filter_groups"] == [
        {"id": "flthuYL0uubbDF2Xy", "filter_type": "AND", "parent_group": None}
    ]

View file

@ -1458,6 +1458,14 @@ def test_single_select_equal_filter_type_export_import():
assert view_filter_type.set_import_serialized_value("wrong", id_mapping) == ""
@pytest.mark.django_db
def test_single_select_equal_filter_type_export_import_string_keys():
    # Old option ids can be strings (e.g. Airtable ids): a known key maps
    # to the new id, an unknown key results in an empty value.
    filter_type = view_filter_type_registry.get("single_select_equal")
    mapping = {"database_field_select_options": {"test": 2}}
    assert filter_type.set_import_serialized_value("test", mapping) == "2"
    assert filter_type.set_import_serialized_value("test2", mapping) == ""
@pytest.mark.django_db
@pytest.mark.parametrize(
"field_name", ["single_select", "ref_single_select", "ref_ref_single_select"]
@ -1755,6 +1763,36 @@ def test_single_select_is_any_of_filter_type_export_import():
assert view_filter_type.set_import_serialized_value(None, id_mapping) == ""
@pytest.mark.django_db
def test_single_select_is_any_of_filter_type_export_import_string_keys():
    # String keys must be remapped per csv entry; unknown ids are dropped.
    filter_type = view_filter_type_registry.get("single_select_is_any_of")
    mapping = {"database_field_select_options": {"test": 2, "test2": 3}}
    cases = [
        ("1", ""),
        ("", ""),
        ("test", "2"),
        ("test,test2", "2,3"),
        ("test,invalid", "2"),
    ]
    for value, expected in cases:
        assert filter_type.set_import_serialized_value(value, mapping) == expected
@pytest.mark.django_db
def test_single_multiple_select_has_type_export_import_string_keys():
    # Same string-key remapping contract as the single select filter types.
    filter_type = view_filter_type_registry.get("multiple_select_has")
    mapping = {"database_field_select_options": {"test": 2, "test2": 3}}
    cases = [
        ("1", ""),
        ("", ""),
        ("test", "2"),
        ("test,test2", "2,3"),
        ("test,invalid", "2"),
    ]
    for value, expected in cases:
        assert filter_type.set_import_serialized_value(value, mapping) == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
"field_name", ["single_select", "ref_single_select", "ref_ref_single_select"]

View file

@ -0,0 +1,8 @@
{
"type": "feature",
"message": "Import Airtable view filters.",
"domain": "database",
"issue_number": 793,
"bullet_points": [],
"created_at": "2025-03-04"
}