Mirror of https://gitlab.com/bramw/baserow.git

Resolve "Show select_option suggestion when converting TO a single_select field"

parent a0cf33d8f7
commit fdc439369b

22 changed files with 629 additions and 144 deletions

Changed areas: backend/src/baserow/config/settings, backend/src/baserow/contrib/database,
backend/tests/baserow/contrib/database and web-frontend/modules/database.

@@ -437,6 +437,8 @@ BATCH_ROWS_SIZE_LIMIT = int(

TRASH_PAGE_SIZE_LIMIT = 200  # How many trash entries can be requested at once.
ROW_COMMENT_PAGE_SIZE_LIMIT = 200  # How many row comments can be requested at once.
# How many unique row values can be requested at once.
UNIQUE_ROW_VALUES_SIZE_LIMIT = 50

# The amount of rows that can be imported when creating a table.
INITIAL_TABLE_DATA_LIMIT = None

@@ -101,3 +101,8 @@ ERROR_INVALID_SELECT_OPTION_VALUES = (
    HTTP_400_BAD_REQUEST,
    "The provided select option ids {e.ids} are not valid select options.",
)
ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES = (
    "ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES",
    HTTP_400_BAD_REQUEST,
    "The requested field type is not compatible with generating unique values.",
)

@@ -184,3 +184,19 @@ class MustBeEmptyField(serializers.Field):

    def to_internal_value(self, data):
        return None


class UniqueRowValueParamsSerializer(serializers.Serializer):
    limit = serializers.IntegerField(
        required=False, help_text="Defines how many values should be returned."
    )
    split_comma_separated = serializers.BooleanField(
        required=False,
        help_text="Indicates whether the original column values must be splitted by "
        "comma.",
        default=False,
    )


class UniqueRowValuesSerializer(serializers.Serializer):
    values = serializers.ListSerializer(child=serializers.CharField())

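For orientation, a minimal standalone sketch (not part of the commit) of how the new params serializer coerces raw query-string values. It assumes an installed and configured Django REST Framework environment and mirrors the fields above instead of importing Baserow.

# Standalone sketch mirroring UniqueRowValueParamsSerializer; assumes DRF is
# installed and Django settings are configured.
from rest_framework import serializers

class ParamsSketch(serializers.Serializer):
    limit = serializers.IntegerField(required=False)
    split_comma_separated = serializers.BooleanField(required=False, default=False)

params = ParamsSketch(data={"limit": "10", "split_comma_separated": "true"})
params.is_valid(raise_exception=True)
assert params.validated_data == {"limit": 10, "split_comma_separated": True}
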
@@ -2,12 +2,17 @@ from django.urls import re_path

from baserow.contrib.database.fields.registries import field_type_registry

-from .views import FieldsView, FieldView
+from .views import FieldsView, FieldView, UniqueRowValueFieldView


app_name = "baserow.contrib.database.api.fields"


urlpatterns = field_type_registry.api_urls + [
    re_path(r"table/(?P<table_id>[0-9]+)/$", FieldsView.as_view(), name="list"),
    re_path(
        r"(?P<field_id>[0-9]+)/unique_row_values/$",
        UniqueRowValueFieldView.as_view(),
        name="unique_row_values",
    ),
    re_path(r"(?P<field_id>[0-9]+)/$", FieldView.as_view(), name="item"),
]

@@ -1,4 +1,5 @@
from django.db import transaction
from django.conf import settings
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import permission_classes as method_permission_classes

@@ -6,7 +7,11 @@ from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView

-from baserow.api.decorators import validate_body_custom_fields, map_exceptions
+from baserow.api.decorators import (
+    validate_body_custom_fields,
+    map_exceptions,
+    validate_query_parameters,
+)
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.schemas import get_error_schema
from baserow.api.trash.errors import ERROR_CANNOT_DELETE_ALREADY_DELETED_ITEM

@@ -22,6 +27,7 @@ from baserow.contrib.database.api.fields.errors import (
    ERROR_INVALID_BASEROW_FIELD_NAME,
    ERROR_FIELD_SELF_REFERENCE,
    ERROR_FIELD_CIRCULAR_REFERENCE,
    ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES,
)
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication

@@ -34,6 +40,7 @@ from baserow.contrib.database.fields.exceptions import (
    ReservedBaserowFieldNameException,
    FieldWithSameNameAlreadyExists,
    InvalidBaserowFieldName,
    IncompatibleFieldTypeForUniqueValues,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field

@@ -50,6 +57,8 @@ from .serializers import (
    UpdateFieldSerializer,
    FieldSerializerWithRelatedFields,
    RelatedFieldsSerializer,
    UniqueRowValueParamsSerializer,
    UniqueRowValuesSerializer,
)
from baserow.contrib.database.fields.dependencies.exceptions import (
    SelfReferenceFieldDependencyError,

@@ -394,3 +403,63 @@ class FieldView(APIView):
        updated_fields = FieldHandler().delete_field(request.user, field)

        return Response(RelatedFieldsSerializer({}, related_fields=updated_fields).data)


class UniqueRowValueFieldView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="field_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Returns the values related to the provided field.",
            ),
            OpenApiParameter(
                name="limit",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.INT,
                description="Defines how many values should be returned.",
            ),
            OpenApiParameter(
                name="split_comma_separated",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.BOOL,
                description="Indicates whether the original column values must be "
                "splitted by comma.",
            ),
        ],
        tags=["Database table fields"],
        operation_id="get_database_field_unique_row_values",
        description=(
            "Returns a list of all the unique row values for an existing field, sorted "
            "in order of frequency."
        ),
        responses={
            200: UniqueRowValuesSerializer,
            400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
            404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
        },
    )
    @map_exceptions(
        {
            FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
            IncompatibleFieldTypeForUniqueValues: ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES,
        }
    )
    @validate_query_parameters(UniqueRowValueParamsSerializer)
    def get(self, request, field_id, query_params):
        field = FieldHandler().get_field(field_id)
        limit = query_params.get("limit")
        split_comma_separated = query_params.get("split_comma_separated")

        if not limit or limit > settings.UNIQUE_ROW_VALUES_SIZE_LIMIT:
            limit = settings.UNIQUE_ROW_VALUES_SIZE_LIMIT

        values = FieldHandler().get_unique_row_values(
            field, limit, split_comma_separated=split_comma_separated
        )

        return Response(UniqueRowValuesSerializer({"values": values}).data)

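As a usage illustration (not part of the diff), a sketch of calling the new endpoint over HTTP. The base URL, JWT token and field id are placeholder assumptions, and the route prefix assumes Baserow's default /api/database/fields/ mounting.

# Hedged sketch: host, token and field id below are placeholders.
import requests

BASE_URL = "http://localhost:8000/api"  # assumed local Baserow backend
TOKEN = "<jwt-token>"  # placeholder
FIELD_ID = 123  # placeholder

response = requests.get(
    f"{BASE_URL}/database/fields/{FIELD_ID}/unique_row_values/",
    params={"limit": 10, "split_comma_separated": "true"},
    headers={"Authorization": f"JWT {TOKEN}"},
)
response.raise_for_status()
# The view caps `limit` at settings.UNIQUE_ROW_VALUES_SIZE_LIMIT and returns
# {"values": [...]}, most frequent value first.
print(response.json()["values"])
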
@@ -4,6 +4,8 @@ from django.db import connection, transaction
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import strip_quotes

from .sql_queries import sql_drop_try_cast, sql_create_try_cast


class PostgresqlLenientDatabaseSchemaEditor:
    """

@@ -18,27 +20,6 @@ class PostgresqlLenientDatabaseSchemaEditor:
        "ALTER COLUMN %(column)s TYPE %(type)s "
        "USING pg_temp.try_cast(%(column)s::text)"
    )
-    sql_drop_try_cast = "DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)"
-    sql_create_try_cast = """
-        create or replace function pg_temp.try_cast(
-            p_in text,
-            p_default int default null
-        )
-        returns %(type)s
-        as
-        $FUNCTION$
-        begin
-            begin
-                %(alter_column_prepare_old_value)s
-                %(alter_column_prepare_new_value)s
-                return p_in::%(type)s;
-            exception when others then
-                return p_default;
-            end;
-        end;
-        $FUNCTION$
-        language plpgsql;
-    """

    def __init__(
        self,

@@ -84,9 +65,9 @@ class PostgresqlLenientDatabaseSchemaEditor:
        quoted_column_name = self.quote_name(new_field.column)
        for key, value in variables.items():
            variables[key] = value.replace("$FUNCTION$", "")
-        self.execute(self.sql_drop_try_cast)
+        self.execute(sql_drop_try_cast)
        self.execute(
-            self.sql_create_try_cast
+            sql_create_try_cast
            % {
                "column": quoted_column_name,
                "type": new_type,

backend/src/baserow/contrib/database/db/sql_queries.py (new file, 21 lines)

@@ -0,0 +1,21 @@
sql_drop_try_cast = "DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)"
sql_create_try_cast = """
    create or replace function pg_temp.try_cast(
        p_in text,
        p_default int default null
    )
    returns %(type)s
    as
    $FUNCTION$
    begin
        begin
            %(alter_column_prepare_old_value)s
            %(alter_column_prepare_new_value)s
            return p_in::%(type)s;
        exception when others then
            return p_default;
        end;
    end;
    $FUNCTION$
    language plpgsql;
"""

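To make the template's role concrete, a small sketch (not part of the commit) of how sql_create_try_cast is rendered with plain % formatting, the same way the schema editor and the field handler do elsewhere in this diff. It assumes the new module is importable.

# Sketch: render the try_cast template the same way the callers in this commit do.
from baserow.contrib.database.db.sql_queries import sql_create_try_cast

rendered = sql_create_try_cast % {
    "type": "text",
    "alter_column_prepare_old_value": "",
    "alter_column_prepare_new_value": "",
}
# Executing `rendered` on a PostgreSQL connection (re)creates pg_temp.try_cast.
# If the injected prepare/casting logic raises, the function returns p_default
# (NULL by default) instead of failing the whole query.
print(rendered)
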
@@ -144,3 +144,7 @@ class InvalidLookupTargetField(Exception):
    Raised when a lookup field is attempted to be created or updated with a target
    field that does not exist or is not in the through fields linked table.
    """


class IncompatibleFieldTypeForUniqueValues(Exception):
    """Raised when the unique values of an incompatible field are requested."""

@@ -3,7 +3,6 @@ from psycopg2 import sql

from django.db import models, transaction

-from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.db.schema import (
    lenient_schema_editor,
    safe_django_schema_editor,

@@ -159,85 +158,6 @@ class MultipleSelectConversionBase(MultipleSelectConversionConfig):
            ),
        )

-    def _get_trim_and_split_field_values_query(self, model, field):
-        """
-        Creates a sql statement for the table of the given model which will split the
-        contents of the column of the given field by the configured regex and
-        subsequently trim the created strings by the configured trim settings.
-        """
-
-        return sql.SQL(
-            """
-            select
-                trim(
-                    both {trimmed} from
-                    unnest(regexp_split_to_array({column}::text, {regex}))
-                ) as col
-            from
-                {table}
-            """
-        ).format(
-            table=sql.Identifier(model._meta.db_table),
-            trimmed=sql.Literal(self.trim_empty_and_quote),
-            column=sql.Identifier(field.db_column),
-            regex=sql.Literal(self.regex_split),
-        )
-
-    def count_unique_field_values_options(self, connection, model, field):
-        """
-        Counts the unique field values of a given field type.
-        """
-
-        subselect = self._get_trim_and_split_field_values_query(model, field)
-        query = sql.SQL(
-            """
-            select
-                count(distinct col)
-            from
-                ({table_select}) as tmp_table
-            where
-                col != ''
-            """
-        ).format(
-            table_select=subselect,
-        )
-        with connection.cursor() as cursor:
-            cursor.execute(query)
-            res = cursor.fetchall()
-
-        return res[0][0]
-
-    def extract_field_values_to_options(self, connection, model, field):
-        """
-        Extracts the distinct values for a specific field type over all the existing
-        rows into one column with one row per value.
-        This is needed in order to generate the select_options when converting from any
-        text field to a multiple_select field.
-
-        :return: A list of select_options.
-        :rtype: list.
-        """
-
-        subselect = self._get_trim_and_split_field_values_query(model, field)
-        query = sql.SQL(
-            """
-            select
-                distinct left(col, {select_options_length})
-            from
-                ({table_select}) as tmp_table
-            where
-                col != ''
-            """
-        ).format(
-            table_select=subselect,
-            select_options_length=sql.Literal(self.allowed_select_options_length),
-        )
-        with connection.cursor() as cursor:
-            cursor.execute(query)
-            res = cursor.fetchall()
-        options = [{"value": x[0], "color": "blue"} for x in res]
-        return options
-
    @staticmethod
    def update_column_with_values(
        connection, values: sql.SQL, table: str, db_column: str

@@ -353,7 +273,6 @@ class TextFieldToMultipleSelectFieldConverter(FieldConverter):
        # Since we are converting to a multiple select field we might have to
        # create select options before we can then populate the table with the
        # given select options.
-        has_select_options = to_field.select_options.count() > 0
        values_query = sql.SQL(
            """
            SELECT

@@ -404,25 +323,7 @@ class TextFieldToMultipleSelectFieldConverter(FieldConverter):
        # lower than the allowed threshold and the user has not provided any
        # select_options themselves, we need to extract the options and create them.
-        with transaction.atomic():
-            if (
-                not has_select_options
-                and helper.count_unique_field_values_options(
-                    connection,
-                    to_model,
-                    tmp_model_field,
-                )
-                <= helper.new_select_options_threshold
-            ):
-                options = helper.extract_field_values_to_options(
-                    connection, to_model, tmp_model_field
-                )
-                field_handler = FieldHandler()
-                field_handler.update_field_select_options(user, to_field, options)

-            helper.insert_into_many_relationship(
-                connection,
-                values_query,
-            )
+        helper.insert_into_many_relationship(connection, values_query)
        schema_editor.remove_field(to_model, tmp_model_field)

@@ -631,7 +532,6 @@ class SingleSelectFieldToMultipleSelectFieldConverter(FieldConverter):
        user,
        connection,
    ):

        helper = MultipleSelectConversionBase(
            from_field,
            to_field,

@@ -955,6 +955,7 @@ class LinkRowFieldType(FieldType):
    }
    _can_order_by = False
    can_be_primary_field = False
    can_get_unique_values = False

    def enhance_queryset(self, queryset, field, name):
        """

@@ -1508,6 +1509,7 @@ class FileFieldType(FieldType):
    type = "file"
    model_class = FileField
    can_be_in_form_view = False
    can_get_unique_values = False

    def _extract_file_names(self, value):
        # Validates the provided object and extract the names from it. We need the name

@@ -2040,6 +2042,7 @@ class SingleSelectFieldType(SelectOptionBaseFieldType):
class MultipleSelectFieldType(SelectOptionBaseFieldType):
    type = "multiple_select"
    model_class = MultipleSelectField
    can_get_unique_values = False

    def get_serializer_field(self, instance, **kwargs):
        required = kwargs.get("required", False)

@@ -2674,7 +2677,7 @@ class LookupFieldType(FormulaFieldType):
        InvalidLookupThroughField: ERROR_INVALID_LOOKUP_THROUGH_FIELD,
        InvalidLookupTargetField: ERROR_INVALID_LOOKUP_TARGET_FIELD,
    }

    can_get_unique_values = False
    allowed_fields = BASEROW_FORMULA_TYPE_ALLOWED_FIELDS + [
        "through_field_id",
        "through_field_name",

@@ -1,18 +1,28 @@
import logging
from copy import deepcopy
from typing import Dict, Any, Optional, List
from psycopg2 import sql

from django.conf import settings
from django.db import connection
from django.db.utils import ProgrammingError, DataError


from baserow.contrib.database.db.schema import (
    lenient_schema_editor,
    safe_django_schema_editor,
)
from baserow.contrib.database.fields.constants import RESERVED_BASEROW_FIELD_NAMES
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.fields.field_converters import (
    MultipleSelectConversionConfig,
)
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.db.sql_queries import (
    sql_drop_try_cast,
    sql_create_try_cast,
)
from baserow.core.trash.exceptions import RelatedTableTrashedException
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs

@@ -29,6 +39,7 @@ from .exceptions import (
    ReservedBaserowFieldNameException,
    InvalidBaserowFieldName,
    MaxFieldNameLengthExceeded,
    IncompatibleFieldTypeForUniqueValues,
)
from .models import Field, SelectOption
from .registries import field_type_registry, field_converter_registry

@@ -784,3 +795,121 @@ class FieldHandler:
            field_restored.send(self, field=field, user=None, related_fields=[])
        else:
            raise e

    def get_unique_row_values(
        self, field: Field, limit: int, split_comma_separated: bool = False
    ) -> List[str]:
        """
        Returns a list of all the unique row values for a field, sorted in order of
        frequency.

        :param field: The field whose unique values are needed.
        :param limit: The maximum number of values returned.
        :param split_comma_separated: Indicates whether the text values must be split
            by comma.
        :return: A list containing the unique values sorted by frequency.
        """

        model = field.table.get_model()
        field_object = model._field_objects[field.id]
        field_type = field_object["type"]
        field = field_object["field"]

        if not field_type.can_get_unique_values:
            raise IncompatibleFieldTypeForUniqueValues(
                f"The field type `{field_object['type']}`"
            )

        # Prepare the old value sql so that `p_in` is converted to a string. This is
        # the same psql that's used when converting a field type to another type, so
        # we're sure it's converted to the right "neutral" text value.
        alter_column_prepare_old_value = field_type.get_alter_column_prepare_old_value(
            connection, field, TextField()
        )
        variables = ()

        # In some cases, `get_alter_column_prepare_old_value` returns a tuple where
        # the first part is the psql and the second part contains variables that must
        # be injected safely.
        if isinstance(alter_column_prepare_old_value, tuple):
            variables = alter_column_prepare_old_value[1]
            alter_column_prepare_old_value = alter_column_prepare_old_value[0]

        # Create the temporary try_cast function. It makes sure that if the casting
        # fails, the query doesn't fail hard, but falls back to `null`.
        with connection.cursor() as cursor:
            cursor.execute(sql_drop_try_cast)
            cursor.execute(
                sql_create_try_cast
                % {
                    "alter_column_prepare_old_value": alter_column_prepare_old_value
                    or "",
                    "alter_column_prepare_new_value": "",
                    "type": "text",
                },
                variables,
            )

        # If `split_comma_separated` is `True`, then we first need to explode the raw
        # column values by comma. This means that if one of the values contains a `,`,
        # it will be treated as two values. This is for example needed when converting
        # to a multiple select field.
        if split_comma_separated:
            subselect = sql.SQL(
                """
                select
                    trim(
                        both {trimmed} from
                        unnest(
                            regexp_split_to_array(
                                pg_temp.try_cast({column}::text), {regex}
                            )
                        )
                    ) as col
                from
                    {table}
                WHERE trashed = false
                """
            ).format(
                table=sql.Identifier(model._meta.db_table),
                trimmed=sql.Literal(
                    MultipleSelectConversionConfig.trim_empty_and_quote
                ),
                column=sql.Identifier(field.db_column),
                regex=sql.Literal(MultipleSelectConversionConfig.regex_split),
            )
        # Alternatively, we just want to select the raw column value.
        else:
            subselect = sql.SQL(
                """
                SELECT pg_temp.try_cast({column}::text) as col
                FROM {table}
                WHERE trashed = false
                """
            ).format(
                table=sql.Identifier(model._meta.db_table),
                column=sql.Identifier(field.db_column),
            )

        # Finally, we execute the constructed query and return the results as a list.
        query = sql.SQL(
            """
            select col
            from ({table_select}) as tmp_table
            where col != '' and col is NOT NULL
            group by col
            order by count(col) DESC
            limit {limit}
            """
        ).format(
            table_select=subselect,
            limit=sql.Literal(limit),
        )

        with connection.cursor() as cursor:
            cursor.execute(query)
            res = cursor.fetchall()

        return [x[0] for x in res]

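For reference, a hedged usage sketch of the new handler method (not part of the diff). It assumes a configured Baserow backend; the field id is a placeholder. It mirrors how the new API view and the tests call it.

# Usage sketch; the primary key below is a placeholder.
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import TextField

text_field = TextField.objects.get(pk=123)  # placeholder id
values = FieldHandler().get_unique_row_values(
    field=text_field, limit=10, split_comma_separated=True
)
# `values` is a list of distinct (optionally comma-split) cell values, most
# frequent first, ready to be turned into select option suggestions, e.g.
suggested_options = [{"value": value, "color": "blue"} for value in values]
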
@@ -84,6 +84,12 @@ class FieldType(
    can_be_in_form_view = True
    """Indicates whether the field is compatible with the form view."""

    can_get_unique_values = True
    """
    Indicates whether this field can generate a list of unique values using the
    `FieldHandler::get_unique_row_values` method.
    """

    read_only = False
    """Indicates whether the field allows inserting/updating row values or if it is
    read only."""

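Purely as an illustration of the new flag (hypothetical, not in the commit): a custom field type that opts out of unique value generation, which makes FieldHandler.get_unique_row_values raise IncompatibleFieldTypeForUniqueValues for fields of that type.

# Hypothetical custom field type; the name and model are illustrative only.
from baserow.contrib.database.fields.registries import FieldType

class SignatureFieldType(FieldType):  # hypothetical example type
    type = "signature"  # hypothetical type name
    model_class = None  # a real field type would reference its Django model here
    can_get_unique_values = False  # opt out of unique row value suggestions
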
@@ -513,3 +513,93 @@ def test_delete_field(api_client, data_fixture):
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json["error"] == "ERROR_CANNOT_DELETE_PRIMARY_FIELD"


@pytest.mark.django_db
def test_unique_row_values(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token(
        email="11@11.com", password="password", first_name="abcd"
    )
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=0, name="Letter")
    grid = data_fixture.create_grid_view(table=table)
    model = grid.table.get_model()

    url = reverse(
        "api:database:fields:unique_row_values", kwargs={"field_id": text_field.id}
    )

    # Check for empty values
    response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
    response_json = response.json()

    assert response.status_code == HTTP_200_OK
    assert response_json["values"] == []

    # Check that values are sorted by frequency
    values = ["A", "B", "B", "B", "C", "C"]
    for value in values:
        model.objects.create(**{f"field_{text_field.id}": value})

    response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
    response_json = response.json()

    assert response.status_code == HTTP_200_OK
    assert response_json["values"] == ["B", "C", "A"]

    # Check that limit is working
    response = api_client.get(url, {"limit": 1}, HTTP_AUTHORIZATION=f"JWT {token}")
    response_json = response.json()

    assert response.status_code == HTTP_200_OK
    assert len(response_json["values"]) == 1

    # Check for non-existent field
    url = reverse("api:database:fields:unique_row_values", kwargs={"field_id": 9999})
    response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
    assert response.status_code == HTTP_404_NOT_FOUND


@pytest.mark.django_db
def test_unique_row_values_splitted_by_comma(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token(
        email="11@11.com", password="password", first_name="abcd"
    )
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=0, name="Letter")
    grid = data_fixture.create_grid_view(table=table)
    model = grid.table.get_model()

    # Check that values are sorted by frequency
    values = ["A,B", "C,D,E", "F,E", "G,E", "E", "F", "E,E"]
    for value in values:
        model.objects.create(**{f"field_{text_field.id}": value})

    url = reverse(
        "api:database:fields:unique_row_values", kwargs={"field_id": text_field.id}
    )
    response = api_client.get(
        url, {"split_comma_separated": "true"}, HTTP_AUTHORIZATION=f"JWT {token}"
    )
    response_json = response.json()

    assert response.status_code == HTTP_200_OK
    assert response_json["values"] == ["E", "F", "C", "D", "B", "G", "A"]


@pytest.mark.django_db
def test_unique_row_values_incompatible_field_type(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token(
        email="11@11.com", password="password", first_name="abcd"
    )
    table = data_fixture.create_database_table(user=user)
    # The file field is not compatible.
    file_field = data_fixture.create_file_field(table=table, order=0)

    url = reverse(
        "api:database:fields:unique_row_values", kwargs={"field_id": file_field.id}
    )
    response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json["error"] == "ERROR_INCOMPATIBLE_FIELD_TYPE_FOR_UNIQUE_VALUES"

@@ -18,6 +18,7 @@ from baserow.contrib.database.fields.exceptions import (
    MaxFieldLimitExceeded,
    FieldWithSameNameAlreadyExists,
    ReservedBaserowFieldNameException,
    IncompatibleFieldTypeForUniqueValues,
)
from baserow.contrib.database.fields.field_helpers import (
    construct_all_possible_field_kwargs,

@@ -1072,3 +1073,85 @@ def test_can_convert_formula_to_numeric_field(data_fixture):
    assert Field.objects.all().count() == 1
    assert NumberField.objects.all().count() == 1
    assert FormulaField.objects.all().count() == 0


@pytest.mark.django_db
def test_get_unique_row_values(data_fixture):
    table = data_fixture.create_database_table()
    text_field = data_fixture.create_text_field(table=table, name="text")
    file_field = data_fixture.create_file_field(table=table, name="file")

    model = table.get_model(attribute_names=True)
    model.objects.create(text="value5")
    model.objects.create(text="value1")
    model.objects.create(text="value1,value2")
    model.objects.create(text="value2,value3")
    model.objects.create(text="value4")
    model.objects.create(text="value5")
    model.objects.create(text="value5")
    model.objects.create(text="value3,value5")
    model.objects.create(text="value3,value5")
    model.objects.create(text="")
    model.objects.create(text=None)

    handler = FieldHandler()

    with pytest.raises(IncompatibleFieldTypeForUniqueValues):
        handler.get_unique_row_values(field=file_field, limit=10)

    values = list(handler.get_unique_row_values(field=text_field, limit=10))
    assert values == [
        "value5",
        "value3,value5",
        "value2,value3",
        "value1,value2",
        "value4",
        "value1",
    ]

    values = list(handler.get_unique_row_values(field=text_field, limit=2))
    assert values == ["value5", "value3,value5"]

    values = list(
        handler.get_unique_row_values(
            field=text_field, limit=10, split_comma_separated=True
        )
    )
    assert values == ["value5", "value3", "value2", "value1", "value4"]

    values = list(
        handler.get_unique_row_values(
            field=text_field, limit=2, split_comma_separated=True
        )
    )
    assert values == ["value5", "value3"]


@pytest.mark.django_db
def test_get_unique_row_values_single_select(data_fixture):
    table = data_fixture.create_database_table()
    single_select_field = data_fixture.create_single_select_field(
        table=table, name="single_select"
    )
    option_1 = data_fixture.create_select_option(
        field=single_select_field, value="Option 1"
    )
    option_2 = data_fixture.create_select_option(
        field=single_select_field, value="Option 2"
    )

    model = table.get_model(attribute_names=True)
    model.objects.create(singleselect=option_1)
    model.objects.create(singleselect=option_2)
    model.objects.create(singleselect=option_1)
    model.objects.create(singleselect=option_2)
    model.objects.create(singleselect=option_2)
    model.objects.create(singleselect=option_2)
    model.objects.create()

    handler = FieldHandler()

    # By testing the single select field, we actually test if the
    # `get_alter_column_prepare_old_value` method is being used correctly.
    values = list(handler.get_unique_row_values(field=single_select_field, limit=10))
    assert values == ["Option 2", "Option 1"]

@@ -977,10 +977,16 @@ def test_converting_multiple_select_field_value(
        [option_1.value, option_2.value]
    )

-    # converting back to text field should split by comma and
-    # create the necessary select_options
+    # Converting back to multiple select using the unique row values as input,
+    # should automatically add the right options.
+    unique_values = field_handler.get_unique_row_values(
+        field=text_field, limit=10, split_comma_separated=True
+    )
    multiple_select_field = field_handler.update_field(
-        user=user, field=text_field, new_type_name="multiple_select"
+        user=user,
+        field=text_field,
+        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
    )
    model = table.get_model()
    rows = model.objects.all()

@@ -1066,7 +1072,15 @@ def test_conversion_number_to_multiple_select_field(
    assert NumberField.objects.all().first().id == field.id

-    field_handler.update_field(user=user, field=field, new_type_name="multiple_select")
+    unique_values = field_handler.get_unique_row_values(
+        field=field, limit=10, split_comma_separated=True
+    )
+    field_handler.update_field(
+        user=user,
+        field=field,
+        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
+    )

    field_type = field_type_registry.get_by_model(field)
    select_options = field.select_options.all()

@@ -1175,7 +1189,15 @@ def test_conversion_email_to_multiple_select_field(data_fixture):
    assert EmailField.objects.all().first().id == field.id

-    field_handler.update_field(user=user, field=field, new_type_name="multiple_select")
+    unique_values = field_handler.get_unique_row_values(
+        field=field, limit=10, split_comma_separated=True
+    )
+    field_handler.update_field(
+        user=user,
+        field=field,
+        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
+    )

    field_type = field_type_registry.get_by_model(field)
    select_options = field.select_options.all()

@@ -1332,12 +1354,21 @@ def test_conversion_date_to_multiple_select_field(data_fixture):
        "2021-08-31 11:00",
    ]

-    for field in all_fields:
+    for index, field in enumerate(all_fields):
        field_handler.update_field(
-            user=user, field=field, new_type_name="multiple_select"
+            user=user,
+            field=field,
+            new_type_name="multiple_select",
+            **{
+                "type": "multiple_select",
+                "select_options": [{"value": all_results[index], "color": "red"}],
+            },
        )

-        field_type = field_type_registry.get_by_model(field)
+        field_model = field_handler.get_field(field.id)
+        field_type = field_type_registry.get_by_model(field_model.specific_class)
+        # Update field value after type change
+        # all_fields[index] = field_model.specific
        select_options = field.select_options.all()
        assert field_type.type == "multiple_select"
        assert len(select_options) == 1

@@ -1409,18 +1440,25 @@ def test_convert_long_text_to_multiple_select(data_fixture):
    field = field_handler.create_field(
        user=user, table=table, type_name="long_text", name="Text"
    )

+    field_value = "This is a description, with several, commas."
    row_handler.create_row(
        user=user,
        table=table,
-        values={f"field_{field.id}": "This is a description, with several, commas."},
+        values={f"field_{field.id}": field_value},
    )

    multiple_select_field = field_handler.update_field(
-        user=user, field=field, new_type_name="multiple_select"
+        user=user,
+        field=field,
+        new_type_name="multiple_select",
+        **{
+            "type": "multiple_select",
+            "select_options": [
+                {"value": value, "color": "red"} for value in field_value.split(", ")
+            ],
+        },
    )

    assert len(SelectOption.objects.all()) == 3
    assert len(field.select_options.all()) == 3
    model = table.get_model()
    rows = model.objects.all()

@@ -1586,8 +1624,14 @@ def test_convert_multiple_select_to_text_with_comma_and_quotes(data_fixture):
    assert cell_6 == '"Option 3,",'

    # converting back to multiple select should create 'Option 3,' without quotes
+    unique_values = field_handler.get_unique_row_values(
+        field=field, limit=10, split_comma_separated=True
+    )
    field = field_handler.update_field(
-        user=user, field=field, new_type_name="multiple_select"
+        user=user,
+        field=field,
+        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
    )
    assert len(SelectOption.objects.all()) == 4

@@ -1750,10 +1794,14 @@ def test_conversion_to_multiple_select_with_more_than_threshold_options_in_extra
        },
    )

+    unique_values = field_handler.get_unique_row_values(
+        field=field_1, limit=200, split_comma_separated=True
+    )
    field_handler.update_field(
        user=user,
        field=field_1,
        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
    )

    field_type = field_type_registry.get_by_model(field_1)

@@ -1895,10 +1943,16 @@ def test_conversion_to_multiple_select_with_option_value_too_large(
        },
    )

+    unique_values = field_handler.get_unique_row_values(
+        field=field_1, limit=10, split_comma_separated=True
+    )
    field_handler.update_field(
        user=user,
        field=field_1,
        new_type_name="multiple_select",
+        select_options=[
+            {"value": value[:255], "color": "blue"} for value in unique_values
+        ],
    )

    field_type = field_type_registry.get_by_model(field_1)

@@ -1952,10 +2006,14 @@ def test_conversion_to_multiple_select_with_same_option_value_on_same_row(
        },
    )

+    unique_values = field_handler.get_unique_row_values(
+        field=field_1, limit=10, split_comma_separated=True
+    )
    field_handler.update_field(
        user=user,
        field=field_1,
        new_type_name="multiple_select",
+        select_options=[{"value": value, "color": "blue"} for value in unique_values],
    )

    field_type = field_type_registry.get_by_model(field_1)

@@ -19,6 +19,7 @@
* Added `is days ago` filter to date field.
* Fixed a bug that made it possible to delete created on/modified by fields on the web frontend.
* Allow the setting of max request page size via environment variable.
* Added select option suggestions when converting to a select field.
* Introduced read only lookup of foreign row by clicking on a link row relationship in
  the grid view row modal.
* Boolean field converts the word `checked` to `True` value.

@@ -68,6 +68,7 @@
      :is="getFormComponent(values.type)"
      ref="childForm"
      :table="table"
      :field-type="values.type"
      :name="values.name"
      :default-values="defaultValues"
      @validate="$v.$touch"

@@ -70,9 +70,9 @@ export default {
      this.value.splice(index, 1)
      this.$emit('input', this.value)
    },
-    add() {
+    add(optionValue = '') {
      this.value.push({
-        value: '',
+        value: optionValue,
        color: randomColor(),
        id: this.lastSeenId,
      })

@@ -1,6 +1,7 @@
<template>
  <div>
-    <div class="control">
+    <div v-if="loading" class="loading"></div>
+    <div v-else class="control">
      <label class="control__label control__label--small">{{
        $t('fieldSingleSelectSubForm.optionsLabel')
      }}</label>

@@ -15,9 +16,12 @@
</template>

<script>
+import { notifyIf } from '@baserow/modules/core/utils/error'
import form from '@baserow/modules/core/mixins/form'
import fieldSubForm from '@baserow/modules/database/mixins/fieldSubForm'
import FieldSelectOptions from '@baserow/modules/database/components/field/FieldSelectOptions'
+import FieldService from '@baserow/modules/database/services/field'
+import { randomColor } from '@baserow/modules/core/utils/colors'

export default {
  name: 'FieldSelectOptionsSubForm',

@@ -25,17 +29,61 @@ export default {
  mixins: [form, fieldSubForm],
  data() {
    return {
      loading: false,
      allowedValues: ['select_options'],
      values: {
        select_options: [],
      },
    }
  },
  watch: {
    fieldType() {
      this.checkFetchOptions()
    },
  },
  mounted() {
    this.checkFetchOptions()
  },
  methods: {
    isFormValid() {
      this.$refs.selectOptions.$v.$touch()
      return !this.$refs.selectOptions.$v.$invalid
    },
    checkFetchOptions() {
      if (
        this.fieldType !== '' &&
        this.defaultValues.type !== this.fieldType &&
        this.$registry
          .get('field', this.defaultValues.type)
          .shouldFetchFieldSelectOptions()
      ) {
        this.fetchOptions()
      }
    },
    async fetchOptions() {
      this.loading = true
      const splitCommaSeparated = this.$registry
        .get('field', this.fieldType)
        .acceptSplitCommaSeparatedSelectOptions()
      this.values.select_options = []
      try {
        const { data } = await FieldService(this.$client).getUniqueRowValues(
          this._props.defaultValues.id,
          10,
          splitCommaSeparated
        )
        for (const value of data.values) {
          this.values.select_options.push({
            value,
            color: randomColor(),
          })
        }
      } catch (e) {
        notifyIf(e)
      }
      this.loading = false
    },
  },
}
</script>

@@ -497,6 +497,24 @@ export class FieldType extends Registerable {
  canBeReferencedByFormulaField() {
    return false
  }

  /**
   * Determines whether a field type should automatically fetch select options
   * when switching to a field type that supports select options, like the single or
   * multiple select.
   */
  shouldFetchFieldSelectOptions() {
    return true
  }

  /**
   * Indicates whether this field type accepts single select suggestions split by
   * a comma. This is for example the case with a multiple select field because it
   * splits old values by comma on conversion.
   */
  acceptSplitCommaSeparatedSelectOptions() {
    return false
  }
}

export class TextFieldType extends FieldType {

@@ -758,6 +776,10 @@ export class LinkRowFieldType extends FieldType {
  canBeReferencedByFormulaField() {
    return true
  }

  shouldFetchFieldSelectOptions() {
    return false
  }
}

export class NumberFieldType extends FieldType {

@@ -1646,6 +1668,10 @@ export class FileFieldType extends FieldType {
  getContainsFilterFunction() {
    return filenameContainsFilter
  }

  shouldFetchFieldSelectOptions() {
    return false
  }
}

export class SingleSelectFieldType extends FieldType {

@@ -1787,6 +1813,10 @@ export class SingleSelectFieldType extends FieldType {
  canBeReferencedByFormulaField() {
    return true
  }

  shouldFetchFieldSelectOptions() {
    return false
  }
}

export class MultipleSelectFieldType extends FieldType {

@@ -1935,6 +1965,14 @@ export class MultipleSelectFieldType extends FieldType {
  getEmptyValue() {
    return []
  }

  shouldFetchFieldSelectOptions() {
    return false
  }

  acceptSplitCommaSeparatedSelectOptions() {
    return true
  }
}

export class PhoneNumberFieldType extends FieldType {

@@ -2192,4 +2230,8 @@ export class LookupFieldType extends FormulaFieldType {
    }
    return ''
  }

  shouldFetchFieldSelectOptions() {
    return false
  }
}

@@ -4,5 +4,10 @@ export default {
      type: Object,
      required: true,
    },
    fieldType: {
      type: String,
      required: false,
      default: '',
    },
  },
}

@@ -9,6 +9,22 @@ export default (client) => {
    get(fieldId) {
      return client.get(`/database/fields/${fieldId}/`)
    },
    getUniqueRowValues(fieldId, limit = 10, splitCommaSeparated = false) {
      const config = {
        params: {
          limit,
        },
      }

      if (splitCommaSeparated) {
        config.params.split_comma_separated = 'true'
      }

      return client.get(
        `/database/fields/${fieldId}/unique_row_values/`,
        config
      )
    },
    update(fieldId, values) {
      return client.patch(`/database/fields/${fieldId}/`, values)
    },