
Merge remote-tracking branch 'origin/develop' into even-more-docs-fixes

Nigel Gott 2022-03-02 13:02:09 +00:00
commit 76d0147cd5
38 changed files with 1105 additions and 209 deletions
app.json
backend
docker
src/baserow
config/settings
contrib/database
tests/baserow/contrib/database
changelog.md
deploy
all-in-one/supervisor
vagrant
dev.sh
docs/installation
web-frontend
modules
test
server/core/pages
unit/database/__snapshots__


@ -31,11 +31,6 @@
"description": "The amount of workers per dyno. This value must be `1` if you want to run the free plan.",
"value": "1",
"required": true
},
"SYNC_TEMPLATES_ON_STARTUP": {
"description": "Automatically install's baserow's built in templates when set to 'true'. Disabled by default because the free Heroku database only supports 10k rows, Baserows templates if installed will use 12k rows and go over the limit. Change this to 'true' without the quotes if you have paid for the Heroku database upgrade and can store more than 10k rows. ",
"value": "false",
"required": false
}
}
}


@ -53,7 +53,10 @@ try:
host="${DATABASE_HOST}",
port="${DATABASE_PORT}",
)
except psycopg2.OperationalError:
except psycopg2.OperationalError as e:
print("Error: Failed to connect to the postgresql database at ${DATABASE_HOST}")
print("Please see the error below for more details:")
print(e)
sys.exit(-1)
sys.exit(0)
END
@ -66,7 +69,10 @@ try:
psycopg2.connect(
"${DATABASE_URL}"
)
except psycopg2.OperationalError:
except psycopg2.OperationalError as e:
print("Error: Failed to connect to the postgresql database at DATABASE_URL")
print("Please see the error below for more details:")
print(e)
sys.exit(-1)
sys.exit(0)
END


@ -402,7 +402,7 @@ MEDIA_ROOT = os.getenv("MEDIA_ROOT", "/baserow/media")
# Indicates the directory where the user files and user thumbnails are stored.
USER_FILES_DIRECTORY = "user_files"
USER_THUMBNAILS_DIRECTORY = "thumbnails"
USER_FILE_SIZE_LIMIT = 1024 * 1024 * 20 # 20MB
USER_FILE_SIZE_LIMIT = 1024 * 1024 * 1024 * 1024 # ~1TB
EXPORT_FILES_DIRECTORY = "export_files"
EXPORT_CLEANUP_INTERVAL_MINUTES = 5


@ -15,10 +15,12 @@ from baserow.api.schemas import get_error_schema
from baserow.api.trash.errors import ERROR_CANNOT_DELETE_ALREADY_DELETED_ITEM
from baserow.api.user_files.errors import ERROR_USER_FILE_DOES_NOT_EXIST
from baserow.api.utils import validate_data
from baserow.contrib.database.api.utils import get_include_exclude_fields
from baserow.contrib.database.api.fields.errors import (
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
ERROR_ORDER_BY_FIELD_NOT_FOUND,
ERROR_FILTER_FIELD_NOT_FOUND,
ERROR_FIELD_DOES_NOT_EXIST,
)
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.serializers import (
@ -35,6 +37,7 @@ from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotFound,
OrderByFieldNotPossible,
FilterFieldNotFound,
FieldDoesNotExist,
)
from baserow.contrib.database.rows.exceptions import RowDoesNotExist
from baserow.contrib.database.rows.handler import RowHandler
@ -221,7 +224,9 @@ class RowsView(APIView):
]
),
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
404: get_error_schema(
["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_FIELD_DOES_NOT_EXIST"]
),
},
)
@map_exceptions(
@ -232,6 +237,7 @@ class RowsView(APIView):
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
}
@ -252,7 +258,7 @@ class RowsView(APIView):
include = query_params.get("include")
exclude = query_params.get("exclude")
user_field_names = query_params.get("user_field_names")
fields = RowHandler().get_include_exclude_fields(
fields = get_include_exclude_fields(
table, include, exclude, user_field_names=user_field_names
)


@ -0,0 +1,120 @@
import re
from baserow.core.utils import split_comma_separated_string
from baserow.contrib.database.fields.models import Field
def get_include_exclude_field_ids(table, include=None, exclude=None):
"""
Returns a list containing the field ids based on the value
of the include and exclude parameters.
:param table: The table where to select the fields from. Field id's that are
not in the table won't be included.
:type table: Table
:param include: The field ids that must be included. Only the provided ones
are going to be in the returned queryset. Multiple can be provided
separated by comma
:type include: Optional[str]
:param exclude: The field ids that must be excluded. Only the ones that are not
provided are going to be in the returned queryset. Multiple can be provided
separated by comma.
:type exclude: Optional[str]
:rtype: None or List[str]
"""
fields = get_include_exclude_fields(table, include, exclude)
field_ids = None
if include is not None or exclude is not None:
if fields:
field_ids = [field.get("id") for field in fields.values()]
else:
field_ids = []
return field_ids
def get_include_exclude_fields(
table, include=None, exclude=None, user_field_names=False
):
"""
Returns a field queryset containing the requested fields based on the include
and exclude parameters.
:param table: The table where to select the fields from. Field id's that are
not in the table won't be included.
:type table: Table
:param include: The field ids that must be included. Only the provided ones
are going to be in the returned queryset. Multiple can be provided
separated by comma
:type include: Optional[str]
:param exclude: The field ids that must be excluded. Only the ones that are not
provided are going to be in the returned queryset. Multiple can be provided
separated by comma.
:type exclude: Optional[str]
:return: A Field's QuerySet containing the allowed fields based on the provided
input.
:param user_field_names: If true then the include and exclude parameters are
treated as a comma separated list of user field names instead of ids
:type user_field_names: bool
:rtype: QuerySet
"""
queryset = Field.objects.filter(table=table)
if user_field_names:
includes = extract_field_names_from_string(include)
excludes = extract_field_names_from_string(exclude)
filter_type = "name__in"
else:
includes = extract_field_ids_from_string(include)
excludes = extract_field_ids_from_string(exclude)
filter_type = "id__in"
if len(includes) == 0 and len(excludes) == 0:
return None
if len(includes) > 0:
queryset = queryset.filter(**{filter_type: includes})
if len(excludes) > 0:
queryset = queryset.exclude(**{filter_type: excludes})
return queryset
# noinspection PyMethodMayBeStatic
def extract_field_names_from_string(value):
"""
Given a comma separated string of field names this function will split the
string into a list of individual field names. Field names that themselves
contain commas should be escaped with quotes.
:param value: The string to split into a list of field names.
:return: A list of field names.
"""
if not value:
return []
return split_comma_separated_string(value)
def extract_field_ids_from_string(value):
"""
Extracts the field ids from a string. Multiple ids can be separated by a comma.
For example if you provide 'field_1,field_2' then [1, 2] is returned.
:param value: A string containing multiple ids separated by comma.
:type value: str
:return: A list containing the field ids as integers.
:rtype: list
"""
if not value:
return []
return [
int(re.sub("[^0-9]", "", str(v)))
for v in value.split(",")
if any(c.isdigit() for c in v)
]
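
A minimal sketch of how the string parsing helpers above are expected to behave, mirroring the docstrings and the tests further down in this commit; it assumes a configured Baserow backend environment so the module can be imported:

```python
from baserow.contrib.database.api.utils import (
    extract_field_ids_from_string,
    extract_field_names_from_string,
)

# Ids are pulled out of every comma separated token that contains digits.
assert extract_field_ids_from_string("field_1,field_2") == [1, 2]
assert extract_field_ids_from_string("field_22,test_8,999") == [22, 8, 999]
assert extract_field_ids_from_string("not,something") == []

# Names are split on commas; quotes keep a name with special characters intact.
assert extract_field_names_from_string('first,"With Space",Test') == [
    "first",
    "With Space",
    "Test",
]
```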


@ -19,6 +19,7 @@ from baserow.contrib.database.api.rows.serializers import (
get_row_serializer_class,
RowSerializer,
)
from baserow.contrib.database.api.utils import get_include_exclude_field_ids
from baserow.contrib.database.api.views.errors import ERROR_VIEW_DOES_NOT_EXIST
from baserow.contrib.database.api.views.grid.serializers import (
GridViewFieldOptionsSerializer,
@ -147,6 +148,32 @@ class GridViewView(APIView):
description="If provided only rows with data that matches the search "
"query are going to be returned.",
),
OpenApiParameter(
name="include_fields",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
description=(
"All the fields are included in the response by default. You can "
"select a subset of fields by providing the fields query "
"parameter. If you for example provide the following GET "
"parameter `include_fields=field_1,field_2` then only the fields "
"with id `1` and id `2` are going to be selected and included in "
"the response."
),
),
OpenApiParameter(
name="exclude_fields",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
description=(
"All the fields are included in the response by default. You can "
"select a subset of fields by providing the exclude_fields query "
"parameter. If you for example provide the following GET "
"parameter `exclude_fields=field_1,field_2` then the fields with "
"id `1` and id `2` are going to be excluded from the selection and "
"response. "
),
),
],
tags=["Database table grid view"],
operation_id="list_database_table_grid_view_rows",
@ -179,13 +206,16 @@ class GridViewView(APIView):
serializer_name="PaginationSerializerWithGridViewFieldOptions",
),
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
404: get_error_schema(["ERROR_GRID_DOES_NOT_EXIST"]),
404: get_error_schema(
["ERROR_GRID_DOES_NOT_EXIST", "ERROR_FIELD_DOES_NOT_EXIST"]
),
},
)
@map_exceptions(
{
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST,
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
}
)
@allowed_includes("field_options", "row_metadata")
@ -200,6 +230,8 @@ class GridViewView(APIView):
"""
search = request.GET.get("search")
include_fields = request.GET.get("include_fields")
exclude_fields = request.GET.get("exclude_fields")
view_handler = ViewHandler()
view = view_handler.get_view(view_id, GridView)
@ -208,6 +240,10 @@ class GridViewView(APIView):
view.table.database.group.has_user(
request.user, raise_error=True, allow_if_template=True
)
field_ids = get_include_exclude_field_ids(
view.table, include_fields, exclude_fields
)
model = view.table.get_model()
queryset = view_handler.get_queryset(view, search, model)
@ -221,7 +257,10 @@ class GridViewView(APIView):
page = paginator.paginate_queryset(queryset, request, self)
serializer_class = get_row_serializer_class(
model, RowSerializer, is_response=True
model,
RowSerializer,
is_response=True,
field_ids=field_ids,
)
serializer = serializer_class(page, many=True)
@ -521,6 +560,32 @@ class PublicGridViewRowsView(APIView):
"This works only if two or more filters are provided."
),
),
OpenApiParameter(
name="include_fields",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
description=(
"All the fields are included in the response by default. You can "
"select a subset of fields by providing the fields query "
"parameter. If you for example provide the following GET "
"parameter `include_fields=field_1,field_2` then only the fields "
"with id `1` and id `2` are going to be selected and included in "
"the response."
),
),
OpenApiParameter(
name="exclude_fields",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
description=(
"All the fields are included in the response by default. You can "
"select a subset of fields by providing the exclude_fields query "
"parameter. If you for example provide the following GET "
"parameter `exclude_fields=field_1,field_2` then the fields with "
"id `1` and id `2` are going to be excluded from the selection and "
"response. "
),
),
],
tags=["Database table grid view"],
operation_id="public_list_database_table_grid_view_rows",
@ -548,7 +613,9 @@ class PublicGridViewRowsView(APIView):
serializer_name="PublicPaginationSerializerWithGridViewFieldOptions",
),
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
404: get_error_schema(["ERROR_GRID_DOES_NOT_EXIST"]),
404: get_error_schema(
["ERROR_GRID_DOES_NOT_EXIST", "ERROR_FIELD_DOES_NOT_EXIST"]
),
},
)
@map_exceptions(
@ -560,6 +627,7 @@ class PublicGridViewRowsView(APIView):
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
}
)
@allowed_includes("field_options")
@ -575,6 +643,8 @@ class PublicGridViewRowsView(APIView):
search = request.GET.get("search")
order_by = request.GET.get("order_by")
include_fields = request.GET.get("include_fields")
exclude_fields = request.GET.get("exclude_fields")
view_handler = ViewHandler()
view = view_handler.get_public_view_by_slug(request.user, slug, GridView)
@ -587,6 +657,10 @@ class PublicGridViewRowsView(APIView):
o.field_id for o in publicly_visible_field_options
}
field_ids = get_include_exclude_field_ids(
view.table, include_fields, exclude_fields
)
# We have to still make a model with all fields as the public rows should still
# be filtered by hidden fields.
model = view.table.get_model()
@ -620,9 +694,15 @@ class PublicGridViewRowsView(APIView):
else:
paginator = PageNumberPagination()
field_ids = (
list(set(field_ids) & set(publicly_visible_field_ids))
if field_ids
else publicly_visible_field_ids
)
page = paginator.paginate_queryset(queryset, request, self)
serializer_class = get_row_serializer_class(
model, RowSerializer, is_response=True, field_ids=publicly_visible_field_ids
model, RowSerializer, is_response=True, field_ids=field_ids
)
serializer = serializer_class(page, many=True)
response = paginator.get_paginated_response(serializer.data)
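
The `include_fields` and `exclude_fields` query parameters documented above can be combined on a single request. A minimal sketch using Python's `requests`, assuming a local Baserow instance, a grid view with id `1`, fields `1`–`3`, and a placeholder JWT token (all hypothetical values):

```python
import requests

# Hypothetical base URL, view id and token, purely for illustration.
url = "http://localhost/api/database/views/grid/1/"
headers = {"Authorization": "JWT <your-token>"}

# Request only field_1 and field_3; field_2 is excluded even if it exists.
params = {
    "include_fields": "field_1,field_3",
    "exclude_fields": "field_2",
}

response = requests.get(url, headers=headers, params=params)
rows = response.json()["results"]

# Each row still contains `id` and `order`, plus only the included fields,
# e.g. dict_keys(['id', 'order', 'field_1', 'field_3']).
print(rows[0].keys())
```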


@ -84,7 +84,7 @@ def rename_non_unique_names_in_table(
escaped_name = re.escape(new_name_prefix)
existing_collisions = set(
Field.objects.filter(table_id=table_id, name__regex=fr"^{escaped_name}_\d+$")
Field.objects.filter(table_id=table_id, name__regex=rf"^{escaped_name}_\d+$")
.order_by("name")
.distinct()
.values_list("name", flat=True)


@ -6,9 +6,7 @@ from django.db import transaction
from django.db.models import Max, F
from django.db.models.fields.related import ManyToManyField
from baserow.contrib.database.fields.models import Field
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import split_comma_separated_string
from .exceptions import RowDoesNotExist
from .signals import (
before_row_update,
@ -66,89 +64,6 @@ class RowHandler:
if str(key).isnumeric() or field_pattern.match(str(key))
]
def extract_field_ids_from_string(self, value):
"""
Extracts the field ids from a string. Multiple ids can be separated by a comma.
For example if you provide 'field_1,field_2' then [1, 2] is returned.
:param value: A string containing multiple ids separated by comma.
:type value: str
:return: A list containing the field ids as integers.
:rtype: list
"""
if not value:
return []
return [
int(re.sub("[^0-9]", "", str(v)))
for v in value.split(",")
if any(c.isdigit() for c in v)
]
def get_include_exclude_fields(
self, table, include=None, exclude=None, user_field_names=False
):
"""
Returns a field queryset containing the requested fields based on the value
and exclude parameter.
:param table: The table where to select the fields from. Field id's that are
not in the table won't be included.
:type table: Table
:param include: The field ids that must be included. Only the provided ones
are going to be in the returned queryset. Multiple can be provided
separated by comma
:type include: Optional[str]
:param exclude: The field ids that must be excluded. Only the ones that are not
provided are going to be in the returned queryset. Multiple can be provided
separated by comma.
:type exclude: Optional[str]
:return: A Field's QuerySet containing the allowed fields based on the provided
input.
:param user_field_names: If true then the value and exclude parameters are
retreated as a comma separated list of user field names instead of id's
:type user_field_names: bool
:rtype: QuerySet
"""
queryset = Field.objects.filter(table=table)
if user_field_names:
includes = self.extract_field_names_from_string(include)
excludes = self.extract_field_names_from_string(exclude)
filter_type = "name__in"
else:
includes = self.extract_field_ids_from_string(include)
excludes = self.extract_field_ids_from_string(exclude)
filter_type = "id__in"
if len(includes) == 0 and len(excludes) == 0:
return None
if len(includes) > 0:
queryset = queryset.filter(**{filter_type: includes})
if len(excludes) > 0:
queryset = queryset.exclude(**{filter_type: excludes})
return queryset
# noinspection PyMethodMayBeStatic
def extract_field_names_from_string(self, value):
"""
Given a comma separated string of field names this function will split the
string into a list of individual field names. For weird field names containing
commas etc the field should be escaped with quotes.
:param value: The string to split into a list of field names.
:return: A list of field names.
"""
if not value:
return []
return split_comma_separated_string(value)
def extract_manytomany_values(self, values, model):
"""
Extracts the ManyToMany values out of the values because they need to be


@ -1672,3 +1672,147 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture):
assert len(response_json["results"]) == 1
assert response_json["results"][0]["id"] == i2.id
assert response_json["results"][0]["value"] == "Test 2"
@pytest.mark.django_db
def test_list_rows_include_fields(api_client, data_fixture):
user, token = data_fixture.create_user_and_token(
email="test@test.nl", password="password", first_name="Test1"
)
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(database=table.database)
text_field = data_fixture.create_text_field(
table=table, order=0, name="Color", text_default="white"
)
number_field = data_fixture.create_number_field(
table=table, order=1, name="Horsepower"
)
boolean_field = data_fixture.create_boolean_field(
table=table, order=2, name="For sale"
)
link_row_field = FieldHandler().create_field(
user=user,
table=table,
type_name="link_row",
name="Link",
link_row_table=table_2,
)
primary_field = data_fixture.create_text_field(table=table_2, primary=True)
lookup_model = table_2.get_model()
i1 = lookup_model.objects.create(**{f"field_{primary_field.id}": "Test 1"})
i2 = lookup_model.objects.create(**{f"field_{primary_field.id}": "Test 2"})
i3 = lookup_model.objects.create(**{f"field_{primary_field.id}": "Test 3"})
grid = data_fixture.create_grid_view(table=table)
data_fixture.create_grid_view_field_option(grid, link_row_field, hidden=False)
model = grid.table.get_model()
row_1 = model.objects.create(
**{
f"field_{text_field.id}": "Green",
f"field_{number_field.id}": 10,
f"field_{boolean_field.id}": False,
}
)
getattr(row_1, f"field_{link_row_field.id}").add(i1.id)
row_2 = model.objects.create(
**{
f"field_{text_field.id}": "Orange",
f"field_{number_field.id}": 100,
f"field_{boolean_field.id}": True,
}
)
getattr(row_2, f"field_{link_row_field.id}").add(i2.id)
row_3 = model.objects.create(
**{
f"field_{text_field.id}": "Purple",
f"field_{number_field.id}": 1000,
f"field_{boolean_field.id}": False,
}
)
getattr(row_3, f"field_{link_row_field.id}").add(i3.id)
url = reverse("api:database:views:grid:list", kwargs={"view_id": grid.id})
response = api_client.get(
url,
{
"include_fields": f"\
field_{text_field.id},\
field_{number_field.id},\
field_{link_row_field.id}",
"exclude_fields": f"field_{number_field.id}",
},
**{"HTTP_AUTHORIZATION": f"JWT {token}"},
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
# Confirm that text_field is included
assert response_json["results"][0][f"field_{text_field.id}"] == "Green"
assert response_json["results"][1][f"field_{text_field.id}"] == "Orange"
assert response_json["results"][2][f"field_{text_field.id}"] == "Purple"
# Confirm that number_field is excluded
assert f"field_{number_field.id}" not in response_json["results"][0]
assert f"field_{number_field.id}" not in response_json["results"][1]
assert f"field_{number_field.id}" not in response_json["results"][2]
# Confirm that boolean_field is not returned
assert f"field_{boolean_field.id}" not in response_json["results"][0]
assert f"field_{boolean_field.id}" not in response_json["results"][1]
assert f"field_{boolean_field.id}" not in response_json["results"][2]
# Confirm that link_row_field is included
assert (
response_json["results"][0][f"field_{link_row_field.id}"][0]["value"]
== "Test 1"
)
assert (
response_json["results"][1][f"field_{link_row_field.id}"][0]["value"]
== "Test 2"
)
assert (
response_json["results"][2][f"field_{link_row_field.id}"][0]["value"]
== "Test 3"
)
# Confirm that id and order are still returned
assert "id" in response_json["results"][0]
assert "id" in response_json["results"][1]
assert "id" in response_json["results"][2]
assert "order" in response_json["results"][0]
assert "order" in response_json["results"][1]
assert "order" in response_json["results"][2]
# include_fields is empty
response = api_client.get(
url,
{"include_fields": ""},
**{"HTTP_AUTHORIZATION": f"JWT {token}"},
)
# Should return response with no fields
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert "id" in response_json["results"][0]
assert "order" in response_json["results"][0]
assert f"field_{text_field.id}" not in response_json["results"][0]
assert f"field_{number_field.id}" not in response_json["results"][0]
assert f"field_{boolean_field.id}" not in response_json["results"][0]
# Test invalid fields
response = api_client.get(
url,
{"include_fields": "field_9999"},
**{"HTTP_AUTHORIZATION": f"JWT {token}"},
)
# Should also return response with no fields
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert "id" in response_json["results"][0]
assert "id" in response_json["results"][0]
assert "order" in response_json["results"][0]
assert f"field_{text_field.id}" not in response_json["results"][0]
assert f"field_{number_field.id}" not in response_json["results"][0]
assert f"field_{boolean_field.id}" not in response_json["results"][0]


@ -9,6 +9,10 @@ from baserow.contrib.database.rows.exceptions import RowDoesNotExist
from baserow.contrib.database.rows.handler import RowHandler
from baserow.core.exceptions import UserNotInGroup
from baserow.core.trash.handler import TrashHandler
from baserow.contrib.database.api.utils import (
get_include_exclude_fields,
extract_field_ids_from_string,
)
def test_get_field_ids_from_dict():
@ -24,12 +28,11 @@ def test_get_field_ids_from_dict():
def test_extract_field_ids_from_string():
handler = RowHandler()
assert handler.extract_field_ids_from_string(None) == []
assert handler.extract_field_ids_from_string("not,something") == []
assert handler.extract_field_ids_from_string("field_1,field_2") == [1, 2]
assert handler.extract_field_ids_from_string("field_22,test_8,999") == [22, 8, 999]
assert handler.extract_field_ids_from_string("is,1,one") == [1]
assert extract_field_ids_from_string(None) == []
assert extract_field_ids_from_string("not,something") == []
assert extract_field_ids_from_string("field_1,field_2") == [1, 2]
assert extract_field_ids_from_string("field_22,test_8,999") == [22, 8, 999]
assert extract_field_ids_from_string("is,1,one") == [1]
@pytest.mark.django_db
@ -40,42 +43,35 @@ def test_get_include_exclude_fields(data_fixture):
field_2 = data_fixture.create_text_field(table=table, order=2)
field_3 = data_fixture.create_text_field(table=table_2, order=3)
row_handler = RowHandler()
assert get_include_exclude_fields(table, include=None, exclude=None) is None
assert (
row_handler.get_include_exclude_fields(table, include=None, exclude=None)
is None
)
assert get_include_exclude_fields(table, include="", exclude="") is None
assert row_handler.get_include_exclude_fields(table, include="", exclude="") is None
fields = row_handler.get_include_exclude_fields(table, f"field_{field_1.id}")
fields = get_include_exclude_fields(table, f"field_{field_1.id}")
assert len(fields) == 1
assert fields[0].id == field_1.id
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, f"field_{field_1.id},field_9999,field_{field_2.id}"
)
assert len(fields) == 2
assert fields[0].id == field_1.id
assert fields[1].id == field_2.id
fields = row_handler.get_include_exclude_fields(
table, None, f"field_{field_1.id},field_9999"
)
fields = get_include_exclude_fields(table, None, f"field_{field_1.id},field_9999")
assert len(fields) == 1
assert fields[0].id == field_2.id
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, f"field_{field_1.id},field_{field_2.id}", f"field_{field_1.id}"
)
assert len(fields) == 1
assert fields[0].id == field_2.id
fields = row_handler.get_include_exclude_fields(table, f"field_{field_3.id}")
fields = get_include_exclude_fields(table, f"field_{field_3.id}")
assert len(fields) == 0
fields = row_handler.get_include_exclude_fields(table, None, f"field_{field_3.id}")
fields = get_include_exclude_fields(table, None, f"field_{field_3.id}")
assert len(fields) == 2
@ -500,30 +496,26 @@ def test_get_include_exclude_fields_with_user_field_names(data_fixture):
row_handler = RowHandler()
assert (
row_handler.get_include_exclude_fields(
get_include_exclude_fields(
table, include=None, exclude=None, user_field_names=True
)
is None
)
assert (
row_handler.get_include_exclude_fields(
table, include="", exclude="", user_field_names=True
)
get_include_exclude_fields(table, include="", exclude="", user_field_names=True)
is None
)
fields = row_handler.get_include_exclude_fields(
table, include="Test_2", user_field_names=True
)
fields = get_include_exclude_fields(table, include="Test_2", user_field_names=True)
assert list(fields.values_list("name", flat=True)) == ["Test_2"]
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, "first,field_9999,Test", user_field_names=True
)
assert list(fields.values_list("name", flat=True)) == ["first", "Test"]
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, None, "first,field_9999", user_field_names=True
)
assert list(fields.values_list("name", flat=True)) == [
@ -532,12 +524,12 @@ def test_get_include_exclude_fields_with_user_field_names(data_fixture):
"With Space",
]
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, "first,Test", "first", user_field_names=True
)
assert list(fields.values_list("name", flat=True)) == ["Test"]
fields = row_handler.get_include_exclude_fields(
fields = get_include_exclude_fields(
table, 'first,"With Space",Test', user_field_names=True
)
assert list(fields.values_list("name", flat=True)) == [


@ -26,6 +26,7 @@
* Add footer aggregations to grid view
* Hide "Export view" button if there is no valid exporter available
* Fix Django's default index naming scheme causing index name collisions.
* Added multi-cell selection and copying.
* Add "insert left" and "insert right" field buttons to grid view head context buttons.
* Workaround bug in Django's schema editor sometimes causing incorrect transaction
rollbacks resulting in the connection to the database becoming unusable.
@ -39,6 +40,7 @@
and BASEROW_CADDY_ADDRESSES now to configure a domain with optional auto https.
* Add health checks for all services.
* Ensure error logging is enabled in the Backend even when DEBUG is off.
* Removed upload file size limit.
## Released (2022-01-13 1.8.2)


@ -24,7 +24,7 @@ baserow_ready() {
wait_for_baserow() {
until baserow_ready; do
echo 'Waiting for Baserow to become available, this might take 30+ seconds...'
sleep 1
sleep 10
done
echo "======================================================================="
echo -e "\e[32mBaserow is now available at ${BASEROW_PUBLIC_URL}\e[0m"


@ -5,7 +5,7 @@ apt-get update
apt-get install git -y
cd ~
cp /local_baserow_repo/docs/guides/installation/install-on-ubuntu.md install-on-ubuntu.md
cp /local_baserow_repo/docs/guides/installation/old-install-on-ubuntu.md install-on-ubuntu.md
# Process the guide to only extract the bash we want
sed -n '/## HTTPS \/ SSL Support/q;p' install-on-ubuntu.md | # We don't want to setup https or do any upgrade scripts which follow

dev.sh

@ -316,6 +316,9 @@ export DOCKER_BUILDKIT=1
export WEB_FRONTEND_PORT=${WEB_FRONTEND_PORT:-3000}
export BASEROW_PUBLIC_URL=${BASEROW_PUBLIC_URL:-http://localhost:$WEB_FRONTEND_PORT}
export REDIS_PASSWORD=baserow
export DATABASE_PASSWORD=baserow
export SECRET_KEY=baserow
echo "./dev.sh running docker-compose commands:


@ -35,6 +35,7 @@ The installation methods referred to in the variable descriptions are:
| Name | Description | Defaults |
| ------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| SECRET\_KEY | The Secret key used by Django for cryptographic signing such as generating secure password reset links and managing sessions. See [https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-SECRET\_KEY](https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-SECRET_KEY) for more details | Required to be set by you in the docker-compose and standalone installs. Automatically generated by the baserow/baserow image if not provided and stored in /baserow/data/.secret. |
| SECRET\_KEY\_FILE | **Only supported by the `baserow/baserow` image** If set Baserow will attempt to read the above SECRET\_KEY from this file location instead. | |
| BASEROW\_BACKEND\_LOG\_LEVEL | The default log level used by the backend, supports ERROR, WARNING, INFO, DEBUG, TRACE | INFO |
| BASEROW\_BACKEND\_DATABASE\_LOG\_LEVEL | The default log level used for database related logs in the backend. Supports the same values as the normal log level. If you also enable BASEROW\_BACKEND\_DEBUG and set this to DEBUG you will be able to see all SQL queries in the backend logs. | ERROR |
| BASEROW\_BACKEND\_DEBUG | If set to “on” then will enable the non production safe debug mode for the Baserow django backend. Defaults to “off” | |
@ -49,6 +50,7 @@ The installation methods referred to in the variable descriptions are:
| DATABASE\_PORT | The port Baserow will use when trying to connect to the postgres database at DATABASE\_HOST | 5432 |
| DATABASE\_NAME | The database name Baserow will use to store data in. | baserow |
| DATABASE\_PASSWORD | The password of DATABASE\_USER on the postgres server at DATABASE\_HOST | Required to be set by you in the docker-compose and standalone installs. Automatically generated by the baserow/baserow image if not provided and stored in /baserow/data/.pgpass. |
| DATABASE\_PASSWORD\_FILE | **Only supported by the `baserow/baserow` image** If set Baserow will attempt to read the above DATABASE\_PASSWORD from this file location instead. | |
| DATABASE\_URL | Alternatively to setting the individual DATABASE\_ parameters above instead you can provide one standard postgres connection string in the format of: postgresql://\[user\[:password\]@\]\[netloc\]\[:port\]\[/dbname\]\[?param1=value1&…\] | |
| | | |
| MIGRATE\_ON\_STARTUP | If set to “true” when the Baserow backend service starts up it will automatically apply database migrations. Set to any other value to disable. If you disable this then you must remember to manually apply the database migrations when upgrading Baserow to a new version. | true |
@ -63,6 +65,7 @@ The installation methods referred to in the variable descriptions are:
| REDIS\_PORT | The port Baserow will use when trying to connect to the redis database at REDIS\_HOST | 6379 |
| REDIS\_USER | The username of the redis user Baserow will use to connect to the redis at REDIS\_HOST | |
| REDIS\_PASSWORD | The password of REDIS\_USER on the redis server at REDIS\_HOST | Required to be set by you in the docker-compose and standalone installs. Automatically generated by the baserow/baserow image if not provided and stored in /baserow/data/.redispass. |
| REDIS\_PASSWORD\_FILE | **Only supported by the `baserow/baserow` image** If set Baserow will attempt to read the above REDIS\_PASSWORD from this file location instead. | |
| REDIS\_PROTOCOL | The redis protocol used when connecting to the redis at REDIS\_HOST Can either be redis or rediss. | redis |
| | | |
| REDIS\_URL | Alternatively to setting the individual REDIS\_ parameters above instead you can provide one standard redis connection string in the format of: redis://:\[password\]@\[redishost\]:\[redisport\] | |


@ -51,16 +51,81 @@ asking any users who wish to run Baserow on Ubuntu to instead install Docker and
official Docker images to run Baserow. This guide explains how to migrate an existing
Baserow Ubuntu install to use our official Docker images.
> If you were previously using a separate api.your_baserow_server.com domain, this is no
> longer needed. Baserow will now work on a single domain, accessing the API at
> YOUR_DOMAIN.com/api.
### Migration Steps
```bash
# === Docker Install ===
#
# Install Docker following the guide at https://docs.docker.com/engine/install/ubuntu/.
# If you have already installed Docker then please skip this section.
#
# The steps are summarized below but we encourage you to follow the guide itself:
#
sudo apt-get remove docker docker-engine docker.io containerd runc
# Setup docker
sudo apt-get update
sudo apt-get install docker
sudo apt install docker
sudo apt-get install \
ca-certificates \
curl \
gnupg \
lsb-release
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
# Add yourself to the docker group
sudo usermod -aG docker $USER
newgrp docker
# Verify the Docker install worked; you should see:
#
# Unable to find image 'hello-world:latest' locally
# latest: Pulling from library/hello-world
# ...
# Hello from Docker!
docker run hello-world
# === Baserow Upgrade ===
# When you are ready, stop your old Baserow server by running
supervisorctl stop all
sudo supervisorctl stop all
# === Extract your secret key ===
# Extract your current SECRET_KEY value from /etc/supervisor/conf.d/baserow.conf
cat /etc/supervisor/conf.d/baserow.conf | sed -nr "s/^\s*SECRET_KEY='(\w+)',/\1/p" > .existing_secret_key
# Check this file just contains your exact secret key by comparing it with
# /etc/supervisor/conf.d/baserow.conf
cat .existing_secret_key
# === Configure your Postgres to allow connections from Docker ===
# 1. Find out what version of postgresql is installed by running
sudo ls /etc/postgresql/
# 2. Open /etc/postgresql/YOUR_PSQL_VERSION/main/postgresql.conf for editing as root
# 3. Find the commented out # listen_addresses line.
# 4. Change it to be:
listen_addresses = '*' # what IP address(es) to listen on;
# 5. Open /etc/postgresql/YOUR_PSQL_VERSION/main/pg_hba.conf for editing as root
# 6. Add the following line to the end which will allow docker containers to connect.
host all all 172.17.0.0/16 md5
# 7. Restart postgres to load in the config changes.
sudo systemctl restart postgresql
# 8. Check the logs do not have errors by running
sudo less /var/log/postgresql/postgresql-YOUR_PSQL_VERSION-main.log
# === Launch Baserow ===
# Please change this variable to the password used by the baserow user in your
# postgresql database.
YOUR_BASEROW_DATABASE_PASSWORD=yourpassword
# Change BASEROW_PUBLIC_URL to your domain name or http://YOUR_SERVERS_IP if you want
# to access Baserow remotely.
# This command will run Baserow so it uses your existing postgresql database and your
@ -70,9 +135,14 @@ supervisorctl stop all
docker run \
-d \
--name baserow \
-e SECRET_KEY_FILE=/baserow/.existing_secret_key \
-e BASEROW_PUBLIC_URL=http://localhost \
-e DATABASE_URL=postgresql://baserow:baserow@localhost:5432/baserow \
--add-host host.docker.internal:host-gateway \
-e DATABASE_HOST=host.docker.internal \
-e DATABASE_USER=baserow \
-e DATABASE_PASSWORD=$YOUR_BASEROW_DATABASE_PASSWORD \
--restart unless-stopped \
-v $PWD/.existing_secret_key:/baserow/.existing_secret_key \
-v baserow_data:/baserow/data \
-v /baserow/media:/baserow/data/media \
-p 80:80 \
@ -82,3 +152,5 @@ docker run \
docker logs baserow
```
Please refer to the [Install with Docker](install-with-docker.md) guide in the future
and for more information on how to manage your Docker based Baserow install.


@ -99,6 +99,10 @@ SECRET_KEY= DATABASE_PASSWORD= REDIS_PASSWORD= docker-compose up
## Upgrading from Baserow 1.8.2's docker-compose file
> If you were previously using a separate api.your_baserow_server.com domain, this is no
> longer needed. Baserow will now work on a single domain, accessing the API at
> YOUR_DOMAIN.com/api.
To upgrade from 1.8.2's docker-compose file from inside the Baserow git repo you need to:
1. Stop your existing Baserow install when safe to do so:
`docker-compose down`


@ -179,6 +179,69 @@ docker run \
baserow/baserow:1.8.3
```
### With a Postgresql server running on the same host as the Baserow docker container
This assumes you are using the PostgreSQL server bundled with Ubuntu. If not, you
will have to find the correct locations of the config files for your OS.
1. Find out what version of postgresql is installed by running
`sudo ls /etc/postgresql/`
2. Open `/etc/postgresql/YOUR_PSQL_VERSION/main/postgresql.conf` for editing as root
3. Find the commented out `# listen_addresses` line.
4. Change it to be:
`listen_addresses = '*' # what IP address(es) to listen on;`
5. Open `/etc/postgresql/YOUR_PSQL_VERSION/main/pg_hba.conf` for editing as root
6. Add the following line to the end which will allow docker containers to connect.
`host all all 172.17.0.0/16 md5`
7. Restart postgres to load in the config changes.
`sudo systemctl restart postgresql`
8. Check the logs do not have errors by running
`sudo less /var/log/postgresql/postgresql-YOUR_PSQL_VERSION-main.log`
9. Run Baserow like so:
```bash
docker run \
-d \
--name baserow \
--add-host host.docker.internal:host-gateway \
-e BASEROW_PUBLIC_URL=http://localhost \
-e DATABASE_HOST=host.docker.internal \
-e DATABASE_PORT=5432 \
-e DATABASE_NAME=YOUR_DATABASE_NAME \
-e DATABASE_USER=YOUR_DATABASE_USERNAME \
-e DATABASE_PASSWORD=REPLACE_WITH_YOUR_DATABASE_PASSWORD \
--restart unless-stopped \
-v baserow_data:/baserow/data \
-p 80:80 \
-p 443:443 \
baserow/baserow:1.8.3
```
### Supply secrets using files
The `DATABASE_PASSWORD`, `SECRET_KEY` and `REDIS_PASSWORD` environment variables
can instead be loaded using a file by using the `*_FILE` variants:
```bash
echo "your_redis_password" > .your_redis_password
echo "your_secret_key" > .your_secret_key
echo "your_pg_password" > .your_pg_password
docker run \
-d \
--name baserow \
-e BASEROW_PUBLIC_URL=http://localhost \
-e REDIS_PASSWORD_FILE=/baserow/.your_redis_password \
-e SECRET_KEY_FILE=/baserow/.your_secret_key \
-e DATABASE_PASSWORD_FILE=/baserow/.your_pg_password \
--restart unless-stopped \
-v $PWD/.your_redis_password:/baserow/.your_redis_password \
-v $PWD/.your_secret_key:/baserow/.your_secret_key \
-v $PWD/.your_pg_password:/baserow/.your_pg_password \
-v baserow_data:/baserow/data \
-p 80:80 \
-p 443:443 \
baserow/baserow:1.8.3
```
### Start just the embedded database
If you want to directly access the embedded Postgresql database then you can run:


@ -3,8 +3,6 @@
> Warning: This guide has been deprecated as of version 1.9 of Baserow. Please follow
> the [Install on Ubuntu - Upgrade from 1.8.2 Section](install-on-ubuntu.md#migration-steps)
> if you installed Baserow 1.8.2 using this guide to upgrade.
> you have an existing Baserow installation. Or if you do not have an existing install
> to follow the one of the other supported installation guides instead.
This deprecated and now unsupported guide will walk you through a production
installation of Baserow. Specifically this document aims to provide a walkthrough for


@ -8,6 +8,10 @@
top: 33 + 3px;
bottom: 48 + 3px;
}
&.grid-view--disable-selection {
user-select: none;
}
}
.grid-view__left,
@ -389,6 +393,38 @@
}
}
.grid-view__column--multi-select & {
background-color: $color-primary-100;
&.active {
background-color: $color-primary-100;
}
}
.grid-view__column--multi-select-top & {
top: -2px;
z-index: 2;
border-top: 2px solid $color-primary-500;
}
.grid-view__column--multi-select-right & {
right: -2px;
z-index: 2;
border-right: 2px solid $color-primary-500;
}
.grid-view__column--multi-select-bottom & {
bottom: -2px;
z-index: 2;
border-bottom: 2px solid $color-primary-500;
}
.grid-view__column--multi-select-left & {
left: -2px;
z-index: 2;
border-left: 2px solid $color-primary-500;
}
.grid-view__cell--error {
@extend %ellipsis;


@ -28,6 +28,7 @@
list-style: none;
margin: 0;
padding: 0 5px;
-moz-user-select: none;
.grid-field-file__cell.active & {
height: auto;


@ -14,6 +14,7 @@
list-style: none;
margin: 0;
padding: 0 4px;
-moz-user-select: none;
.grid-field-many-to-many__cell.active & {
height: auto;


@ -0,0 +1,15 @@
<template>
<div class="alert alert--simple alert--with-shadow alert--has-icon">
<div class="alert__icon">
<div class="loading alert__icon-loading"></div>
</div>
<div class="alert__title">{{ $t('copyingNotification.title') }}</div>
<p class="alert__content">{{ $t('copyingNotification.content') }}</p>
</div>
</template>
<script>
export default {
name: 'CopyingNotification',
}
</script>


@ -12,6 +12,7 @@
></Notification>
</div>
<div class="bottom-right-notifications">
<CopyingNotification v-if="copying"></CopyingNotification>
<RestoreNotification
v-for="notification in restoreNotifications"
:key="notification.id"
@ -28,6 +29,7 @@ import Notification from '@baserow/modules/core/components/notifications/Notific
import ConnectingNotification from '@baserow/modules/core/components/notifications/ConnectingNotification'
import FailedConnectingNotification from '@baserow/modules/core/components/notifications/FailedConnectingNotification'
import RestoreNotification from '@baserow/modules/core/components/notifications/RestoreNotification'
import CopyingNotification from '@baserow/modules/core/components/notifications/CopyingNotification'
export default {
name: 'Notifications',
@ -36,6 +38,7 @@ export default {
Notification,
ConnectingNotification,
FailedConnectingNotification,
CopyingNotification,
},
computed: {
restoreNotifications() {
@ -47,6 +50,7 @@ export default {
...mapState({
connecting: (state) => state.notification.connecting,
failedConnecting: (state) => state.notification.failedConnecting,
copying: (state) => state.notification.copying,
notifications: (state) => state.notification.items,
}),
},


@ -70,10 +70,15 @@ export default {
// If the speed is either a positive or negative, so not 0, we know that we
// need to start auto scrolling.
if (speed !== 0) {
if (el.autoScrollConfig.orientation === 'horizontal') {
scrollElement.scrollLeft += speed
} else {
scrollElement.scrollTop += speed
// Only update the element if the `onScroll` function returns `true`. This
// is because in some cases, scrolling is handled in another way. This is
// for example the case with the `GridView`.
if (el.autoScrollConfig.onScroll(speed)) {
if (el.autoScrollConfig.orientation === 'horizontal') {
scrollElement.scrollLeft += speed
} else {
scrollElement.scrollTop += speed
}
}
el.autoScrollTimeout = setTimeout(() => {
autoscrollLoop()
@ -96,12 +101,14 @@ export default {
update(el, binding) {
const defaultEnabled = () => true
const defaultScrollElement = () => el
const defaultOnScroll = () => true
el.autoScrollConfig = {
orientation: binding.value.orientation || 'vertical',
enabled: binding.value.enabled || defaultEnabled,
speed: binding.value.speed || 3,
padding: binding.value.padding || 10,
scrollElement: binding.value.scrollElement || defaultScrollElement,
onScroll: binding.value.onScroll || defaultOnScroll,
}
},
unbind(el) {


@ -183,6 +183,10 @@
"restoreNotification": {
"restore": "Gelöschten {type} wiederherstellen"
},
"copyingNotification": {
"title": "Kopieren...",
"content": "Vorbereiten ihre Daten"
},
"errorLayout": {
"notFound": "Die Seite, die Sie suchen, wurde nicht gefunden. Dies könnte daran liegen, dass die URL nicht korrekt ist oder dass Sie keine Berechtigung haben, diese Seite anzuzeigen.",
"error": "Beim Laden der Seite ist ein Fehler aufgetreten. Unsere Entwickler wurden über das Problem informiert. Bitte versuchen Sie, die Seite zu aktualisieren oder zum Dashboard zurückzukehren.",


@ -183,6 +183,10 @@
"restoreNotification": {
"restore": "Restore deleted {type}"
},
"copyingNotification": {
"title": "Copying...",
"content": "Preparing your data"
},
"errorLayout": {
"notFound": "The page you are looking for has not been found. This might be because URL is incorrect or that you dont have permission to view this page.",
"error": "Something went wrong while loading the page. Our developers have been notified of the issue. Please try to refresh or return to the dashboard.",


@ -3,6 +3,7 @@ import { uuid } from '@baserow/modules/core/utils/string'
export const state = () => ({
connecting: false,
failedConnecting: false,
copying: false,
items: [],
})
@ -20,6 +21,9 @@ export const mutations = {
SET_FAILED_CONNECTING(state, value) {
state.failedConnecting = value
},
SET_COPYING(state, value) {
state.copying = value
},
}
export const actions = {
@ -68,6 +72,9 @@ export const actions = {
}
commit('SET_FAILED_CONNECTING', value)
},
setCopying({ commit }, value) {
commit('SET_COPYING', value)
},
}
export const getters = {}


@ -1,5 +1,9 @@
<template>
<div v-scroll="scroll" class="grid-view">
<div
v-scroll="scroll"
class="grid-view"
:class="{ 'grid-view--disable-selection': isMultiSelectActive }"
>
<Scrollbars
ref="scrollbars"
horizontal="getHorizontalScrollbarElement"
@ -24,6 +28,9 @@
@row-hover="setRowHover($event.row, $event.value)"
@row-context="showRowContext($event.event, $event.row)"
@row-dragging="rowDragStart"
@cell-mousedown-left="multiSelectStart"
@cell-mouseover="multiSelectHold"
@cell-mouseup-left="multiSelectStop"
@add-row="addRow()"
@update="updateValue"
@edit="editValue"
@ -31,6 +38,7 @@
@unselected="unselectedCell($event)"
@select-next="selectNextCell($event)"
@edit-modal="$refs.rowEditModal.show($event.id)"
@scroll="scroll($event.pixelY, 0)"
>
<template #foot>
<div class="grid-view__foot-info">
@ -70,6 +78,9 @@
@add-row="addRow()"
@update="updateValue"
@edit="editValue"
@cell-mousedown-left="multiSelectStart"
@cell-mouseover="multiSelectHold"
@cell-mouseup-left="multiSelectStop"
@selected="selectedCell($event)"
@unselected="unselectedCell($event)"
@select-next="selectNextCell($event)"
@ -103,7 +114,15 @@
@scroll="scroll($event.pixelY, $event.pixelX)"
></GridViewRowDragging>
<Context ref="rowContext">
<ul class="context__menu">
<ul v-show="isMultiSelectActive" class="context__menu">
<li>
<a @click=";[exportMultiSelect(), $refs.rowContext.hide()]">
<i class="context__menu-icon fas fa-fw fa-copy"></i>
{{ $t('action.copy') }}
</a>
</li>
</ul>
<ul v-show="!isMultiSelectActive" class="context__menu">
<li v-if="!readOnly">
<a @click=";[addRow(selectedRow), $refs.rowContext.hide()]">
<i class="context__menu-icon fas fa-fw fa-arrow-up"></i>
@ -163,6 +182,7 @@ import GridViewRowDragging from '@baserow/modules/database/components/view/grid/
import RowEditModal from '@baserow/modules/database/components/row/RowEditModal'
import gridViewHelpers from '@baserow/modules/database/mixins/gridViewHelpers'
import { maxPossibleOrderValue } from '@baserow/modules/database/viewTypes'
import { isElement } from '@baserow/modules/core/utils/dom'
import viewHelpers from '@baserow/modules/database/mixins/viewHelpers'
export default {
@ -277,6 +297,8 @@ export default {
...mapGetters({
allRows: this.$options.propsData.storePrefix + 'view/grid/getAllRows',
count: this.$options.propsData.storePrefix + 'view/grid/getCount',
isMultiSelectActive:
this.$options.propsData.storePrefix + 'view/grid/isMultiSelectActive',
}),
}
},
@ -297,6 +319,14 @@ export default {
}
this.$el.resizeEvent()
window.addEventListener('resize', this.$el.resizeEvent)
window.addEventListener('keydown', this.arrowEvent)
window.addEventListener('copy', this.exportMultiSelect)
window.addEventListener('click', this.cancelMultiSelect)
window.addEventListener('mouseup', this.multiSelectStop)
this.$refs.left.$el.addEventListener(
'scroll',
this.$el.horizontalScrollEvent
)
this.$store.dispatch(
this.storePrefix + 'view/grid/fetchAllFieldAggregationData',
{ view: this.view }
@ -304,7 +334,16 @@ export default {
},
beforeDestroy() {
window.removeEventListener('resize', this.$el.resizeEvent)
window.removeEventListener('keydown', this.arrowEvent)
window.removeEventListener('copy', this.exportMultiSelect)
window.removeEventListener('click', this.cancelMultiSelect)
window.removeEventListener('mouseup', this.multiSelectStop)
this.$bus.$off('field-deleted', this.fieldDeleted)
this.$store.dispatch(
this.storePrefix + 'view/grid/setMultiSelectActive',
false
)
this.$store.dispatch(this.storePrefix + 'view/grid/clearMultiSelect')
},
methods: {
/**
@ -704,6 +743,12 @@ export default {
return
}
this.$store.dispatch(
this.storePrefix + 'view/grid/setMultiSelectActive',
false
)
this.$store.dispatch(this.storePrefix + 'view/grid/clearMultiSelect')
this.$store.dispatch(this.storePrefix + 'view/grid/setSelectedCell', {
rowId: nextRowId,
fieldId: nextFieldId,
@ -723,6 +768,95 @@ export default {
this.fieldsUpdated()
})
},
/*
Called when mouse is clicked and held on a GridViewCell component.
Starts multi-select by setting the head and tail index to the currently
selected cell.
*/
multiSelectStart({ event, row, field }) {
this.$store.dispatch(this.storePrefix + 'view/grid/multiSelectStart', {
rowId: row.id,
fieldIndex: this.visibleFields.findIndex((f) => f.id === field.id) + 1,
})
},
/*
Called when mouse hovers over a GridViewCell component.
Updates the current multi-select grid by updating the tail index
with the last cell hovered over.
*/
multiSelectHold({ event, row, field }) {
this.$store.dispatch(this.storePrefix + 'view/grid/multiSelectHold', {
rowId: row.id,
fieldIndex: this.visibleFields.findIndex((f) => f.id === field.id) + 1,
})
},
/*
Called when the mouse button is released over a GridViewCell component.
Stop multi-select.
*/
multiSelectStop({ event, row, field }) {
this.$store.dispatch(
this.storePrefix + 'view/grid/setMultiSelectHolding',
false
)
},
/*
Cancels multi-select if it's currently active.
This function checks if a mouse click event is triggered
outside of GridViewRows. This is done by ensuring that the
target element's class is either 'grid-view' or 'grid-view__rows'.
*/
cancelMultiSelect(event) {
if (
this.$store.getters[
this.storePrefix + 'view/grid/isMultiSelectActive'
] &&
(!isElement(this.$el, event.target) ||
!['grid-view__rows', 'grid-view'].includes(event.target.classList[0]))
) {
this.$store.dispatch(
this.storePrefix + 'view/grid/setMultiSelectActive',
false
)
this.$store.dispatch(this.storePrefix + 'view/grid/clearMultiSelect')
}
},
arrowEvent(event) {
// Check if arrow key was pressed.
if (
['ArrowRight', 'ArrowLeft', 'ArrowUp', 'ArrowDown'].includes(event.key)
) {
// Cancels multi-select if it's currently active.
if (
this.$store.getters[
this.storePrefix + 'view/grid/isMultiSelectActive'
]
) {
this.$store.dispatch(
this.storePrefix + 'view/grid/setMultiSelectActive',
false
)
this.$store.dispatch(this.storePrefix + 'view/grid/clearMultiSelect')
}
}
},
// Prepare and copy the multi-select cells into the clipboard, formatted as TSV
async exportMultiSelect(event) {
try {
this.$store.dispatch('notification/setCopying', true)
const output = await this.$store.dispatch(
this.storePrefix + 'view/grid/exportMultiSelect',
this.leftFields.concat(this.visibleFields)
)
if (output !== undefined) {
navigator.clipboard.writeText(output)
}
} catch (error) {
notifyIf(error, 'view')
} finally {
this.$store.dispatch('notification/setCopying', false)
}
},
},
}
</script>


@ -6,9 +6,18 @@
'grid-view__column--matches-search':
props.row._.matchSearch &&
props.row._.fieldSearchMatches.includes(props.field.id.toString()),
'grid-view__column--multi-select': props.multiSelectPosition.selected,
'grid-view__column--multi-select-top': props.multiSelectPosition.top,
'grid-view__column--multi-select-right': props.multiSelectPosition.right,
'grid-view__column--multi-select-left': props.multiSelectPosition.left,
'grid-view__column--multi-select-bottom':
props.multiSelectPosition.bottom,
}"
:style="data.style"
@click="$options.methods.select($event, parent, props.field.id)"
@mousedown.left="$options.methods.cellMouseDownLeft($event, listeners)"
@mouseover="$options.methods.cellMouseover($event, listeners)"
@mouseup.left="$options.methods.cellMouseUpLeft($event, listeners)"
>
<component
:is="$options.methods.getFunctionalComponent(parent, props)"
@ -114,6 +123,22 @@ export default {
event.preventFieldCellUnselect = true
parent.selectCell(fieldId)
},
cellMouseDownLeft(event, listeners) {
if (listeners['cell-mousedown-left']) {
listeners['cell-mousedown-left']()
}
},
cellMouseover(event, listeners) {
event.preventDefault()
if (listeners['cell-mouseover']) {
listeners['cell-mouseover']()
}
},
cellMouseUpLeft(event, listeners) {
if (listeners['cell-mouseup-left']) {
listeners['cell-mouseup-left']()
}
},
/**
* Called when the cell field type component needs to cell to be unselected.
*/


@ -69,6 +69,7 @@
:field="field"
:row="row"
:state="state"
:multi-select-position="getMultiSelectPosition(row.id, field)"
:read-only="readOnly"
:style="{ width: fieldWidths[field.id] + 'px' }"
@update="$emit('update', $event)"
@ -78,6 +79,9 @@
@selected="$emit('selected', $event)"
@unselected="$emit('unselected', $event)"
@select-next="$emit('select-next', $event)"
@cell-mousedown-left="$emit('cell-mousedown-left', { row, field })"
@cell-mouseover="$emit('cell-mouseover', { row, field })"
@cell-mouseup-left="$emit('cell-mouseup-left', { row, field })"
></GridViewCell>
</div>
</template>
@ -100,6 +104,10 @@ export default {
type: Array,
required: true,
},
allFieldIds: {
type: Array,
required: true,
},
fieldWidths: {
type: Object,
required: true,
@ -144,6 +152,56 @@ export default {
fieldId,
})
},
// Return an object that represents whether a cell is selected,
// and its current position in the selection grid
getMultiSelectPosition(rowId, field) {
const position = {
selected: false,
top: false,
right: false,
bottom: false,
left: false,
}
if (
this.$store.getters[this.storePrefix + 'view/grid/isMultiSelectActive']
) {
const rowIndex =
this.$store.getters[
this.storePrefix + 'view/grid/getMultiSelectRowIndexById'
](rowId)
let fieldIndex = this.allFieldIds.findIndex((id) => field.id === id)
fieldIndex += !field.primary ? 1 : 0
const [minRow, maxRow] =
this.$store.getters[
this.storePrefix + 'view/grid/getMultiSelectRowIndexSorted'
]
const [minField, maxField] =
this.$store.getters[
this.storePrefix + 'view/grid/getMultiSelectFieldIndexSorted'
]
if (rowIndex >= minRow && rowIndex <= maxRow) {
if (fieldIndex >= minField && fieldIndex <= maxField) {
position.selected = true
if (rowIndex === minRow) {
position.top = true
}
if (rowIndex === maxRow) {
position.bottom = true
}
if (fieldIndex === minField) {
position.left = true
}
if (fieldIndex === maxField) {
position.right = true
}
}
}
}
return position
},
setState(value) {
this.state = value
},


@ -10,6 +10,7 @@
:key="'row-' + '-' + row.id"
:row="row"
:fields="fields"
:all-field-ids="allFieldIds"
:field-widths="fieldWidths"
:include-row-details="includeRowDetails"
:read-only="readOnly"
@@ -35,6 +36,10 @@ export default {
type: Array,
required: true,
},
allFieldIds: {
type: Array,
required: true,
},
leftOffset: {
type: Number,
required: false,

View file

@@ -1,5 +1,16 @@
<template>
<div>
<div
v-auto-scroll="{
enabled: () => isMultiSelectHolding,
orientation: 'horizontal',
speed: 4,
padding: 10,
onScroll: (speed) => {
$emit('scroll', { pixelY: 0, pixelX: speed })
return false
},
}"
>
<div class="grid-view__inner" :style="{ 'min-width': width + 'px' }">
<GridViewHead
:table="table"
@@ -17,7 +28,19 @@
$refs.fieldDragging.start($event.field, $event.event)
"
></GridViewHead>
<div ref="body" class="grid-view__body">
<div
ref="body"
v-auto-scroll="{
enabled: () => isMultiSelectHolding,
speed: 4,
padding: 10,
onScroll: (speed) => {
$emit('scroll', { pixelY: speed, pixelX: 0 })
return false
},
}"
class="grid-view__body"
>
<div class="grid-view__body-inner">
<GridViewPlaceholder
:fields="fields"
@@ -29,6 +52,7 @@
:table="table"
:view="view"
:fields="fieldsToRender"
:all-field-ids="allFieldIds"
:left-offset="fieldsLeftOffset"
:include-row-details="includeRowDetails"
:read-only="readOnly"
@@ -75,6 +99,7 @@
</template>
<script>
import { mapGetters } from 'vuex'
import debounce from 'lodash/debounce'
import ResizeObserver from 'resize-observer-polyfill'
@@ -166,6 +191,9 @@ export default {
return width
},
allFieldIds() {
return this.fields.map((field) => field.id)
},
},
watch: {
fieldOptions: {
@@ -181,6 +209,16 @@
},
},
},
beforeCreate() {
this.$options.computed = {
...(this.$options.computed || {}),
...mapGetters({
isMultiSelectHolding:
this.$options.propsData.storePrefix +
'view/grid/isMultiSelectHolding',
}),
}
},
mounted() {
// When the component first loads, we need to check which fields should be visible.
this.updateVisibleFieldsInRow()
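
The beforeCreate hook above merges the multi-select getter into the computed
properties at runtime because the grid store module is registered under a prefix
that is only known from the storePrefix prop, so a static computed block cannot
name it. Roughly what the mapping expands to, assuming a hypothetical prefix of
'page/':

// Hand written equivalent of the dynamic mapGetters call; the 'page/' prefix is
// an assumption made for the sake of the example.
export default {
  computed: {
    isMultiSelectHolding() {
      return this.$store.getters['page/view/grid/isMultiSelectHolding']
    },
  },
}

The same holding flag also drives the v-auto-scroll directives in the template, so
the grid keeps scrolling while a selection is dragged past the visible area.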

View file

@@ -700,6 +700,13 @@ export class LinkRowFieldType extends FieldType {
return []
}
toHumanReadableString(field, value) {
if (value) {
return value.map((link) => link.value).join(', ')
}
return ''
}
/**
* The structure for updating is slightly different than what we need for displaying
* the value because the display value does not have to be included. Here we convert
@@ -2159,4 +2166,11 @@ export class LookupFieldType extends FormulaFieldType {
getFormComponent() {
return FieldLookupSubForm
}
toHumanReadableString(field, value) {
if (value) {
return value.map((link) => link.value).join(', ')
}
return ''
}
}
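
A small sketch of what these two overrides produce when a selection is copied; the
cell value below is made up, but follows the array-of-objects shape that link row
and lookup cells use:

// Hypothetical link row / lookup cell value.
const value = [
  { id: 1, value: 'Apple' },
  { id: 2, value: 'Pear' },
]

// Both overrides reduce to the same expression.
const humanReadable = value ? value.map((link) => link.value).join(', ') : ''
console.log(humanReadable) // 'Apple, Pear'

The grid store's exportMultiSelect action, further down in this diff, relies on
toHumanReadableString to turn relation cells into plain text before building the
clipboard payload.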

View file

@@ -11,6 +11,7 @@ export default (client) => {
publicUrl = false,
orderBy = '',
filters = {},
includeFields = [],
}) {
const include = []
const params = new URLSearchParams()
@@ -40,6 +41,10 @@
params.append('order_by', orderBy)
}
if (includeFields.length > 0) {
params.append('include_fields', includeFields.join(','))
}
Object.keys(filters).forEach((key) => {
filters[key].forEach((value) => {
params.append(key, value)
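
A rough illustration of how the new includeFields option ends up on the wire; the
field ids are hypothetical:

// URLSearchParams percent-encodes the comma separator.
const includeFields = ['field_1', 'field_2']
const params = new URLSearchParams()
if (includeFields.length > 0) {
  params.append('include_fields', includeFields.join(','))
}
console.log(params.toString()) // 'include_fields=field_1%2Cfield_2'

The backend presumably uses this to serialise only the requested columns, which
keeps the row fetch in getMultiSelectedRows small when only a few fields are
selected.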

View file

@@ -79,6 +79,21 @@ function getFilters(getters, rootGetters) {
}
export const state = () => ({
// Indicates if multiple cell selection is active
multiSelectActive: false,
// Indicates if the user is clicking and holding the mouse over a cell
multiSelectHolding: false,
/*
The indexes for head and tail cells in a multi-select grid.
Multi-select works by tracking four indexes:
- The field and row index of the first cell selected, known as the head.
- The field and row index of the last cell selected, known as the tail.
Every cell between the head and the tail is then also treated as selected.
*/
multiSelectHeadRowIndex: -1,
multiSelectHeadFieldIndex: -1,
multiSelectTailRowIndex: -1,
multiSelectTailFieldIndex: -1,
// The last used grid id.
lastGridId: -1,
// Contains the custom field options per view. Things like the field width are
@@ -297,6 +312,32 @@ export const mutations = {
}
})
},
UPDATE_MULTISELECT(state, { position, rowIndex, fieldIndex }) {
if (position === 'head') {
state.multiSelectHeadRowIndex = rowIndex
state.multiSelectHeadFieldIndex = fieldIndex
} else if (position === 'tail') {
// Limit the selection to 200 rows: the difference between the head and tail
// row index may be at most 199 because rows are zero-indexed.
// This limit is set by the backend.
if (Math.abs(state.multiSelectHeadRowIndex - rowIndex) <= 199) {
state.multiSelectTailRowIndex = rowIndex
state.multiSelectTailFieldIndex = fieldIndex
}
}
},
SET_MULTISELECT_HOLDING(state, value) {
state.multiSelectHolding = value
},
SET_MULTISELECT_ACTIVE(state, value) {
state.multiSelectActive = value
},
CLEAR_MULTISELECT(state) {
state.multiSelectHolding = false
state.multiSelectHeadRowIndex = -1
state.multiSelectHeadFieldIndex = -1
state.multiSelectTailRowIndex = -1
state.multiSelectTailFieldIndex = -1
},
ADD_FIELD_TO_ROWS_IN_BUFFER(state, { field, value }) {
const name = `field_${field.id}`
state.rows.forEach((row) => {
@@ -1021,6 +1062,104 @@ export const actions = {
setSelectedCell({ commit }, { rowId, fieldId }) {
commit('SET_SELECTED_CELL', { rowId, fieldId })
},
setMultiSelectHolding({ commit }, value) {
commit('SET_MULTISELECT_HOLDING', value)
},
setMultiSelectActive({ commit }, value) {
commit('SET_MULTISELECT_ACTIVE', value)
},
clearMultiSelect({ commit }) {
commit('CLEAR_MULTISELECT')
},
multiSelectStart({ getters, commit }, { rowId, fieldIndex }) {
commit('CLEAR_MULTISELECT')
const rowIndex = getters.getMultiSelectRowIndexById(rowId)
// Set the head and tail index to highlight the first cell
commit('UPDATE_MULTISELECT', { position: 'head', rowIndex, fieldIndex })
commit('UPDATE_MULTISELECT', { position: 'tail', rowIndex, fieldIndex })
// Update the store to show that the mouse is being held for multi-select
commit('SET_MULTISELECT_HOLDING', true)
// Do not enable multi-select if only a single cell is selected
commit('SET_MULTISELECT_ACTIVE', false)
},
multiSelectHold({ getters, commit }, { rowId, fieldIndex }) {
if (getters.isMultiSelectHolding) {
// Unselect single cell
commit('SET_SELECTED_CELL', { rowId: -1, fieldId: -1 })
commit('UPDATE_MULTISELECT', {
position: 'tail',
rowIndex: getters.getMultiSelectRowIndexById(rowId),
fieldIndex,
})
commit('SET_MULTISELECT_ACTIVE', true)
}
},
async exportMultiSelect({ dispatch, getters, commit }, fields) {
if (getters.isMultiSelectActive) {
const output = []
const [minFieldIndex, maxFieldIndex] =
getters.getMultiSelectFieldIndexSorted
let rows = []
fields = fields.slice(minFieldIndex, maxFieldIndex + 1)
if (getters.areMultiSelectRowsWithinBuffer) {
rows = getters.getSelectedRows
} else {
// Fetch rows from backend
rows = await dispatch('getMultiSelectedRows', fields)
}
// Loop over selected rows
for (const row of rows) {
const line = []
// Loop over selected fields
for (const field of fields) {
const rawValue = row['field_' + field.id]
// Format the value for copying using the field's toHumanReadableString()
const value = this.$registry
.get('field', field.type)
.toHumanReadableString(field, rawValue)
line.push(JSON.stringify(value))
}
output.push(line.join('\t'))
}
return output.join('\n')
}
},
/*
This function is called when the selection includes rows that are no longer
in the row buffer and therefore have to be fetched from the backend.
Only the given fields are requested, so when a user selects some or all
fields in a row, only those field values are returned.
*/
async getMultiSelectedRows({ getters, rootGetters }, fields) {
const [minRow, maxRow] = getters.getMultiSelectRowIndexSorted
const gridId = getters.getLastGridId
return await GridService(this.$client)
.fetchRows({
gridId,
offset: minRow,
limit: maxRow - minRow + 1,
search: getters.getServerSearchTerm,
publicUrl: getters.isPublic,
orderBy: getOrderBy(getters, rootGetters),
filters: getFilters(getters, rootGetters),
includeFields: fields.map((field) => `field_${field.id}`),
})
.catch((error) => {
throw error
})
.then(({ data }) => {
return data.results
})
},
setRowHover({ commit }, { row, value }) {
commit('SET_ROW_HOVER', { row, value })
},
@@ -1819,6 +1958,59 @@ export const getters = {
})
return order
},
isMultiSelectActive(state) {
return state.multiSelectActive
},
isMultiSelectHolding(state) {
return state.multiSelectHolding
},
getMultiSelectRowIndexSorted(state) {
return [
Math.min(state.multiSelectHeadRowIndex, state.multiSelectTailRowIndex),
Math.max(state.multiSelectHeadRowIndex, state.multiSelectTailRowIndex),
]
},
getMultiSelectFieldIndexSorted(state) {
return [
Math.min(
state.multiSelectHeadFieldIndex,
state.multiSelectTailFieldIndex
),
Math.max(
state.multiSelectHeadFieldIndex,
state.multiSelectTailFieldIndex
),
]
},
// Get the index of a row given its row id.
// This will calculate the row index from the current buffer position and offset.
getMultiSelectRowIndexById: (state) => (rowId) => {
const bufferIndex = state.rows.findIndex((r) => r.id === rowId)
if (bufferIndex !== -1) {
return state.bufferStartIndex + bufferIndex
}
return -1
},
// Check if all the multi-select rows are within the row buffer
areMultiSelectRowsWithinBuffer(state, getters) {
const [minRow, maxRow] = getters.getMultiSelectRowIndexSorted
return (
minRow >= state.bufferStartIndex &&
maxRow <= state.bufferStartIndex + state.bufferRequestSize
)
},
// Return all rows in the current multi-select selection, provided they are within the row buffer
getSelectedRows(state, getters) {
const [minRow, maxRow] = getters.getMultiSelectRowIndexSorted
if (getters.areMultiSelectRowsWithinBuffer) {
return state.rows.slice(
minRow - state.bufferStartIndex,
maxRow - state.bufferStartIndex + 1
)
}
},
getAllFieldAggregationData(state) {
return state.fieldAggregationData
},
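
Putting the pieces together, a rough usage sketch of the selection flow these
actions and getters implement; the store prefix, ids, field objects and cell
values below are all made up for illustration:

// `store` is assumed to be the Vuex store instance with the grid module mounted
// under the hypothetical 'page/' prefix.
async function copySelection(store) {
  // Ordered field objects currently visible in the grid (shape is illustrative).
  const visibleFields = [
    { id: 1, type: 'text', primary: true },
    { id: 2, type: 'text', primary: false },
  ]

  // 1. Mouse down on a cell: head and tail both point at that cell.
  await store.dispatch('page/view/grid/multiSelectStart', {
    rowId: 10,
    fieldIndex: 0,
  })

  // 2. While the button is held, every cell the pointer passes over moves the
  //    tail; the selection is capped at 200 rows in total to match the backend.
  await store.dispatch('page/view/grid/multiSelectHold', {
    rowId: 14,
    fieldIndex: 1,
  })

  // 3. On copy, exportMultiSelect renders the selected rectangle as tab
  //    separated text: one line per row, one JSON encoded human readable value
  //    per field, e.g. '"Apple"\t"Red"\n"Pear"\t"Green"'.
  return await store.dispatch('page/view/grid/exportMultiSelect', visibleFields)
}

If the selected rows have scrolled out of the buffer, exportMultiSelect falls back
to getMultiSelectedRows, which refetches just those rows (and, via includeFields,
just those columns) from the backend.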

View file

@@ -1,60 +0,0 @@
import axios from 'axios'
import MockAdapter from 'axios-mock-adapter'
import httpMocks from 'node-mocks-http'
import createNuxt from '@baserow/test/helpers/create-nuxt'
let nuxt = null
let mock = null
describe('index redirect', () => {
beforeAll(async (done) => {
mock = new MockAdapter(axios)
// Because the token 'test1' exists it will be refreshed immediately. The
// refresh endpoint is stubbed so that it will always provide a valid,
// unexpired token.
mock.onPost('http://localhost/api/user/token-refresh/').reply(200, {
token:
'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyX2lkIjoxLCJ1c2' +
'VybmFtZSI6InRlc3RAdGVzdCIsImV4cCI6MTk5OTk5OTk5OSwiZW1haWwiO' +
'iJ0ZXN0QHRlc3QubmwiLCJvcmlnX2lhdCI6MTU2Mjc3MzQxNH0.2i0gqrcH' +
'5uy7mk4kf3LoLpZYXoyMrOfi0fDQneVcaFE',
user: {
first_name: 'Test',
username: 'test@test.nl',
},
})
nuxt = await createNuxt(true)
done()
})
test('if not authenticated', async () => {
const { redirected } = await nuxt.server.renderRoute('/')
expect(redirected.path).toBe('/login')
expect(redirected.status).toBe(302)
})
test('if authenticated', async () => {
const req = httpMocks.createRequest({
headers: {
cookie: 'jwt_token=test1',
},
})
const res = httpMocks.createResponse()
const { redirected } = await nuxt.server.renderRoute('/', { req, res })
expect(redirected.path).toBe('/dashboard')
expect(redirected.status).toBe(302)
})
test('login page renders', async () => {
const { html } = await nuxt.server.renderRoute('/login')
expect(html).toContain('Login')
})
test('sign up page renders', async () => {
const { html } = await nuxt.server.renderRoute('/signup')
expect(html).toContain('Sign up')
})
})

View file

@ -16,7 +16,10 @@ exports[`Public View Page Tests Can see a publicly shared grid view 1`] = `
<div
class="bottom-right-notifications"
/>
>
<!---->
</div>
</div>
<div