1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-10 23:50:12 +00:00

Merge branch 'develop'

This commit is contained in:
Bram Wiepjes 2021-03-01 20:19:52 +01:00
commit 81f82461c3
151 changed files with 3224 additions and 971 deletions
.gitlab-ci.ymlREADME.md
backend
changelog.md
deploy/cloudron
docs
plugin-boilerplate/{{ cookiecutter.project_slug }}
web-frontend

View file

@ -32,7 +32,7 @@ web-frontend-test:
backend-flake8:
stage: lint
image: python:3.6
image: python:3.7
script:
- cd backend
- make install-dependencies
@ -41,7 +41,7 @@ backend-flake8:
backend-pytest:
stage: test
image: python:3.6
image: python:3.7
services:
- name: postgres:11.3
alias: db
@ -60,7 +60,7 @@ backend-pytest:
backend-setup:
stage: build
image: python:3.6
image: python:3.7
script:
- pip install -e ./backend
- python -c 'import baserow'

View file

@ -1,6 +1,6 @@
# Baserow
Open source online database tool and Airtable alternative.
Open source no-code database tool and Airtable alternative.
**We're hiring** remote developers! More information at
https://baserow.io/jobs/experienced-full-stack-developer.
@ -109,7 +109,7 @@ Created by Bram Wiepjes (Baserow) - bram@baserow.io.
Distributed under the MIT license. See `LICENSE` for more information.
Version: 0.8.0
Version: 1.0.0
The official repository can be found at https://gitlab.com/bramw/baserow.

View file

@ -1,4 +1,4 @@
FROM python:3.6
FROM python:3.7
ADD . /backend
RUN mkdir -p /media

View file

@ -1,4 +1,4 @@
FROM python:3.6
FROM python:3.7
ADD . /backend

View file

@ -11,7 +11,7 @@ uvicorn[standard]==0.13.3
django-mjml==0.9.0
requests==2.25.0
itsdangerous==1.1.0
drf-spectacular==0.9.12
drf-spectacular==0.13.1
Pillow==8.0.1
channels==3.0.3
channels-redis==3.2.0

View file

@ -6,7 +6,7 @@ from setuptools import find_packages, setup
PROJECT_DIR = os.path.dirname(__file__)
REQUIREMENTS_DIR = os.path.join(PROJECT_DIR, 'requirements')
VERSION = '0.8.0'
VERSION = '1.0.0'
def get_requirements(env):
@ -29,8 +29,11 @@ setup(
author='Bram Wiepjes (Baserow)',
author_email='bram@baserow.io',
license='MIT',
description='Baserow: open source online database backend.',
long_description='',
description='Baserow: open source no-code database backend.',
long_description='Baserow is an open source no-code database tool and Airtable '
'alternative. Easily create a relational database without any '
'technical expertise. Build a table and define custom fields '
'like text, number, file and many more.',
platforms=['linux'],
package_dir={'': 'src'},
packages=find_packages('src'),

View file

@ -219,7 +219,8 @@ class ApplicationView(APIView):
def get(self, request, application_id):
"""Selects a single application and responds with a serialized version."""
application = CoreHandler().get_application(request.user, application_id)
application = CoreHandler().get_application(application_id)
application.group.has_user(request.user, raise_error=True)
return Response(get_application_serializer(application).data)
@extend_schema(
@ -261,12 +262,12 @@ class ApplicationView(APIView):
"""Updates the application if the user belongs to the group."""
application = CoreHandler().get_application(
request.user, application_id,
application_id,
base_queryset=Application.objects.select_for_update()
)
application = CoreHandler().update_application(
request.user, application, name=data['name'])
request.user, application, name=data['name']
)
return Response(get_application_serializer(application).data)
@extend_schema(
@ -301,7 +302,7 @@ class ApplicationView(APIView):
"""Deletes an existing application if the user belongs to the group."""
application = CoreHandler().get_application(
request.user, application_id,
application_id,
base_queryset=Application.objects.select_for_update()
)
CoreHandler().delete_application(request.user, application)

View file

@ -168,10 +168,8 @@ class GroupInvitationView(APIView):
def get(self, request, group_invitation_id):
"""Selects a single group invitation and responds with a serialized version."""
group_invitation = CoreHandler().get_group_invitation(
request.user,
group_invitation_id
)
group_invitation = CoreHandler().get_group_invitation(group_invitation_id)
group_invitation.group.has_user(request.user, 'ADMIN', raise_error=True)
return Response(GroupInvitationSerializer(group_invitation).data)
@extend_schema(
@ -213,7 +211,6 @@ class GroupInvitationView(APIView):
"""Updates the group invitation if the user belongs to the group."""
group_invitation = CoreHandler().get_group_invitation(
request.user,
group_invitation_id,
base_queryset=GroupInvitation.objects.select_for_update()
)
@ -259,7 +256,6 @@ class GroupInvitationView(APIView):
"""Deletes an existing group_invitation if the user belongs to the group."""
group_invitation = CoreHandler().get_group_invitation(
request.user,
group_invitation_id,
base_queryset=GroupInvitation.objects.select_for_update()
)
@ -286,6 +282,7 @@ class AcceptGroupInvitationView(APIView):
'Accepts a group invitation with the given id if the email address of the '
'user matches that of the invitation.'
),
request=None,
responses={
200: GroupUserGroupSerializer,
400: get_error_schema(['ERROR_GROUP_INVITATION_EMAIL_MISMATCH']),
@ -335,6 +332,7 @@ class RejectGroupInvitationView(APIView):
'Rejects a group invitation with the given id if the email address of the '
'user matches that of the invitation.'
),
request=None,
responses={
204: None,
400: get_error_schema(['ERROR_GROUP_INVITATION_EMAIL_MISMATCH']),

View file

@ -1,5 +1,8 @@
from django.contrib.auth import get_user_model
from drf_spectacular.utils import extend_schema_field
from drf_spectacular.types import OpenApiTypes
from rest_framework import serializers
from baserow.core.models import GroupUser
@ -16,9 +19,11 @@ class GroupUserSerializer(serializers.ModelSerializer):
model = GroupUser
fields = ('id', 'name', 'email', 'group', 'permissions', 'created_on')
@extend_schema_field(OpenApiTypes.STR)
def get_name(self, group_user):
    """
    Returns the first name of the user related to the given ``GroupUser``
    instance for the serializer's ``name`` method field.
    """

    # Renamed the parameter from `object` to `group_user` because `object`
    # shadows the Python builtin. DRF calls this method positionally, so the
    # rename is safe for all callers.
    return group_user.user.first_name
@extend_schema_field(OpenApiTypes.STR)
def get_email(self, group_user):
    """
    Returns the email address of the user related to the given ``GroupUser``
    instance for the serializer's ``email`` method field.
    """

    # Renamed the parameter from `object` to `group_user` because `object`
    # shadows the Python builtin. DRF calls this method positionally, so the
    # rename is safe for all callers.
    return group_user.user.email

View file

@ -0,0 +1,12 @@
from rest_framework import serializers
from baserow.core.models import Settings
class SettingsSerializer(serializers.ModelSerializer):
    """Serializes the instance wide ``Settings`` model for the settings API."""

    class Meta:
        model = Settings
        fields = ('allow_new_signups',)
        extra_kwargs = {
            # Optional so a partial (PATCH) update may omit the field.
            'allow_new_signups': {'required': False},
        }

View file

@ -0,0 +1,11 @@
from django.conf.urls import url
from .views import SettingsView, UpdateSettingsView
# Namespace referenced by `include(settings_urls, namespace='settings')` in
# the root API url configuration.
app_name = 'baserow.api.settings'

urlpatterns = [
    # Admin-only endpoint updating the instance wide settings.
    url(r'^update/$', UpdateSettingsView.as_view(), name='update'),
    # Publicly readable endpoint exposing the current settings.
    url(r'^$', SettingsView.as_view(), name='get'),
]

View file

@ -0,0 +1,58 @@
from django.db import transaction
from drf_spectacular.utils import extend_schema
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAdminUser
from baserow.api.decorators import validate_body
from baserow.core.handler import CoreHandler
from .serializers import SettingsSerializer
class SettingsView(APIView):
    """Read-only endpoint exposing the instance wide settings."""

    # The settings are public information, so no authentication is required.
    permission_classes = (AllowAny,)

    @extend_schema(
        tags=['Settings'],
        operation_id='get_settings',
        description='Responds with all the admin configured settings.',
        responses={
            200: SettingsSerializer,
        },
        # Marks the operation as unauthenticated in the generated schema.
        auth=[None],
    )
    def get(self, request):
        """
        Responds with all the admin configured settings.
        """

        settings = CoreHandler().get_settings()
        return Response(SettingsSerializer(settings).data)
class UpdateSettingsView(APIView):
    """Endpoint allowing staff users to change the instance wide settings."""

    # Restricts the endpoint to users with `is_staff` set.
    permission_classes = (IsAdminUser,)

    @extend_schema(
        tags=['Settings'],
        operation_id='update_settings',
        description=(
            'Updates the admin configured settings if the user has admin permissions.'
        ),
        request=SettingsSerializer,
        responses={
            200: SettingsSerializer,
        },
    )
    # `validate_body` validates the request payload and injects it as `data`.
    @validate_body(SettingsSerializer)
    @transaction.atomic
    def patch(self, request, data):
        """
        Updates the provided config settings if the user has admin permissions.
        """

        settings = CoreHandler().update_settings(request.user, **data)
        return Response(SettingsSerializer(settings).data)

View file

@ -4,6 +4,7 @@ from drf_spectacular.views import SpectacularJSONAPIView, SpectacularRedocView
from baserow.core.registries import plugin_registry, application_type_registry
from .settings import urls as settings_urls
from .user import urls as user_urls
from .user_files import urls as user_files_urls
from .groups import urls as group_urls
@ -19,6 +20,7 @@ urlpatterns = [
SpectacularRedocView.as_view(url_name='api:json_schema'),
name='redoc'
),
path('settings/', include(settings_urls, namespace='settings')),
path('user/', include(user_urls, namespace='user')),
path('user-files/', include(user_files_urls, namespace='user_files')),
path('groups/', include(group_urls, namespace='groups')),

View file

@ -1,3 +1,4 @@
# Machine readable error codes returned by the user API endpoints.
ERROR_ALREADY_EXISTS = 'ERROR_EMAIL_ALREADY_EXISTS'
ERROR_USER_NOT_FOUND = 'ERROR_USER_NOT_FOUND'
ERROR_INVALID_OLD_PASSWORD = 'ERROR_INVALID_OLD_PASSWORD'
# Returned when account creation is attempted while new signups are disabled
# (mapped from `DisabledSignupError` in the user views).
ERROR_DISABLED_SIGNUP = 'ERROR_DISABLED_SIGNUP'

View file

@ -13,11 +13,10 @@ User = get_user_model()
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('first_name', 'username', 'password')
fields = ('first_name', 'username', 'password', 'is_staff')
extra_kwargs = {
'password': {
'write_only': True
}
'password': {'write_only': True},
'is_staff': {'read_only': True},
}

View file

@ -30,7 +30,7 @@ from baserow.core.exceptions import (
from baserow.core.models import GroupInvitation
from baserow.core.user.handler import UserHandler
from baserow.core.user.exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword
UserAlreadyExist, UserNotFound, InvalidPassword, DisabledSignupError
)
from .serializers import (
@ -39,7 +39,8 @@ from .serializers import (
NormalizedEmailWebTokenSerializer, DashboardSerializer
)
from .errors import (
ERROR_ALREADY_EXISTS, ERROR_USER_NOT_FOUND, ERROR_INVALID_OLD_PASSWORD
ERROR_ALREADY_EXISTS, ERROR_USER_NOT_FOUND, ERROR_INVALID_OLD_PASSWORD,
ERROR_DISABLED_SIGNUP
)
from .schemas import create_user_response_schema, authenticate_user_schema
@ -146,7 +147,8 @@ class UserView(APIView):
UserAlreadyExist: ERROR_ALREADY_EXISTS,
BadSignature: BAD_TOKEN_SIGNATURE,
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
DisabledSignupError: ERROR_DISABLED_SIGNUP
})
@validate_body(RegisterSerializer)
def post(self, request, data):

View file

@ -28,6 +28,7 @@ class UserFileURLAndThumbnailsSerializerMixin(serializers.Serializer):
url = default_storage.url(path)
return url
@extend_schema_field(OpenApiTypes.OBJECT)
def get_thumbnails(self, instance):
if not self.get_instance_attr(instance, 'is_image'):
return None

View file

@ -3,7 +3,6 @@ from django.db import transaction
from rest_framework.parsers import MultiPartParser
from drf_spectacular.utils import extend_schema
from drf_spectacular.plumbing import build_object_type
from rest_framework.views import APIView
from rest_framework.response import Response
@ -33,7 +32,7 @@ class UploadFileView(APIView):
'Uploads a file to Baserow by uploading the file contents directly. A '
'`file` multipart is expected containing the file contents.'
),
request=build_object_type(),
request=None,
responses={
200: UserFileSerializer,
400: get_error_schema(['ERROR_INVALID_FILE', 'ERROR_FILE_SIZE_TOO_LARGE'])

View file

@ -182,9 +182,10 @@ SPECTACULAR_SETTINGS = {
'name': 'MIT',
'url': 'https://gitlab.com/bramw/baserow/-/blob/master/LICENSE'
},
'VERSION': '0.8.0',
'VERSION': '1.0.0',
'SERVE_INCLUDE_SCHEMA': False,
'TAGS': [
{'name': 'Settings'},
{'name': 'User'},
{'name': 'User files'},
{'name': 'Groups'},
@ -242,6 +243,15 @@ USER_FILES_DIRECTORY = 'user_files'
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
USER_FILE_SIZE_LIMIT = 1024 * 1024 * 20 # 20MB
# Only switch to a real SMTP backend when `EMAIL_SMTP` is set; otherwise the
# default Django email backend stays in effect.
if os.getenv('EMAIL_SMTP', ''):
    EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
    # Bug fix: the TLS flag was read from `EMAIL_SMPT_USE_TLS` ("SMPT" typo).
    # Read the correctly spelled variable first, but keep the misspelled one
    # as a fallback so existing deployments keep working.
    EMAIL_USE_TLS = bool(
        os.getenv('EMAIL_SMTP_USE_TLS', '') or os.getenv('EMAIL_SMPT_USE_TLS', '')
    )
    EMAIL_HOST = os.getenv('EMAIL_SMTP_HOST', 'localhost')
    EMAIL_PORT = os.getenv('EMAIL_SMTP_PORT', '25')
    EMAIL_HOST_USER = os.getenv('EMAIL_SMTP_USER', '')
    EMAIL_HOST_PASSWORD = os.getenv('EMAIL_SMTP_PASSWORD', '')
# Configurable thumbnails that are going to be generated when a user uploads an image
# file.
USER_THUMBNAILS = {

View file

@ -1,5 +1,4 @@
from django.utils.functional import lazy
from django.db import models
from drf_spectacular.utils import extend_schema_field
from drf_spectacular.types import OpenApiTypes
@ -66,21 +65,6 @@ class UpdateFieldSerializer(serializers.ModelSerializer):
}
class LinkRowListSerializer(serializers.ListSerializer):
def to_representation(self, data):
"""
Data that is fetched is always from another Table model and when fetching
that data we always need to respect the field enhancements. Otherwise it
could for example fail when we want to fetch the related select options that
could be in another database and table.
"""
if isinstance(data, models.Manager):
data = data.all().enhance_by_fields()
return super().to_representation(data)
class LinkRowValueSerializer(serializers.Serializer):
id = serializers.IntegerField(help_text='The unique identifier of the row in the '
'related table.')

View file

@ -74,7 +74,8 @@ class FieldsView(APIView):
has access to that group.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
fields = Field.objects.filter(table=table).select_related('content_type')
data = [
@ -128,7 +129,8 @@ class FieldsView(APIView):
type_name = data.pop('type')
field_type = field_type_registry.get(type_name)
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
# Because each field type can raise custom exceptions while creating the
# field we need to be able to map those to the correct API exceptions which are
@ -175,7 +177,8 @@ class FieldView(APIView):
def get(self, request, field_id):
"""Selects a single field and responds with a serialized version."""
field = FieldHandler().get_field(request.user, field_id)
field = FieldHandler().get_field(field_id)
field.table.database.group.has_user(request.user, raise_error=True)
serializer = field_type_registry.get_serializer(field, FieldSerializer)
return Response(serializer.data)
@ -226,7 +229,8 @@ class FieldView(APIView):
"""Updates the field if the user belongs to the group."""
field = FieldHandler().get_field(
request.user, field_id, base_queryset=Field.objects.select_for_update()
field_id,
base_queryset=Field.objects.select_for_update()
).specific
type_name = type_from_data_or_registry(request.data, field_type_registry, field)
field_type = field_type_registry.get(type_name)
@ -276,7 +280,7 @@ class FieldView(APIView):
def delete(self, request, field_id):
"""Deletes an existing field if the user belongs to the group."""
field = FieldHandler().get_field(request.user, field_id)
field = FieldHandler().get_field(field_id)
FieldHandler().delete_field(request.user, field)
return Response(status=204)

View file

@ -195,7 +195,9 @@ class RowsView(APIView):
provide a search query.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
TokenHandler().check_table_permissions(request, 'read', table, False)
search = request.GET.get('search')
order_by = request.GET.get('order_by')
@ -287,7 +289,7 @@ class RowsView(APIView):
according to the tables field types.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, 'create', table, False)
model = table.get_model()
@ -361,7 +363,7 @@ class RowView(APIView):
and table_id.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, 'read', table, False)
model = table.get_model()
@ -426,7 +428,7 @@ class RowView(APIView):
table_id. Also the post data is validated according to the tables field types.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, 'update', table, False)
field_ids = RowHandler().extract_field_ids_from_dict(request.data)
@ -481,7 +483,7 @@ class RowView(APIView):
table_id.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
TokenHandler().check_table_permissions(request, 'delete', table, False)
RowHandler().delete_row(request.user, table, row_id)

View file

@ -64,9 +64,10 @@ class TablesView(APIView):
"""Lists all the tables of a database."""
database = CoreHandler().get_application(
request.user, database_id,
database_id,
base_queryset=Database.objects
)
database.group.has_user(request.user, raise_error=True)
tables = Table.objects.filter(database=database)
serializer = TableSerializer(tables, many=True)
return Response(serializer.data)
@ -111,7 +112,7 @@ class TablesView(APIView):
"""Creates a new table in a database."""
database = CoreHandler().get_application(
request.user, database_id,
database_id,
base_queryset=Database.objects
)
table = TableHandler().create_table(
@ -155,7 +156,8 @@ class TableView(APIView):
def get(self, request, table_id):
"""Responds with a serialized table instance."""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
serializer = TableSerializer(table)
return Response(serializer.data)
@ -194,7 +196,7 @@ class TableView(APIView):
table = TableHandler().update_table(
request.user,
TableHandler().get_table(request.user, table_id),
TableHandler().get_table(table_id),
base_queryset=Table.objects.select_for_update(),
name=data['name']
)
@ -232,6 +234,6 @@ class TableView(APIView):
TableHandler().delete_table(
request.user,
TableHandler().get_table(request.user, table_id)
TableHandler().get_table(table_id)
)
return Response(status=204)

View file

@ -113,7 +113,8 @@ class GridViewView(APIView):
"""
view_handler = ViewHandler()
view = view_handler.get_view(request.user, view_id, GridView)
view = view_handler.get_view(view_id, GridView)
view.table.database.group.has_user(request.user, raise_error=True)
model = view.table.get_model()
queryset = model.objects.all().enhance_by_fields()
@ -192,7 +193,8 @@ class GridViewView(APIView):
requested fields.
"""
view = ViewHandler().get_view(request.user, view_id, GridView)
view = ViewHandler().get_view(view_id, GridView)
view.table.database.group.has_user(request.user, raise_error=True)
model = view.table.get_model(field_ids=data['field_ids'])
results = model.objects.filter(pk__in=data['row_ids'])
@ -251,7 +253,7 @@ class GridViewView(APIView):
"""
handler = ViewHandler()
view = handler.get_view(request.user, view_id, GridView)
view = handler.get_view(view_id, GridView)
handler.update_grid_view_field_options(
request.user,
view,

View file

@ -88,7 +88,8 @@ class ViewsView(APIView):
has access to that group.
"""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
views = View.objects.filter(table=table).select_related('content_type')
if filters:
@ -152,7 +153,7 @@ class ViewsView(APIView):
def post(self, request, data, table_id, filters, sortings):
"""Creates a new view for a user."""
table = TableHandler().get_table(request.user, table_id)
table = TableHandler().get_table(table_id)
view = ViewHandler().create_view(
request.user, table, data.pop('type'), **data)
@ -201,7 +202,8 @@ class ViewView(APIView):
def get(self, request, view_id, filters, sortings):
"""Selects a single view and responds with a serialized version."""
view = ViewHandler().get_view(request.user, view_id)
view = ViewHandler().get_view(view_id)
view.table.database.group.has_user(request.user, raise_error=True)
serializer = view_type_registry.get_serializer(
view,
ViewSerializer,
@ -250,7 +252,7 @@ class ViewView(APIView):
def patch(self, request, view_id, filters, sortings):
"""Updates the view if the user belongs to the group."""
view = ViewHandler().get_view(request.user, view_id).specific
view = ViewHandler().get_view(view_id).specific
view_type = view_type_registry.get_by_model(view)
data = validate_data_custom_fields(
view_type.type, view_type_registry, request.data,
@ -298,7 +300,7 @@ class ViewView(APIView):
def delete(self, request, view_id):
"""Deletes an existing view if the user belongs to the group."""
view = ViewHandler().get_view(request.user, view_id)
view = ViewHandler().get_view(view_id)
ViewHandler().delete_view(request.user, view)
return Response(status=204)
@ -341,7 +343,8 @@ class ViewFiltersView(APIView):
has access to that group.
"""
view = ViewHandler().get_view(request.user, view_id)
view = ViewHandler().get_view(view_id)
view.table.database.group.has_user(request.user, raise_error=True)
filters = ViewFilter.objects.filter(view=view)
serializer = ViewFilterSerializer(filters, many=True)
return Response(serializer.data)
@ -391,7 +394,7 @@ class ViewFiltersView(APIView):
"""Creates a new filter for the provided view."""
view_handler = ViewHandler()
view = view_handler.get_view(request.user, view_id)
view = view_handler.get_view(view_id)
# We can safely assume the field exists because the CreateViewFilterSerializer
# has already checked that.
field = Field.objects.get(pk=data['field'])
@ -566,7 +569,8 @@ class ViewSortingsView(APIView):
has access to that group.
"""
view = ViewHandler().get_view(request.user, view_id)
view = ViewHandler().get_view(view_id)
view.table.database.group.has_user(request.user, raise_error=True)
sortings = ViewSort.objects.filter(view=view)
serializer = ViewSortSerializer(sortings, many=True)
return Response(serializer.data)
@ -616,7 +620,7 @@ class ViewSortingsView(APIView):
"""Creates a new sort for the provided view."""
view_handler = ViewHandler()
view = view_handler.get_view(request.user, view_id)
view = view_handler.get_view(view_id)
# We can safely assume the field exists because the CreateViewSortSerializer
# has already checked that.
field = Field.objects.get(pk=data['field'])

View file

@ -71,11 +71,13 @@ class DatabaseConfig(AppConfig):
EqualViewFilterType, NotEqualViewFilterType, EmptyViewFilterType,
NotEmptyViewFilterType, DateEqualViewFilterType, DateNotEqualViewFilterType,
HigherThanViewFilterType, LowerThanViewFilterType, ContainsViewFilterType,
ContainsNotViewFilterType, BooleanViewFilterType,
SingleSelectEqualViewFilterType, SingleSelectNotEqualViewFilterType
FilenameContainsViewFilterType, ContainsNotViewFilterType,
BooleanViewFilterType, SingleSelectEqualViewFilterType,
SingleSelectNotEqualViewFilterType
)
view_filter_type_registry.register(EqualViewFilterType())
view_filter_type_registry.register(NotEqualViewFilterType())
view_filter_type_registry.register(FilenameContainsViewFilterType())
view_filter_type_registry.register(ContainsViewFilterType())
view_filter_type_registry.register(ContainsNotViewFilterType())
view_filter_type_registry.register(HigherThanViewFilterType())

View file

@ -23,21 +23,21 @@ class PostgresqlLenientDatabaseSchemaEditor:
$$
begin
begin
%(alter_column_prepare_value)s
return %(alert_column_type_function)s::%(type)s;
exception
when others then
return p_default;
%(alter_column_prepare_old_value)s
%(alter_column_prepare_new_value)s
return p_in::%(type)s;
exception when others then
return p_default;
end;
end;
$$
language plpgsql;
"""
def __init__(self, *args, alter_column_prepare_value='',
alert_column_type_function='p_in'):
self.alter_column_prepare_value = alter_column_prepare_value
self.alert_column_type_function = alert_column_type_function
def __init__(self, *args, alter_column_prepare_old_value='',
alter_column_prepare_new_value=''):
self.alter_column_prepare_old_value = alter_column_prepare_old_value
self.alter_column_prepare_new_value = alter_column_prepare_new_value
super().__init__(*args)
def _alter_field(self, model, old_field, new_field, old_type, new_type,
@ -45,24 +45,24 @@ class PostgresqlLenientDatabaseSchemaEditor:
if old_type != new_type:
variables = {}
if isinstance(self.alter_column_prepare_value, tuple):
alter_column_prepare_value, v = self.alter_column_prepare_value
if isinstance(self.alter_column_prepare_old_value, tuple):
alter_column_prepare_old_value, v = self.alter_column_prepare_old_value
variables = {**variables, **v}
else:
alter_column_prepare_value = self.alter_column_prepare_value
alter_column_prepare_old_value = self.alter_column_prepare_old_value
if isinstance(self.alert_column_type_function, tuple):
alert_column_type_function, v = self.alert_column_type_function
if isinstance(self.alter_column_prepare_new_value, tuple):
alter_column_prepare_new_value, v = self.alter_column_prepare_new_value
variables = {**variables, **v}
else:
alert_column_type_function = self.alert_column_type_function
alter_column_prepare_new_value = self.alter_column_prepare_new_value
self.execute(self.sql_drop_try_cast)
self.execute(self.sql_create_try_cast % {
"column": self.quote_name(new_field.column),
"type": new_type,
"alter_column_prepare_value": alter_column_prepare_value,
"alert_column_type_function": alert_column_type_function
"alter_column_prepare_old_value": alter_column_prepare_old_value,
"alter_column_prepare_new_value": alter_column_prepare_new_value
}, variables)
return super()._alter_field(model, old_field, new_field, old_type, new_type,
@ -70,8 +70,8 @@ class PostgresqlLenientDatabaseSchemaEditor:
@contextlib.contextmanager
def lenient_schema_editor(connection, alter_column_prepare_value=None,
alert_column_type_function=None):
def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
alter_column_prepare_new_value=None):
"""
A contextual function that yields a modified version of the connection's schema
editor. This temporary version is more lenient then the regular editor. Normally
@ -83,13 +83,12 @@ def lenient_schema_editor(connection, alter_column_prepare_value=None,
:param connection: The current connection for which to generate the schema editor
for.
:type connection: DatabaseWrapper
:param alter_column_prepare_value: Optionally a query statement converting the
:param alter_column_prepare_old_value: Optionally a query statement converting the
`p_in` value to a string format.
:type alter_column_prepare_value: None or str
:param alert_column_type_function: Optionally the string of a SQL function to
convert the data value to the the new type. The function will have the variable
`p_in` as old value.
:type alert_column_type_function: None or str
:type alter_column_prepare_old_value: None or str
:param alter_column_prepare_new_value: Optionally a query statement converting the
`p_in` text value to the new type.
:type alter_column_prepare_new_value: None or str
:raises ValueError: When the provided connection is not supported. For now only
`postgresql` is supported.
"""
@ -112,11 +111,11 @@ def lenient_schema_editor(connection, alter_column_prepare_value=None,
kwargs = {}
if alter_column_prepare_value:
kwargs['alter_column_prepare_value'] = alter_column_prepare_value
if alter_column_prepare_old_value:
kwargs['alter_column_prepare_old_value'] = alter_column_prepare_old_value
if alert_column_type_function:
kwargs['alert_column_type_function'] = alert_column_type_function
if alter_column_prepare_new_value:
kwargs['alter_column_prepare_new_value'] = alter_column_prepare_new_value
try:
with connection.schema_editor(**kwargs) as schema_editor:

View file

@ -17,8 +17,8 @@ from rest_framework import serializers
from baserow.core.models import UserFile
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from baserow.contrib.database.api.fields.serializers import (
LinkRowListSerializer, LinkRowValueSerializer, FileFieldRequestSerializer,
FileFieldResponseSerializer, SelectOptionSerializer
LinkRowValueSerializer, FileFieldRequestSerializer, FileFieldResponseSerializer,
SelectOptionSerializer
)
from baserow.contrib.database.api.fields.errors import (
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE, ERROR_LINK_ROW_TABLE_NOT_PROVIDED,
@ -28,8 +28,9 @@ from baserow.contrib.database.api.fields.errors import (
from .handler import FieldHandler
from .registries import FieldType, field_type_registry
from .models import (
NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, TextField, LongTextField, URLField,
NumberField, BooleanField, DateField, LinkRowField, EmailField, FileField,
NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, DATE_FORMAT, DATE_TIME_FORMAT,
TextField, LongTextField, URLField, NumberField, BooleanField, DateField,
LinkRowField, EmailField, FileField,
SingleSelectField, SelectOption
)
from .exceptions import (
@ -94,17 +95,18 @@ class URLFieldType(FieldType):
def random_value(self, instance, fake, cache):
return fake.url()
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
return r"""(
return r"""p_in = (
case
when p_in::text ~* '(https?|ftps?)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?'
then p_in::text
else ''
end
)"""
);"""
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
class NumberFieldType(FieldType):
@ -169,7 +171,7 @@ class NumberFieldType(FieldType):
positive=not instance.number_negative
)
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
decimal_places = 0
if to_field.number_type == NUMBER_TYPE_DECIMAL:
@ -180,9 +182,10 @@ class NumberFieldType(FieldType):
if not to_field.number_negative:
function = f"greatest({function}, 0)"
return function
return f'p_in = {function};'
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
def after_update(self, from_field, to_field, from_model, to_model, user, connection,
altered_column, before):
@ -288,6 +291,52 @@ class DateFieldType(FieldType):
else:
return fake.date_object()
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
    """
    If the field type has changed then we want to convert the date or timestamp to
    a human readable text following the old date format.
    """

    to_field_type = field_type_registry.get_by_model(to_field)
    if to_field_type.type != self.type and connection.vendor == 'postgresql':
        # Render the value using the old field's configured format so the
        # user's chosen representation is preserved in the text column.
        sql_type = 'date'
        sql_format = DATE_FORMAT[from_field.date_format]['sql']

        if from_field.date_include_time:
            sql_type = 'timestamp'
            sql_format += ' ' + DATE_TIME_FORMAT[from_field.date_time_format]['sql']

        # `p_in` is the value variable inside the lenient cast function.
        return f"""p_in = TO_CHAR(p_in::{sql_type}, '{sql_format}');"""

    return super().get_alter_column_prepare_old_value(connection, from_field,
                                                      to_field)
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
    """
    If the field type has changed into a date field then we want to parse the old
    text value following the format of the new field and convert it to a date or
    timestamp. If that fails we want to fallback on the default ::date or
    ::timestamp conversion that has already been added.
    """

    from_field_type = field_type_registry.get_by_model(from_field)
    if from_field_type.type != self.type and connection.vendor == 'postgresql':
        sql_function = 'TO_DATE'
        sql_format = DATE_FORMAT[to_field.date_format]['sql']

        if to_field.date_include_time:
            sql_function = 'TO_TIMESTAMP'
            sql_format += ' ' + DATE_TIME_FORMAT[to_field.date_time_format]['sql']

        # The inner begin/exception block swallows parse failures so the
        # surrounding lenient cast can fall back to its default conversion.
        # 'FM' suppresses blank padding in the parsed format.
        return f"""
            begin
                p_in = {sql_function}(p_in::text, 'FM{sql_format}');
            exception when others then end;
        """

    # Bug fix: this method previously fell back to
    # `super().get_alter_column_prepare_old_value(...)`, returning the OLD
    # value hook from inside the NEW value hook. Delegate to the matching
    # parent method instead.
    return super().get_alter_column_prepare_new_value(connection, from_field,
                                                      to_field)
class LinkRowFieldType(FieldType):
"""
@ -314,10 +363,35 @@ class LinkRowFieldType(FieldType):
def enhance_queryset(self, queryset, field, name):
"""
Makes sure that the related rows are prefetched by Django.
Makes sure that the related rows are prefetched by Django. We also want to
enhance the primary field of the related queryset. If for example the primary
field is a single select field then the dropdown options need to be
prefetched in order to prevent many queries.
"""
return queryset.prefetch_related(name)
remote_model = queryset.model._meta.get_field(name).remote_field.model
related_queryset = remote_model.objects.all()
try:
primary_field_object = next(
object
for object in remote_model._field_objects.values()
if object['field'].primary
)
related_queryset = primary_field_object['type'].enhance_queryset(
related_queryset,
primary_field_object['field'],
primary_field_object['name']
)
except StopIteration:
# If the related model does not have a primary field then we also don't
# need to enhance the queryset.
pass
return queryset.prefetch_related(models.Prefetch(
name,
queryset=related_queryset
))
def get_serializer_field(self, instance, **kwargs):
"""
@ -331,9 +405,9 @@ class LinkRowFieldType(FieldType):
def get_response_serializer_field(self, instance, **kwargs):
"""
If a model has already been generated it will be added as a property to the
instance. If that case then we can extract the primary field from the model and
we can pass the name along to the LinkRowValueSerializer. It will be used to
include the primary field's value in the response as a string.
instance. If that is the case then we can extract the primary field from the
model and we can pass the name along to the LinkRowValueSerializer. It will
be used to include the primary field's value in the response as a string.
"""
primary_field_name = None
@ -348,7 +422,7 @@ class LinkRowFieldType(FieldType):
if primary_field:
primary_field_name = primary_field['name']
return LinkRowListSerializer(child=LinkRowValueSerializer(
return serializers.ListSerializer(child=LinkRowValueSerializer(
value_field_name=primary_field_name, required=False, **kwargs
))
@ -424,10 +498,9 @@ class LinkRowFieldType(FieldType):
if 'link_row_table' in values and isinstance(values['link_row_table'], int):
from baserow.contrib.database.table.handler import TableHandler
values['link_row_table'] = TableHandler().get_table(
user,
values['link_row_table']
)
table = TableHandler().get_table(values['link_row_table'])
table.database.group.has_user(user, raise_error=True)
values['link_row_table'] = table
return values
@ -598,17 +671,18 @@ class EmailFieldType(FieldType):
def random_value(self, instance, fake, cache):
return fake.email()
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
return r"""(
return r"""p_in = (
case
when p_in::text ~* '[A-Z0-9._+-]+@[A-Z0-9.-]+\.[A-Z]{2,}'
then p_in::text
else ''
end
)"""
);"""
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
class FileFieldType(FieldType):
@ -787,7 +861,7 @@ class SingleSelectFieldType(FieldType):
)
to_field_values.pop('select_options')
def get_alter_column_prepare_value(self, connection, from_field, to_field):
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
"""
If the new field type isn't a single select field we can convert the plain
text value of the option and maybe that can be used by the new field.
@ -802,6 +876,11 @@ class SingleSelectFieldType(FieldType):
variables[variable_name] = option.value
values_mapping.append(f"('{int(option.id)}', %({variable_name})s)")
# If there are no values we don't need to convert the value to a string
# since all values will be converted to null.
if len(values_mapping) == 0:
return None
sql = f"""
p_in = (SELECT value FROM (
VALUES {','.join(values_mapping)}
@ -810,9 +889,10 @@ class SingleSelectFieldType(FieldType):
"""
return sql, variables
return super().get_alter_column_prepare_value(connection, from_field, to_field)
return super().get_alter_column_prepare_old_value(connection, from_field,
to_field)
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
"""
If the old field wasn't a single select field we can try to match the old text
values to the new options.
@ -829,20 +909,21 @@ class SingleSelectFieldType(FieldType):
f"(lower(%({variable_name})s), '{int(option.id)}')"
)
# If there is no values we don't need to convert the value since all
# If there are no values we don't need to convert the value since all
# values should be converted to null.
if len(values_mapping) == 0:
return None
return f"""(
return f"""p_in = (
SELECT value FROM (
VALUES {','.join(values_mapping)}
) AS values (key, value)
WHERE key = lower(p_in)
)
);
""", variables
return super().get_alter_column_prepare_value(connection, from_field, to_field)
return super().get_alter_column_prepare_old_value(connection, from_field,
to_field)
def get_order(self, field, field_name, view_sort):
"""
@ -864,3 +945,23 @@ class SingleSelectFieldType(FieldType):
for index, option in enumerate(options)
])
return order
def random_value(self, instance, fake, cache):
"""
Selects a random choice out of the possible options.
"""
cache_entry_name = f'field_{instance.id}_options'
if cache_entry_name not in cache:
cache[cache_entry_name] = instance.select_options.all()
select_options = cache[cache_entry_name]
# if the select_options are empty return None
if not select_options:
return None
random_choice = randint(0, len(select_options) - 1)
return select_options[random_choice]

View file

@ -5,7 +5,6 @@ from django.db import connections
from django.db.utils import ProgrammingError, DataError
from django.conf import settings
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.utils import extract_allowed, set_allowed_attrs
from baserow.contrib.database.db.schema import lenient_schema_editor
from baserow.contrib.database.views.handler import ViewHandler
@ -23,12 +22,10 @@ logger = logging.getLogger(__name__)
class FieldHandler:
def get_field(self, user, field_id, field_model=None, base_queryset=None):
def get_field(self, field_id, field_model=None, base_queryset=None):
"""
Selects a field with a given id from the database.
:param user: The user on whose behalf the field is requested.
:type user: User
:param field_id: The identifier of the field that must be returned.
:type field_id: int
:param field_model: If provided that model's objects are used to select the
@ -40,7 +37,6 @@ class FieldHandler:
if this is used the `field_model` parameter doesn't work anymore.
:type base_queryset: Queryset
:raises FieldDoesNotExist: When the field with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the field.
:return: The requested field instance of the provided id.
:rtype: Field
"""
@ -58,10 +54,6 @@ class FieldHandler:
except Field.DoesNotExist:
raise FieldDoesNotExist(f'The field with id {field_id} does not exist.')
group = field.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
return field
def create_field(self, user, table, type_name, primary=False,
@ -84,7 +76,6 @@ class FieldHandler:
:type do_schema_change: bool
:param kwargs: The field values that need to be set upon creation.
:type kwargs: object
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises PrimaryFieldAlreadyExists: When we try to create a primary field,
but one already exists.
:return: The created field instance.
@ -92,8 +83,7 @@ class FieldHandler:
"""
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
# Because only one primary field per table can exist and we have to check if one
# already exists. If so the field cannot be created and an exception is raised.
@ -146,7 +136,6 @@ class FieldHandler:
:param kwargs: The field values that need to be updated
:type kwargs: object
:raises ValueError: When the provided field is not an instance of Field.
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises CannotChangeFieldType: When the database server responds with an
error while trying to change the field type. This should rarely happen
because of the lenient schema editor, which replaces the value with null
@ -159,8 +148,7 @@ class FieldHandler:
raise ValueError('The field is not an instance of Field.')
group = field.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
old_field = deepcopy(field)
field_type = field_type_registry.get_by_model(field)
@ -233,9 +221,11 @@ class FieldHandler:
# the lenient schema editor.
with lenient_schema_editor(
connection,
old_field_type.get_alter_column_prepare_value(
old_field_type.get_alter_column_prepare_old_value(
connection, old_field, field),
field_type.get_alter_column_type_function(connection, old_field, field)
field_type.get_alter_column_prepare_new_value(
connection, old_field, field
)
) as schema_editor:
try:
schema_editor.alter_field(from_model, from_model_field,
@ -278,7 +268,6 @@ class FieldHandler:
:param field: The field instance that needs to be deleted.
:type field: Field
:raises ValueError: When the provided field is not an instance of Field.
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises CannotDeletePrimaryField: When we try to delete the primary field
which cannot be deleted.
"""
@ -287,8 +276,7 @@ class FieldHandler:
raise ValueError('The field is not an instance of Field')
group = field.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
if field.primary:
raise CannotDeletePrimaryField('Cannot delete the primary field of a '
@ -329,12 +317,10 @@ class FieldHandler:
:type field: Field
:param select_options: A list containing dicts with the desired select options.
:type select_options: list
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
group = field.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
existing_select_options = field.select_options.all()

View file

@ -25,15 +25,18 @@ NUMBER_DECIMAL_PLACES_CHOICES = (
DATE_FORMAT = {
'EU': {
'name': 'European (D/M/Y)',
'format': '%d/%m/%Y'
'format': '%d/%m/%Y',
'sql': 'DD/MM/YYYY'
},
'US': {
'name': 'US (M/D/Y)',
'format': '%m/%d/%Y'
'format': '%m/%d/%Y',
'sql': 'MM/DD/YYYY'
},
'ISO': {
'name': 'ISO (Y-M-D)',
'format': '%Y-%m-%d'
'format': '%Y-%m-%d',
'sql': 'YYYY-MM-DD'
},
}
DATE_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_FORMAT.items()]
@ -41,11 +44,13 @@ DATE_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_FORMAT.items()]
DATE_TIME_FORMAT = {
'24': {
'name': '24 hour',
'format': '%H:%M'
'format': '%H:%M',
'sql': 'HH24:MI'
},
'12': {
'name': '12 hour',
'format': '%I:%M %p'
'format': '%I:%M %p',
'sql': 'HH12:MIAM'
}
}
DATE_TIME_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_TIME_FORMAT.items()]

View file

@ -189,10 +189,10 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
return None
def get_alter_column_prepare_value(self, connection, from_field, to_field):
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
"""
Can return a small SQL statement to convert the `p_in` variable to a readable
text format for the new field.
Can return an SQL statement to convert the `p_in` variable to a readable text
format for the new field.
Example: return "p_in = lower(p_in);"
@ -210,15 +210,13 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
return None
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
"""
Can optionally return a SQL function as string to convert the old field's value
when changing the field type. If None is returned no function will be
applied. The connection can be used to see which engine is used, postgresql,
mysql or sqlite.
Can return a SQL statement to convert the `p_in` variable from text to a
desired format for the new field.
Example when a string is converted to a number, the function could be:
REGEXP_REPLACE(p_in, '[^0-9]', '', 'g') which would remove all non numeric
Example when a string is converted to a number, to statement could be:
`REGEXP_REPLACE(p_in, '[^0-9]', '', 'g')` which would remove all non numeric
characters. The p_in variable is the old value as a string.
:param connection: The used connection. This can for example be used to check
@ -228,7 +226,8 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
:type to_field: Field
:param to_field: The new field instance.
:type to_field: Field
:return: The SQL function to convert the value.
:return: The SQL statement converting the old text value into the correct
format.
:rtype: None or str
"""

View file

@ -7,7 +7,6 @@ from django.db.models import Max, F, Q
from django.db.models.fields.related import ManyToManyField
from django.conf import settings
from baserow.core.exceptions import UserNotInGroupError
from baserow.contrib.database.fields.models import Field
from .exceptions import RowDoesNotExist
@ -155,7 +154,6 @@ class RowHandler:
:param model: If the correct model has already been generated it can be
provided so that it does not have to be generated for a second time.
:type model: Model
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises RowDoesNotExist: When the row with the provided id does not exist.
:return: The requested row instance.
:rtype: Model
@ -165,8 +163,7 @@ class RowHandler:
model = table.get_model()
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
try:
row = model.objects.get(id=row_id)
@ -192,7 +189,6 @@ class RowHandler:
:param before: If provided the new row will be placed right before that row
instance.
:type before: Table
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The created row instance.
:rtype: Model
"""
@ -201,8 +197,7 @@ class RowHandler:
values = {}
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
if not model:
model = table.get_model()
@ -255,15 +250,13 @@ class RowHandler:
:param model: If the correct model has already been generated it can be
provided so that it does not have to be generated for a second time.
:type model: Model
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises RowDoesNotExist: When the row with the provided id does not exist.
:return: The updated row instance.
:rtype: Model
"""
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
if not model:
model = table.get_model()
@ -302,13 +295,11 @@ class RowHandler:
:type table: Table
:param row_id: The id of the row that must be deleted.
:type row_id: int
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises RowDoesNotExist: When the row with the provided id does not exist.
"""
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
model = table.get_model(field_ids=[])

View file

@ -1,7 +1,6 @@
from django.db import connections
from django.conf import settings
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.utils import extract_allowed, set_allowed_attrs
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.views.handler import ViewHandler
@ -19,19 +18,16 @@ from .signals import table_created, table_updated, table_deleted
class TableHandler:
def get_table(self, user, table_id, base_queryset=None):
def get_table(self, table_id, base_queryset=None):
"""
Selects a table with a given id from the database.
:param user: The user on whose behalf the table is requested.
:type user: User
:param table_id: The identifier of the table that must be returned.
:type table_id: int
:param base_queryset: The base queryset from where to select the table
object from. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises TableDoesNotExist: When the table with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested table of the provided id.
:rtype: Table
"""
@ -44,10 +40,6 @@ class TableHandler:
except Table.DoesNotExist:
raise TableDoesNotExist(f'The table with id {table_id} doe not exist.')
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
return table
def create_table(self, user, database, fill_example=False, data=None,
@ -71,13 +63,11 @@ class TableHandler:
:type first_row_header: bool
:param kwargs: The fields that need to be set upon creation.
:type kwargs: object
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The created table instance.
:rtype: Table
"""
if not database.group.has_user(user):
raise UserNotInGroupError(user, database.group)
database.group.has_user(user, raise_error=True)
if data is not None:
fields, data = self.normalize_initial_table_data(data, first_row_header)
@ -233,7 +223,6 @@ class TableHandler:
:param kwargs: The fields that need to be updated.
:type kwargs: object
:raises ValueError: When the provided table is not an instance of Table.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The updated table instance.
:rtype: Table
"""
@ -241,8 +230,7 @@ class TableHandler:
if not isinstance(table, Table):
raise ValueError('The table is not an instance of Table')
if not table.database.group.has_user(user):
raise UserNotInGroupError(user, table.database.group)
table.database.group.has_user(user, raise_error=True)
table = set_allowed_attrs(kwargs, ['name'], table)
table.save()
@ -260,15 +248,12 @@ class TableHandler:
:param table: The table instance that needs to be deleted.
:type table: Table
:raises ValueError: When the provided table is not an instance of Table.
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
if not isinstance(table, Table):
raise ValueError('The table is not an instance of Table')
if not table.database.group.has_user(user):
raise UserNotInGroupError(user, table.database.group)
table.database.group.has_user(user, raise_error=True)
table_id = table.id
# Delete the table schema from the database.

View file

@ -201,6 +201,13 @@ class TableModelQuerySet(models.QuerySet):
model_field
)
view_filter_annotation = view_filter_type.get_annotation(
field_name,
value
)
if view_filter_annotation:
self = self.annotate(**view_filter_annotation)
# Depending on filter type we are going to combine the Q either as
# AND or as OR.
if filter_type == FILTER_TYPE_AND:

View file

@ -3,7 +3,6 @@ from django.utils import timezone
from rest_framework.request import Request
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.utils import random_string
from baserow.contrib.database.models import Database, Table
from baserow.contrib.database.exceptions import DatabaseDoesNotBelongToGroup
@ -49,8 +48,6 @@ class TokenHandler:
:type base_queryset: Queryset
:raises TokenDoesNotExist: Raised when the requested token was not found or
if it does not belong to the user.
:raises UserNotInGroupError: When the user does not belong to the group
anymore.
:return: The fetched token.
:rtype: Token
"""
@ -67,8 +64,7 @@ class TokenHandler:
raise TokenDoesNotExist(f'The token with id {token_id} does not exist.')
group = token.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
return token
@ -112,13 +108,11 @@ class TokenHandler:
:type group: Group
:param name: The name of the token.
:type name: str
:raises UserNotInGroupError: If the user does not belong to the group.
:return: The created token instance.
:rtype: Token
"""
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
token = Token.objects.create(
name=name,

View file

@ -1,6 +1,5 @@
from django.db.models import Q, F
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.utils import extract_allowed, set_allowed_attrs
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.models import Field
@ -23,13 +22,11 @@ from .signals import (
class ViewHandler:
def get_view(self, user, view_id, view_model=None, base_queryset=None):
def get_view(self, view_id, view_model=None, base_queryset=None):
"""
Selects a view and checks if the user has access to that view. If everything
is fine the view is returned.
:param user: The user on whose behalf the view is requested.
:type user: User
:param view_id: The identifier of the view that must be returned.
:type view_id: int
:param view_model: If provided that models objects are used to select the
@ -41,7 +38,6 @@ class ViewHandler:
if this is used the `view_model` parameter doesn't work anymore.
:type base_queryset: Queryset
:raises ViewDoesNotExist: When the view with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the related group.
:type view_model: View
:return:
"""
@ -59,10 +55,6 @@ class ViewHandler:
except View.DoesNotExist:
raise ViewDoesNotExist(f'The view with id {view_id} does not exist.')
group = view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
return view
def create_view(self, user, table, type_name, **kwargs):
@ -77,14 +69,12 @@ class ViewHandler:
:type type_name: str
:param kwargs: The fields that need to be set upon creation.
:type kwargs: object
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The created view instance.
:rtype: View
"""
group = table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
# Figure out which model to use for the given view type.
view_type = view_type_registry.get(type_name)
@ -116,7 +106,6 @@ class ViewHandler:
:param kwargs: The fields that need to be updated.
:type kwargs: object
:raises ValueError: When the provided view not an instance of View.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The updated view instance.
:rtype: View
"""
@ -125,8 +114,7 @@ class ViewHandler:
raise ValueError('The view is not an instance of View.')
group = view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
view_type = view_type_registry.get_by_model(view)
allowed_fields = [
@ -150,15 +138,13 @@ class ViewHandler:
:param view: The view instance that needs to be deleted.
:type view: View
:raises ViewDoesNotExist: When the view with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
if not isinstance(view, View):
raise ValueError('The view is not an instance of View')
group = view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
view_id = view.id
view.delete()
@ -185,6 +171,8 @@ class ViewHandler:
provided view.
"""
grid_view.table.database.group.has_user(user, raise_error=True)
if not fields:
fields = Field.objects.filter(table=grid_view.table)
@ -266,6 +254,13 @@ class ViewHandler:
model_field
)
view_filter_annotation = view_filter_type.get_annotation(
field_name,
view_filter.value
)
if view_filter_annotation:
queryset = queryset.annotate(**view_filter_annotation)
# Depending on filter type we are going to combine the Q either as AND or
# as OR.
if view.filter_type == FILTER_TYPE_AND:
@ -289,7 +284,6 @@ class ViewHandler:
object. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises ViewFilterDoesNotExist: The the requested view does not exists.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested view filter instance.
:type: ViewFilter
"""
@ -309,8 +303,7 @@ class ViewHandler:
)
group = view_filter.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
return view_filter
@ -330,7 +323,6 @@ class ViewHandler:
:type type_name: str
:param value: The value that the filter must apply to.
:type value: str
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises ViewFilterNotSupported: When the provided view does not support
filtering.
:raises ViewFilterTypeNotAllowedForField: When the field does not support the
@ -342,8 +334,7 @@ class ViewHandler:
"""
group = view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
# Check if view supports filtering
view_type = view_type_registry.get_by_model(view.specific_class)
@ -388,7 +379,6 @@ class ViewHandler:
:param kwargs: The values that need to be updated, allowed values are
`field`, `value` and `type_name`.
:type kwargs: dict
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises ViewFilterTypeNotAllowedForField: When the field does not supports the
filter type.
:raises FieldNotInTable: When the provided field does not belong to the
@ -398,8 +388,7 @@ class ViewHandler:
"""
group = view_filter.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
type_name = kwargs.get('type_name', view_filter.type)
field = kwargs.get('field', view_filter.field)
@ -439,12 +428,10 @@ class ViewHandler:
:type user: User
:param view_filter: The view filter instance that needs to be deleted.
:type view_filter: ViewFilter
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
group = view_filter.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
view_filter_id = view_filter.id
view_filter.delete()
@ -530,7 +517,6 @@ class ViewHandler:
object from. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises ViewSortDoesNotExist: The the requested view does not exists.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested view sort instance.
:type: ViewSort
"""
@ -550,8 +536,7 @@ class ViewHandler:
)
group = view_sort.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
return view_sort
@ -568,7 +553,6 @@ class ViewHandler:
:param order: The desired order, can either be ascending (A to Z) or
descending (Z to A).
:type order: str
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises ViewSortNotSupported: When the provided view does not support sorting.
:raises FieldNotInTable: When the provided field does not belong to the
provided view's table.
@ -577,8 +561,7 @@ class ViewHandler:
"""
group = view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
# Check if view supports sorting.
view_type = view_type_registry.get_by_model(view.specific_class)
@ -624,15 +607,13 @@ class ViewHandler:
:param kwargs: The values that need to be updated, allowed values are
`field` and `order`.
:type kwargs: dict
:raises UserNotInGroupError: When the user does not belong to the related group.
:raises FieldNotInTable: When the field does not support sorting.
:return: The updated view sort instance.
:rtype: ViewSort
"""
group = view_sort.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
field = kwargs.get('field', view_sort.field)
order = kwargs.get('order', view_sort.order)
@ -680,12 +661,10 @@ class ViewHandler:
:type user: User
:param view_sort: The view sort instance that needs to be deleted.
:type view_sort: ViewSort
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
group = view_sort.view.table.database.group
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
view_sort_id = view_sort.id
view_sort.delete()

View file

@ -120,6 +120,25 @@ class ViewFilterType(Instance):
raise NotImplementedError('Each must have his own get_filter method.')
def get_annotation(self, field_name, value):
"""
Optional method allowing this ViewFilterType to annotate the queryset prior to
the application of any Q filters returned by ViewFilterType.get_filter.
Should return a dictionary which can be unpacked into an annotate call or None
if you do not wish any annotation to be applied by your filter.
:param field_name: The name of the field that needs to be filtered.
:type field_name: str
:param value: The value that the field must be compared to.
:type value: str
:return: The dict object that will be unpacked into an annotate call or None if
no annotation needs to be done.
:rtype: None or dict
"""
return None
class ViewFilterTypeRegistry(Registry):
"""

View file

@ -6,6 +6,7 @@ from dateutil import parser
from dateutil.parser import ParserError
from django.db.models import Q, IntegerField, BooleanField
from django.db.models.expressions import RawSQL
from django.db.models.fields.related import ManyToManyField, ForeignKey
from django.contrib.postgres.fields import JSONField
@ -62,6 +63,58 @@ class NotEqualViewFilterType(NotViewFilterTypeMixin, EqualViewFilterType):
type = 'not_equal'
class FilenameContainsViewFilterType(ViewFilterType):
"""
The filename contains filter checks if the filename's visible name contains the
provided filter value. It is only compatible with fields.JSONField which contain
a list of File JSON Objects.
"""
type = 'filename_contains'
compatible_field_types = [
FileFieldType.type
]
def get_annotation(self, field_name, value):
value = value.strip()
# If an empty value has been provided we do not want to filter at all.
if value == '':
return None
# It is not possible to use Django's ORM to query for if one item in a JSONB
# list has has a key which contains a specified value.
#
# The closest thing the Django ORM provides is:
# queryset.filter(your_json_field__contains=[{"key":"value"}])
# However this is an exact match, so in the above example [{"key":"value_etc"}]
# would not match the filter.
#
# Instead we have to resort to RawSQL to use various built in PostgreSQL JSON
# Array manipulation functions to be able to 'iterate' over a JSONB list
# performing `like` on individual keys in said list.
num_files_with_name_like_value = f"""
EXISTS(
SELECT attached_files ->> 'visible_name'
FROM JSONB_ARRAY_ELEMENTS("{field_name}") as attached_files
WHERE UPPER(attached_files ->> 'visible_name') LIKE UPPER(%s)
)
"""
query = RawSQL(num_files_with_name_like_value, params=[f"%{value}%"],
output_field=BooleanField())
return {f"{field_name}_matches_visible_names": query}
def get_filter(self, field_name, value, model_field):
value = value.strip()
# If an empty value has been provided we do not want to filter at all.
if value == '':
return Q()
# Check if the model_field has a file which matches the provided filter value.
return Q(**{f'{field_name}_matches_visible_names': True})
class ContainsViewFilterType(ViewFilterType):
"""
The contains filter checks if the field value contains the provided filter value.

View file

@ -20,7 +20,8 @@ class TablePageType(PageType):
try:
handler = TableHandler()
handler.get_table(user, table_id)
table = handler.get_table(table_id)
table.database.group.has_user(user, raise_error=True)
except (UserNotInGroupError, TableDoesNotExist):
return False

View file

@ -1,3 +1,10 @@
class IsNotAdminError(Exception):
"""
Raised when the user tries to perform an action that is not allowed because
they do not have admin permissions.
"""
class UserNotInGroupError(Exception):
"""Raised when the user doesn't have access to the related group."""

View file

@ -6,13 +6,13 @@ from django.conf import settings
from baserow.core.user.utils import normalize_email_address
from .models import (
Group, GroupUser, GroupInvitation, Application, GROUP_USER_PERMISSION_CHOICES,
GROUP_USER_PERMISSION_ADMIN
Settings, Group, GroupUser, GroupInvitation, Application,
GROUP_USER_PERMISSION_CHOICES, GROUP_USER_PERMISSION_ADMIN
)
from .exceptions import (
GroupDoesNotExist, ApplicationDoesNotExist, BaseURLHostnameNotAllowed,
UserNotInGroupError, GroupInvitationEmailMismatch, GroupInvitationDoesNotExist,
GroupUserDoesNotExist, GroupUserAlreadyExists
GroupInvitationEmailMismatch, GroupInvitationDoesNotExist, GroupUserDoesNotExist,
GroupUserAlreadyExists, IsNotAdminError
)
from .utils import extract_allowed, set_allowed_attrs
from .registries import application_type_registry
@ -24,6 +24,46 @@ from .emails import GroupInvitationEmail
class CoreHandler:
def get_settings(self):
"""
Returns a settings model instance containing all the admin configured settings.
:return: The settings instance.
:rtype: Settings
"""
try:
# There should only ever be a single Settings row; fetch it if present.
return Settings.objects.all()[:1].get()
except Settings.DoesNotExist:
# Lazily create the singleton row with its default values on first access.
return Settings.objects.create()
def update_settings(self, user, settings_instance=None, **kwargs):
"""
Updates one or more setting values if the user has staff permissions.
:param user: The user on whose behalf the settings are updated.
:type user: User
:param settings_instance: If already fetched, the settings instance can be
provided to avoid fetching the values for a second time.
:type settings_instance: Settings
:param kwargs: A dict containing the settings that need to be updated.
:type kwargs: dict
:raises IsNotAdminError: When the user does not have staff permissions.
:return: The updated settings instance.
:rtype: Settings
"""
if not user.is_staff:
raise IsNotAdminError(user)
if not settings_instance:
settings_instance = self.get_settings()
# Apply every provided setting; unknown keys would simply set unused
# attributes on the instance and not be persisted as columns.
for name, value in kwargs.items():
setattr(settings_instance, name, value)
settings_instance.save()
return settings_instance
def get_group(self, group_id, base_queryset=None):
"""
Selects a group with a given id from the database.
@ -34,7 +74,6 @@ class CoreHandler:
object. This can for example be used to do a `prefetch_related`.
:type base_queryset: Queryset
:raises GroupDoesNotExist: When the group with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the group.
:return: The requested group instance of the provided id.
:rtype: Group
"""
@ -299,7 +338,7 @@ class CoreHandler:
return group_invitation
def get_group_invitation(self, user, group_invitation_id, base_queryset=None):
def get_group_invitation(self, group_invitation_id, base_queryset=None):
"""
Selects a group invitation with a given id from the database.
@ -326,8 +365,6 @@ class CoreHandler:
f'The group invitation with id {group_invitation_id} does not exist.'
)
group_invitation.group.has_user(user, 'ADMIN', raise_error=True)
return group_invitation
def create_group_invitation(self, user, group, email, permissions, message,
@ -487,7 +524,7 @@ class CoreHandler:
return group_user
def get_application(self, user, application_id, base_queryset=None):
def get_application(self, application_id, base_queryset=None):
"""
Selects an application with a given id from the database.
@ -500,7 +537,6 @@ class CoreHandler:
:type base_queryset: Queryset
:raises ApplicationDoesNotExist: When the application with the provided id
does not exist.
:raises UserNotInGroupError: When the user does not belong to the group.
:return: The requested application instance of the provided id.
:rtype: Application
"""
@ -517,9 +553,6 @@ class CoreHandler:
f'The application with id {application_id} does not exist.'
)
if not application.group.has_user(user):
raise UserNotInGroupError(user, application.group)
return application
def create_application(self, user, group, type_name, **kwargs):
@ -535,13 +568,11 @@ class CoreHandler:
:type type_name: str
:param kwargs: The fields that need to be set upon creation.
:type kwargs: object
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The created application instance.
:rtype: Application
"""
if not group.has_user(user):
raise UserNotInGroupError(user, group)
group.has_user(user, raise_error=True)
# Figure out which model is used for the given application type.
application_type = application_type_registry.get(type_name)
@ -568,7 +599,6 @@ class CoreHandler:
:param kwargs: The fields that need to be updated.
:type kwargs: object
:raises ValueError: If one of the provided parameters is invalid.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The updated application instance.
:rtype: Application
"""
@ -576,8 +606,7 @@ class CoreHandler:
if not isinstance(application, Application):
raise ValueError('The application is not an instance of Application.')
if not application.group.has_user(user):
raise UserNotInGroupError(user, application.group)
application.group.has_user(user, raise_error=True)
application = set_allowed_attrs(kwargs, ['name'], application)
application.save()
@ -595,14 +624,12 @@ class CoreHandler:
:param application: The application instance that needs to be deleted.
:type application: Application
:raises ValueError: If one of the provided parameters is invalid.
:raises UserNotInGroupError: When the user does not belong to the related group.
"""
if not isinstance(application, Application):
raise ValueError('The application is not an instance of Application')
if not application.group.has_user(user):
raise UserNotInGroupError(user, application.group)
application.group.has_user(user, raise_error=True)
application_id = application.id
application = application.specific

View file

@ -0,0 +1,29 @@
# Generated by Django 2.2.11 on 2021-02-15 13:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20210126_1950'),
]
operations = [
migrations.CreateModel(
name='Settings',
fields=[
('id', models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID'
)),
('allow_new_signups', models.BooleanField(
default=True,
help_text='Indicates whether new users can create a new account '
'when signing up.'
)),
],
),
]

View file

@ -31,6 +31,19 @@ def get_default_application_content_type():
return ContentType.objects.get_for_model(Application)
class Settings(models.Model):
"""
The settings model represents the application wide settings that only admins can
change. This table can only contain a single row.
"""
# Toggled off by admins to close the instance for new account registrations.
allow_new_signups = models.BooleanField(
default=True,
help_text='Indicates whether new users can create a new account when signing '
'up.'
)
class Group(CreatedAndUpdatedOnMixin, models.Model):
name = models.CharField(max_length=100)
users = models.ManyToManyField(User, through='GroupUser')

View file

@ -8,3 +8,9 @@ class UserAlreadyExist(Exception):
class InvalidPassword(Exception):
"""Raised when the provided password is incorrect."""
class DisabledSignupError(Exception):
"""
Raised when a user account is created while the new signup setting is disabled.
"""

View file

@ -12,7 +12,9 @@ from baserow.core.exceptions import (
)
from baserow.core.exceptions import GroupInvitationEmailMismatch
from .exceptions import UserAlreadyExist, UserNotFound, InvalidPassword
from .exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword, DisabledSignupError
)
from .emails import ResetPasswordEmail
from .utils import normalize_email_address
@ -72,10 +74,14 @@ class UserHandler:
already exists.
:raises GroupInvitationEmailMismatch: If the group invitation email does not
match the one of the user.
:raises DisabledSignupError: If signing up is disabled.
:return: The user object.
:rtype: User
"""
if not CoreHandler().get_settings().allow_new_signups:
raise DisabledSignupError('Sign up is disabled.')
email = normalize_email_address(email)
if User.objects.filter(Q(email=email) | Q(username=email)).exists():

View file

@ -12,7 +12,7 @@ from django.conf import settings
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from baserow.core.utils import sha256_hash, stream_size, random_string
from baserow.core.utils import sha256_hash, stream_size, random_string, truncate_middle
from .exceptions import (
InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
@ -171,6 +171,7 @@ class UserFileHandler:
storage = storage or default_storage
hash = sha256_hash(stream)
file_name = truncate_middle(file_name, 64)
try:
return UserFile.objects.get(original_name=file_name, sha256_hash=hash)

View file

@ -3,6 +3,7 @@ import re
import random
import string
import hashlib
import math
from collections import namedtuple
@ -213,3 +214,38 @@ def stream_size(stream):
size = stream.tell()
stream.seek(0)
return size
def truncate_middle(content, max_length, middle='...'):
"""
Truncates the middle part of the string if the total length if too long.
For example:
truncate_middle('testabcdecho', 8) == 'tes...ho'
:param content: The string that must be truncated.
:type: str
:param max_length: The maximum amount of characters the string can have.
:type max_length: int
:param middle: The part that must be added in the middle if the provided
content is too long.
:type middle str
:return: The truncated string.
:rtype: str
"""
if len(content) <= max_length:
return content
if max_length <= len(middle):
raise ValueError('The max_length cannot be lower than the length if the '
'middle string.')
total = max_length - len(middle)
start = math.ceil(total / 2)
end = math.floor(total / 2)
left = content[:start]
right = content[-end:] if end else ''
return f'{left}{middle}{right}'

View file

@ -139,5 +139,5 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
await self.send_json(payload)
async def disconnect(self, message):
self.discard_current_page()
await self.discard_current_page()
await self.channel_layer.group_discard('users', self.channel_name)

View file

@ -0,0 +1,75 @@
import pytest
from rest_framework.status import (
HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN
)
from django.shortcuts import reverse
from baserow.core.models import Settings
from baserow.core.handler import CoreHandler
@pytest.mark.django_db
def test_get_settings(api_client):
# The settings endpoint is public and initially returns the default values.
response = api_client.get(reverse('api:settings:get'))
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert response_json['allow_new_signups'] is True
# A change to the stored settings must be reflected by the endpoint.
settings = Settings.objects.first()
settings.allow_new_signups = False
settings.save()
response = api_client.get(reverse('api:settings:get'))
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert response_json['allow_new_signups'] is False
@pytest.mark.django_db
def test_update_settings(api_client, data_fixture):
# Only staff users may update the instance wide settings.
user, token = data_fixture.create_user_and_token(is_staff=True)
user_2, token_2 = data_fixture.create_user_and_token()
# A non staff user is rejected and the settings are left untouched.
response = api_client.patch(
reverse('api:settings:update'),
{'allow_new_signups': False},
format='json',
HTTP_AUTHORIZATION=f'JWT {token_2}'
)
assert response.status_code == HTTP_403_FORBIDDEN
assert CoreHandler().get_settings().allow_new_signups is True
# A value of the wrong type results in a request body validation error.
response = api_client.patch(
reverse('api:settings:update'),
{'allow_new_signups': {}},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
assert response.status_code == HTTP_400_BAD_REQUEST
response_json = response.json()
assert response_json['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert response_json['detail']['allow_new_signups'][0]['code'] == 'invalid'
# A staff user can successfully change the setting.
response = api_client.patch(
reverse('api:settings:update'),
{'allow_new_signups': False},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert response_json['allow_new_signups'] is False
assert CoreHandler().get_settings().allow_new_signups is False
# An empty patch is a no-op and leaves the previous values in place.
response = api_client.patch(
reverse('api:settings:update'),
{},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert response_json['allow_new_signups'] is False
assert CoreHandler().get_settings().allow_new_signups is False

View file

@ -53,6 +53,7 @@ def test_token_auth(api_client, data_fixture):
assert 'user' in json
assert json['user']['username'] == 'test@test.nl'
assert json['user']['first_name'] == 'Test1'
assert json['user']['is_staff'] is False
user.refresh_from_db()
assert user.last_login == datetime(2020, 1, 1, 12, 00, tzinfo=timezone('UTC'))
@ -68,6 +69,7 @@ def test_token_auth(api_client, data_fixture):
assert 'user' in json
assert json['user']['username'] == 'test@test.nl'
assert json['user']['first_name'] == 'Test1'
assert json['user']['is_staff'] is False
user.refresh_from_db()
assert user.last_login == datetime(2020, 1, 2, 12, 00, tzinfo=timezone('UTC'))
@ -85,7 +87,12 @@ def test_token_refresh(api_client, data_fixture):
response = api_client.post(reverse('api:user:token_refresh'),
{'token': token}, format='json')
assert response.status_code == HTTP_200_OK
assert 'token' in response.json()
json = response.json()
assert 'token' in json
assert 'user' in json
assert json['user']['username'] == 'test@test.nl'
assert json['user']['first_name'] == 'Test1'
assert json['user']['is_staff'] is False
with patch('rest_framework_jwt.utils.datetime') as mock_datetime:
mock_datetime.utcnow.return_value = datetime(2019, 1, 1, 1, 1, 1, 0)

View file

@ -16,7 +16,7 @@ User = get_user_model()
@pytest.mark.django_db
def test_create_user(client):
def test_create_user(client, data_fixture):
response = client.post(reverse('api:user:index'), {
'name': 'Test1',
'email': 'test@test.nl',
@ -31,6 +31,7 @@ def test_create_user(client):
assert 'password' not in response_json['user']
assert response_json['user']['username'] == 'test@test.nl'
assert response_json['user']['first_name'] == 'Test1'
assert response_json['user']['is_staff'] is False
response_failed = client.post(reverse('api:user:index'), {
'name': 'Test1',
@ -48,6 +49,16 @@ def test_create_user(client):
assert response_failed.status_code == 400
assert response_failed.json()['error'] == 'ERROR_EMAIL_ALREADY_EXISTS'
data_fixture.update_settings(allow_new_signups=False)
response_failed = client.post(reverse('api:user:index'), {
'name': 'Test1',
'email': 'test10@test.nl',
'password': 'test12'
}, format='json')
assert response_failed.status_code == 400
assert response_failed.json()['error'] == 'ERROR_DISABLED_SIGNUP'
data_fixture.update_settings(allow_new_signups=True)
response_failed_2 = client.post(reverse('api:user:index'), {
'email': 'test'
}, format='json')

View file

@ -24,18 +24,20 @@ def test_lenient_schema_editor():
with lenient_schema_editor(connection) as schema_editor:
assert isinstance(schema_editor, PostgresqlLenientDatabaseSchemaEditor)
assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
assert schema_editor.alter_column_prepare_value == ''
assert schema_editor.alert_column_type_function == 'p_in'
assert schema_editor.alter_column_prepare_old_value == ''
assert schema_editor.alter_column_prepare_new_value == ''
assert connection.SchemaEditorClass != PostgresqlDatabaseSchemaEditor
assert connection.SchemaEditorClass == PostgresqlDatabaseSchemaEditor
with lenient_schema_editor(
connection,
'p_in = p_in;',
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
"p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');",
"p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
) as schema_editor:
assert schema_editor.alter_column_prepare_value == "p_in = p_in;"
assert schema_editor.alert_column_type_function == (
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
assert schema_editor.alter_column_prepare_old_value == (
"p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');"
)
assert schema_editor.alter_column_prepare_new_value == (
"p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
)

View file

@ -0,0 +1,363 @@
import pytest
from pytz import timezone
from datetime import date
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import DateField
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
@pytest.mark.django_db
def test_date_field_type_prepare_value(data_fixture):
d = DateFieldType()
f = data_fixture.create_date_field(date_include_time=True)
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('UTC')
expected_date = make_aware(datetime(2020, 4, 10, 0, 0, 0), utc)
expected_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
with pytest.raises(ValidationError):
assert d.prepare_value_for_db(f, 'TEST')
assert d.prepare_value_for_db(f, None) is None
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = datetime(2020, 4, 10, 12, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_date = date(2020, 4, 10)
assert d.prepare_value_for_db(f, unprepared_date) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_datetime
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_datetime
f = data_fixture.create_date_field(date_include_time=False)
expected_date = date(2020, 4, 10)
unprepared_datetime = datetime(2020, 4, 10, 14, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_date
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_date
@pytest.mark.django_db
def test_date_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field_handler = FieldHandler()
row_handler = RowHandler()
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('utc')
date_field_1 = field_handler.create_field(user=user, table=table, type_name='date',
name='Date')
date_field_2 = field_handler.create_field(user=user, table=table, type_name='date',
name='Datetime', date_include_time=True)
assert date_field_1.date_include_time is False
assert date_field_2.date_include_time is True
assert len(DateField.objects.all()) == 2
model = table.get_model(attribute_names=True)
row = row_handler.create_row(user=user, table=table, values={}, model=model)
assert row.date is None
assert row.datetime is None
row = row_handler.create_row(user=user, table=table, values={
'date': '2020-4-1',
'datetime': '2020-4-1 12:30:30'
}, model=model)
row.refresh_from_db()
assert row.date == date(2020, 4, 1)
assert row.datetime == datetime(2020, 4, 1, 12, 30, 30, tzinfo=utc)
row = row_handler.create_row(user=user, table=table, values={
'datetime': make_aware(datetime(2020, 4, 1, 12, 30, 30), amsterdam)
}, model=model)
row.refresh_from_db()
assert row.date is None
assert row.datetime == datetime(2020, 4, 1, 10, 30, 30, tzinfo=timezone('UTC'))
date_field_1 = field_handler.update_field(user=user, field=date_field_1,
date_include_time=True)
date_field_2 = field_handler.update_field(user=user, field=date_field_2,
date_include_time=False)
assert date_field_1.date_include_time is True
assert date_field_2.date_include_time is False
model = table.get_model(attribute_names=True)
rows = model.objects.all()
assert rows[0].date is None
assert rows[0].datetime is None
assert rows[1].date == datetime(2020, 4, 1, tzinfo=timezone('UTC'))
assert rows[1].datetime == date(2020, 4, 1)
assert rows[2].date is None
assert rows[2].datetime == date(2020, 4, 1)
field_handler.delete_field(user=user, field=date_field_1)
field_handler.delete_field(user=user, field=date_field_2)
assert len(DateField.objects.all()) == 0
@pytest.mark.django_db
def test_converting_date_field_value(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field_handler = FieldHandler()
row_handler = RowHandler()
utc = timezone('utc')
date_field_eu = data_fixture.create_text_field(table=table)
date_field_us = data_fixture.create_text_field(table=table)
date_field_iso = data_fixture.create_text_field(table=table)
date_field_eu_12 = data_fixture.create_text_field(table=table)
date_field_us_12 = data_fixture.create_text_field(table=table)
date_field_iso_12 = data_fixture.create_text_field(table=table)
date_field_eu_24 = data_fixture.create_text_field(table=table)
date_field_us_24 = data_fixture.create_text_field(table=table)
date_field_iso_24 = data_fixture.create_text_field(table=table)
model = table.get_model()
row_handler.create_row(user=user, table=table, model=model, values={
f'field_{date_field_eu.id}': '22/07/2021',
f'field_{date_field_us.id}': '07/22/2021',
f'field_{date_field_iso.id}': '2021-07-22',
f'field_{date_field_eu_12.id}': '22/07/2021 12:45 PM',
f'field_{date_field_us_12.id}': '07/22/2021 12:45 PM',
f'field_{date_field_iso_12.id}': '2021-07-22 12:45 PM',
f'field_{date_field_eu_24.id}': '22/07/2021 12:45',
f'field_{date_field_us_24.id}': '07/22/2021 12:45',
f'field_{date_field_iso_24.id}': '2021-07-22 12:45',
})
row_handler.create_row(user=user, table=table, model=model, values={
f'field_{date_field_eu.id}': '22-7-2021',
f'field_{date_field_us.id}': '7-22-2021',
f'field_{date_field_iso.id}': '2021/7/22',
f'field_{date_field_eu_12.id}': '22-7-2021 12:45am',
f'field_{date_field_us_12.id}': '7-22-2021 12:45am',
f'field_{date_field_iso_12.id}': '2021/7/22 12:45am',
f'field_{date_field_eu_24.id}': '22-7-2021 7:45',
f'field_{date_field_us_24.id}': '7-22-2021 7:45',
f'field_{date_field_iso_24.id}': '2021/7/22 7:45',
})
row_handler.create_row(user=user, table=table, model=model, values={
f'field_{date_field_eu.id}': '22/07/2021 12:00',
f'field_{date_field_us.id}': '07/22/2021 12:00am',
f'field_{date_field_iso.id}': '2021-07-22 12:00 PM',
f'field_{date_field_eu_12.id}': 'INVALID',
f'field_{date_field_us_12.id}': '2222-2222-2222',
f'field_{date_field_iso_12.id}': 'x-7--1',
f'field_{date_field_eu_24.id}': '22-7-2021 7:45:12',
f'field_{date_field_us_24.id}': '7-22-2021 7:45:23',
f'field_{date_field_iso_24.id}': '2021/7/22 7:45:70'
})
row_handler.create_row(user=user, table=table, model=model, values={
f'field_{date_field_eu.id}': '2018-08-20T13:20:10',
f'field_{date_field_us.id}': '2017 Mar 03 05:12:41.211',
f'field_{date_field_iso.id}': '19/Apr/2017:06:36:15',
f'field_{date_field_eu_12.id}': 'Dec 2, 2017 2:39:58 AM',
f'field_{date_field_us_12.id}': 'Jun 09 2018 15:28:14',
f'field_{date_field_iso_12.id}': 'Apr 20 00:00:35 2010',
f'field_{date_field_eu_24.id}': 'Apr 20 00:00:35 2010',
f'field_{date_field_us_24.id}': '2018-02-27 15:35:20.311',
f'field_{date_field_iso_24.id}': '10-04-19 12:00:17'
})
date_field_eu = field_handler.update_field(
user=user, field=date_field_eu, new_type_name='date', date_format='EU'
)
date_field_us = field_handler.update_field(
user=user, field=date_field_us, new_type_name='date', date_format='US'
)
date_field_iso = field_handler.update_field(
user=user, field=date_field_iso, new_type_name='date', date_format='ISO'
)
date_field_eu_12 = field_handler.update_field(
user=user, field=date_field_eu_12, new_type_name='date', date_format='EU',
date_include_time=True, date_time_format='12'
)
date_field_us_12 = field_handler.update_field(
user=user, field=date_field_us_12, new_type_name='date', date_format='US',
date_include_time=True, date_time_format='12'
)
date_field_iso_12 = field_handler.update_field(
user=user, field=date_field_iso_12, new_type_name='date', date_format='ISO',
date_include_time=True, date_time_format='12'
)
date_field_eu_24 = field_handler.update_field(
user=user, field=date_field_eu_24, new_type_name='date', date_format='EU',
date_include_time=True, date_time_format='24'
)
date_field_us_24 = field_handler.update_field(
user=user, field=date_field_us_24, new_type_name='date', date_format='US',
date_include_time=True, date_time_format='24'
)
date_field_iso_24 = field_handler.update_field(
user=user, field=date_field_iso_24, new_type_name='date', date_format='ISO',
date_include_time=True, date_time_format='24'
)
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{date_field_eu.id}') == date(2021, 7, 22)
assert getattr(rows[0], f'field_{date_field_us.id}') == date(2021, 7, 22)
assert getattr(rows[0], f'field_{date_field_iso.id}') == date(2021, 7, 22)
assert getattr(rows[0], f'field_{date_field_eu_12.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[0], f'field_{date_field_us_12.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[0], f'field_{date_field_iso_12.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[0], f'field_{date_field_eu_24.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[0], f'field_{date_field_us_24.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[0], f'field_{date_field_iso_24.id}') == (
datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_eu.id}') == date(2021, 7, 22)
assert getattr(rows[1], f'field_{date_field_us.id}') == date(2021, 7, 22)
assert getattr(rows[1], f'field_{date_field_iso.id}') == date(2021, 7, 22)
assert getattr(rows[1], f'field_{date_field_eu_12.id}') == (
datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_us_12.id}') == (
datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_iso_12.id}') == (
datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_eu_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_us_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
assert getattr(rows[1], f'field_{date_field_iso_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
assert getattr(rows[2], f'field_{date_field_eu.id}') == date(2021, 7, 22)
assert getattr(rows[2], f'field_{date_field_us.id}') == date(2021, 7, 22)
assert getattr(rows[2], f'field_{date_field_iso.id}') == date(2021, 7, 22)
assert getattr(rows[2], f'field_{date_field_eu_12.id}') is None
assert getattr(rows[2], f'field_{date_field_us_12.id}') is None
assert getattr(rows[2], f'field_{date_field_iso_12.id}') is None
assert getattr(rows[2], f'field_{date_field_eu_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
assert getattr(rows[2], f'field_{date_field_us_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
assert getattr(rows[2], f'field_{date_field_iso_24.id}') == (
datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
)
"""
f'field_{date_field_eu.id}': '2018-08-20T13:20:10',
f'field_{date_field_us.id}': '2017 Mar 03 05:12:41.211',
f'field_{date_field_iso.id}': '19/Apr/2017:06:36:15',
f'field_{date_field_eu_12.id}': 'Dec 2, 2017 2:39:58 AM',
f'field_{date_field_us_12.id}': 'Jun 09 2018 15:28:14',
f'field_{date_field_iso_12.id}': 'Apr 20 00:00:35 2010',
f'field_{date_field_eu_24.id}': 'Apr 20 00:00:35 2010',
f'field_{date_field_us_24.id}': '2018-02-27 15:35:20.311',
f'field_{date_field_iso_24.id}': '10-04-19 12:00:17'
"""
assert getattr(rows[3], f'field_{date_field_eu.id}') == date(2018, 8, 20)
assert getattr(rows[3], f'field_{date_field_us.id}') == date(2017, 3, 3)
assert getattr(rows[3], f'field_{date_field_iso.id}') == date(2017, 4, 19)
assert getattr(rows[3], f'field_{date_field_eu_12.id}') == (
datetime(2017, 12, 2, 2, 39, 58, tzinfo=utc)
)
assert getattr(rows[3], f'field_{date_field_us_12.id}') == (
datetime(2018, 6, 9, 15, 28, 14, tzinfo=utc)
)
assert getattr(rows[3], f'field_{date_field_iso_12.id}') == (
datetime(2010, 4, 20, 0, 0, 35, tzinfo=utc)
)
assert getattr(rows[3], f'field_{date_field_eu_24.id}') == (
datetime(2010, 4, 20, 0, 0, 35, tzinfo=utc)
)
assert getattr(rows[3], f'field_{date_field_us_24.id}') == (
datetime(2018, 2, 27, 15, 35, 20, 311000, tzinfo=utc)
)
assert getattr(rows[3], f'field_{date_field_iso_24.id}') == (
datetime(10, 4, 19, 12, 0, tzinfo=utc)
)
date_field_eu = field_handler.update_field(
user=user, field=date_field_eu, new_type_name='text'
)
date_field_us = field_handler.update_field(
user=user, field=date_field_us, new_type_name='text'
)
date_field_iso = field_handler.update_field(
user=user, field=date_field_iso, new_type_name='text'
)
date_field_eu_12 = field_handler.update_field(
user=user, field=date_field_eu_12, new_type_name='text'
)
date_field_us_12 = field_handler.update_field(
user=user, field=date_field_us_12, new_type_name='text'
)
date_field_iso_12 = field_handler.update_field(
user=user, field=date_field_iso_12, new_type_name='text'
)
date_field_eu_24 = field_handler.update_field(
user=user, field=date_field_eu_24, new_type_name='text'
)
date_field_us_24 = field_handler.update_field(
user=user, field=date_field_us_24, new_type_name='text'
)
date_field_iso_24 = field_handler.update_field(
user=user, field=date_field_iso_24, new_type_name='text'
)
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{date_field_eu.id}') == '22/07/2021'
assert getattr(rows[0], f'field_{date_field_us.id}') == '07/22/2021'
assert getattr(rows[0], f'field_{date_field_iso.id}') == '2021-07-22'
assert getattr(rows[0], f'field_{date_field_eu_12.id}') == '22/07/2021 12:45PM'
assert getattr(rows[0], f'field_{date_field_us_12.id}') == '07/22/2021 12:45PM'
assert getattr(rows[0], f'field_{date_field_iso_12.id}') == '2021-07-22 12:45PM'
assert getattr(rows[0], f'field_{date_field_eu_24.id}') == '22/07/2021 12:45'
assert getattr(rows[0], f'field_{date_field_us_24.id}') == '07/22/2021 12:45'
assert getattr(rows[0], f'field_{date_field_iso_24.id}') == '2021-07-22 12:45'
assert getattr(rows[2], f'field_{date_field_eu_12.id}') is None

View file

@ -18,24 +18,21 @@ from baserow.contrib.database.fields.exceptions import (
@pytest.mark.django_db
def test_get_field(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
data_fixture.create_user()
text = data_fixture.create_text_field(user=user)
handler = FieldHandler()
with pytest.raises(FieldDoesNotExist):
handler.get_field(user=user, field_id=99999)
handler.get_field(field_id=99999)
with pytest.raises(UserNotInGroupError):
handler.get_field(user=user_2, field_id=text.id)
field = handler.get_field(user=user, field_id=text.id)
field = handler.get_field(field_id=text.id)
assert text.id == field.id
assert text.name == field.name
assert isinstance(field, Field)
field = handler.get_field(user=user, field_id=text.id, field_model=TextField)
field = handler.get_field(field_id=text.id, field_model=TextField)
assert text.id == field.id
assert text.name == field.name
@ -44,7 +41,7 @@ def test_get_field(data_fixture):
# If the error is raised we know for sure that the query has resolved.
with pytest.raises(AttributeError):
handler.get_field(
user=user, field_id=text.id,
field_id=text.id,
base_queryset=Field.objects.prefetch_related('UNKNOWN')
)
@ -251,7 +248,7 @@ def test_update_field_failing(data_fixture):
# This failing field type triggers the CannotChangeFieldType error if a field is
# changed into this type.
class FailingFieldType(TextFieldType):
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
return 'p_in::NOT_VALID_SQL_SO_IT_WILL_FAIL('
user = data_fixture.create_user()

View file

@ -1,19 +1,15 @@
import pytest
import json
from pytz import timezone
from datetime import date
from faker import Faker
from decimal import Decimal
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from baserow.core.user_files.exceptions import (
InvalidUserFileNameError, UserFileDoesNotExist
)
from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import (
LongTextField, URLField, DateField, EmailField, FileField
LongTextField, URLField, EmailField, FileField
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
@ -288,118 +284,6 @@ def test_url_field_type(data_fixture):
assert len(URLField.objects.all()) == 2
@pytest.mark.django_db
def test_date_field_type_prepare_value(data_fixture):
d = DateFieldType()
f = data_fixture.create_date_field(date_include_time=True)
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('UTC')
expected_date = make_aware(datetime(2020, 4, 10, 0, 0, 0), utc)
expected_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
with pytest.raises(ValidationError):
assert d.prepare_value_for_db(f, 'TEST')
assert d.prepare_value_for_db(f, None) is None
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = datetime(2020, 4, 10, 12, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_date = date(2020, 4, 10)
assert d.prepare_value_for_db(f, unprepared_date) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_datetime
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_datetime
f = data_fixture.create_date_field(date_include_time=False)
expected_date = date(2020, 4, 10)
unprepared_datetime = datetime(2020, 4, 10, 14, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_date
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_date
@pytest.mark.django_db
def test_date_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field_handler = FieldHandler()
row_handler = RowHandler()
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('utc')
date_field_1 = field_handler.create_field(user=user, table=table, type_name='date',
name='Date')
date_field_2 = field_handler.create_field(user=user, table=table, type_name='date',
name='Datetime', date_include_time=True)
assert date_field_1.date_include_time is False
assert date_field_2.date_include_time is True
assert len(DateField.objects.all()) == 2
model = table.get_model(attribute_names=True)
row = row_handler.create_row(user=user, table=table, values={}, model=model)
assert row.date is None
assert row.datetime is None
row = row_handler.create_row(user=user, table=table, values={
'date': '2020-4-1',
'datetime': '2020-4-1 12:30:30'
}, model=model)
row.refresh_from_db()
assert row.date == date(2020, 4, 1)
assert row.datetime == datetime(2020, 4, 1, 12, 30, 30, tzinfo=utc)
row = row_handler.create_row(user=user, table=table, values={
'datetime': make_aware(datetime(2020, 4, 1, 12, 30, 30), amsterdam)
}, model=model)
row.refresh_from_db()
assert row.date is None
assert row.datetime == datetime(2020, 4, 1, 10, 30, 30, tzinfo=timezone('UTC'))
date_field_1 = field_handler.update_field(user=user, field=date_field_1,
date_include_time=True)
date_field_2 = field_handler.update_field(user=user, field=date_field_2,
date_include_time=False)
assert date_field_1.date_include_time is True
assert date_field_2.date_include_time is False
model = table.get_model(attribute_names=True)
rows = model.objects.all()
assert rows[0].date is None
assert rows[0].datetime is None
assert rows[1].date == datetime(2020, 4, 1, tzinfo=timezone('UTC'))
assert rows[1].datetime == date(2020, 4, 1)
assert rows[2].date is None
assert rows[2].datetime == date(2020, 4, 1)
field_handler.delete_field(user=user, field=date_field_1)
field_handler.delete_field(user=user, field=date_field_2)
assert len(DateField.objects.all()) == 0
@pytest.mark.django_db
def test_email_field_type(data_fixture):
user = data_fixture.create_user()

View file

@ -5,10 +5,16 @@ from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD
from django.shortcuts import reverse
from django.core.exceptions import ValidationError
from faker import Faker
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption, SingleSelectField
from baserow.contrib.database.fields.field_types import SingleSelectFieldType
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.api.rows.serializers import (
get_row_serializer_class, RowSerializer
)
@pytest.mark.django_db
@ -58,6 +64,10 @@ def test_single_select_field_type(data_fixture):
field_handler.delete_field(user=user, field=field)
assert SelectOption.objects.all().count() == 0
field = field_handler.create_field(user=user, table=table,
type_name='single_select', select_options=[])
field_handler.update_field(user=user, field=field, new_type_name='text')
@pytest.mark.django_db
def test_single_select_field_type_rows(data_fixture, django_assert_num_queries):
@ -495,7 +505,11 @@ def test_single_select_field_type_get_order(data_fixture):
@pytest.mark.django_db
def test_primary_single_select_field_with_link_row_field(api_client, data_fixture):
def test_primary_single_select_field_with_link_row_field(
api_client,
data_fixture,
django_assert_num_queries
):
"""
We expect the relation to a table that has a single select field to work.
"""
@ -521,7 +535,8 @@ def test_primary_single_select_field_with_link_row_field(api_client, data_fixtur
type_name='single_select',
select_options=[
{'value': 'Option 1', 'color': 'red'},
{'value': 'Option 2', 'color': 'blue'}
{'value': 'Option 2', 'color': 'blue'},
{'value': 'Option 3', 'color': 'orange'}
],
primary=True
)
@ -541,6 +556,10 @@ def test_primary_single_select_field_with_link_row_field(api_client, data_fixtur
user=user, table=customers_table,
values={f'field_{customers_primary.id}': select_options[1].id}
)
customers_row_3 = row_handler.create_row(
user=user, table=customers_table,
values={f'field_{customers_primary.id}': select_options[2].id}
)
row_handler.create_row(
user, table=example_table,
values={f'field_{link_row_field.id}': [customers_row_1.id, customers_row_2.id]}
@ -549,6 +568,18 @@ def test_primary_single_select_field_with_link_row_field(api_client, data_fixtur
user, table=example_table,
values={f'field_{link_row_field.id}': [customers_row_1.id]}
)
row_handler.create_row(
user, table=example_table,
values={f'field_{link_row_field.id}': [customers_row_3.id]}
)
model = example_table.get_model()
queryset = model.objects.all().enhance_by_fields()
serializer_class = get_row_serializer_class(model, RowSerializer, is_response=True)
with django_assert_num_queries(3):
serializer = serializer_class(queryset, many=True)
serializer.data
response = api_client.get(
reverse('api:database:rows:list', kwargs={'table_id': example_table.id}),
@ -569,3 +600,50 @@ def test_primary_single_select_field_with_link_row_field(api_client, data_fixtur
response_json['results'][1][f'field_{link_row_field.id}'][0]['value'] ==
'Option 1'
)
assert (
response_json['results'][2][f'field_{link_row_field.id}'][0]['value'] ==
'Option 3'
)
@pytest.mark.django_db
def test_single_select_field_type_random_value(data_fixture):
"""
Verify that the random_value function of the single select field type correctly
returns one option of a given select_options list. If the select_options list is
empty or the passed field type is not of single select field type by any chance
it should return None.
"""
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user, name='Placeholder')
table = data_fixture.create_database_table(name='Example', database=database)
field_handler = FieldHandler()
cache = {}
fake = Faker()
field = field_handler.create_field(
user=user,
table=table,
type_name='single_select',
name='Single select',
select_options=[
{'value': 'Option 1', 'color': 'blue'},
{'value': 'Option 2', 'color': 'red'}
],
)
select_options = field.select_options.all()
random_choice = SingleSelectFieldType().random_value(field, fake, cache)
assert random_choice in select_options
random_choice = SingleSelectFieldType().random_value(field, fake, cache)
assert random_choice in select_options
email_field = field_handler.create_field(
user=user,
table=table,
type_name='email',
name='E-Mail',
)
random_choice_2 = SingleSelectFieldType().random_value(email_field, fake, cache)
assert random_choice_2 is None

View file

@ -0,0 +1,80 @@
import pytest
from io import StringIO
from django.core.management import call_command
@pytest.mark.django_db
def test_fill_table_no_table():
"""
    Check whether calling the fill_table command correctly 'raises' a system exit
when the command gets called with a table that does not exist
"""
output = StringIO()
table_id_that_does_not_exist = 5
with pytest.raises(SystemExit) as sys_exit:
call_command('fill_table', table_id_that_does_not_exist, 10, stdout=output)
assert sys_exit.type == SystemExit
assert sys_exit.value.code == 1
assert (
output.getvalue()
== f"The table with id {table_id_that_does_not_exist} was not found.\n"
)
@pytest.mark.django_db
@pytest.mark.parametrize("test_limit", [5, 10])
def test_fill_table_empty_table(data_fixture, test_limit):
"""
Verify that the fill_table command correctly populates a given empty table with
different 'limit' rows
"""
# create a new empty table with a text field
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
data_fixture.create_text_field(user=user, table=table)
call_command('fill_table', table.id, test_limit)
model = table.get_model()
results = model.objects.all()
assert len(results) == test_limit
@pytest.mark.django_db
@pytest.mark.parametrize("test_limit", [5, 10])
def test_fill_table_no_empty_table(data_fixture, test_limit):
"""
Verify that the fill_table command correctly appends to a table with data already
in it with different 'limit' rows
"""
# create a new empty table with a field
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(user=user, table=table)
model = table.get_model()
# create data in the previously created field
values = {f"field_{text_field.id}": "Some Text"}
model.objects.create(**values)
results = model.objects.all()
row_length_before_random_insert = len(results)
first_row_value_before = getattr(results[0], f"field_{text_field.id}")
# execute the fill_table command
call_command('fill_table', table.id, test_limit)
results = model.objects.all()
first_row_value_after = getattr(results[0], f"field_{text_field.id}")
# make sure the first row is still the same
assert first_row_value_before == first_row_value_after
assert len(results) == test_limit + row_length_before_random_insert

View file

@ -21,24 +21,20 @@ from baserow.contrib.database.views.models import GridView, GridViewFieldOptions
def test_get_database_table(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table()
data_fixture.create_database_table()
handler = TableHandler()
with pytest.raises(UserNotInGroupError):
handler.get_table(user=user, table_id=table_2.id)
with pytest.raises(TableDoesNotExist):
handler.get_table(user=user, table_id=99999)
handler.get_table(table_id=99999)
# If the error is raised we know for sure that the base query has resolved.
with pytest.raises(AttributeError):
handler.get_table(
user=user,
table_id=table.id,
base_queryset=Table.objects.prefetch_related('UNKNOWN')
)
table_copy = handler.get_table(user=user, table_id=table.id)
table_copy = handler.get_table(table_id=table.id)
assert table_copy.id == table.id

View file

@ -1329,3 +1329,67 @@ def test_not_empty_filter_type(data_fixture):
filter.field = single_select_field
filter.save()
assert handler.apply_filters(grid_view, model.objects.all()).get().id == row_2.id
@pytest.mark.django_db
def test_filename_contains_filter_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
grid_view = data_fixture.create_grid_view(table=table)
file_field = data_fixture.create_file_field(table=table)
handler = ViewHandler()
model = table.get_model()
row = model.objects.create(**{
f'field_{file_field.id}': [{'visible_name': 'test_file.png'}],
})
row_with_multiple_files = model.objects.create(**{
f'field_{file_field.id}': [
{'visible_name': 'test.doc'},
{'visible_name': 'test.txt'}
],
})
row_with_no_files = model.objects.create(**{
f'field_{file_field.id}': [],
})
filter = data_fixture.create_view_filter(
view=grid_view,
field=file_field,
type='filename_contains',
value='test_file.png'
)
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 1
assert row.id in ids
filter.value = '.jpg'
filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 0
filter.value = '.png'
filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 1
assert row.id in ids
filter.value = 'test.'
filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 1
assert row_with_multiple_files.id in ids
filter.value = ''
filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 3
assert row.id in ids
assert row_with_multiple_files.id in ids
assert row_with_no_files.id in ids
results = model.objects.all().filter_by_fields_object(filter_object={
f'filter__field_{file_field.id}__filename_contains': ['.png'],
}, filter_type='AND')
assert len(results) == 1

View file

@ -22,18 +22,15 @@ from baserow.contrib.database.fields.exceptions import FieldNotInTable
@pytest.mark.django_db
def test_get_view(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
data_fixture.create_user()
grid = data_fixture.create_grid_view(user=user)
handler = ViewHandler()
with pytest.raises(ViewDoesNotExist):
handler.get_view(user=user, view_id=99999)
handler.get_view(view_id=99999)
with pytest.raises(UserNotInGroupError):
handler.get_view(user=user_2, view_id=grid.id)
view = handler.get_view(user=user, view_id=grid.id)
view = handler.get_view(view_id=grid.id)
assert view.id == grid.id
assert view.name == grid.name
@ -41,7 +38,7 @@ def test_get_view(data_fixture):
assert not view.filters_disabled
assert isinstance(view, View)
view = handler.get_view(user=user, view_id=grid.id, view_model=GridView)
view = handler.get_view(view_id=grid.id, view_model=GridView)
assert view.id == grid.id
assert view.name == grid.name
@ -52,7 +49,7 @@ def test_get_view(data_fixture):
# If the error is raised we know for sure that the query has resolved.
with pytest.raises(AttributeError):
handler.get_view(
user=user, view_id=grid.id,
view_id=grid.id,
base_queryset=View.objects.prefetch_related('UNKNOWN')
)
@ -196,6 +193,15 @@ def test_update_grid_view_field_options(send_mock, data_fixture):
}
)
with pytest.raises(UserNotInGroupError):
ViewHandler().update_grid_view_field_options(
user=data_fixture.create_user(),
grid_view=grid_view,
field_options={
'strange_format': {'height': 150},
}
)
with pytest.raises(UnrelatedFieldError):
ViewHandler().update_grid_view_field_options(
user=user,

View file

@ -7,17 +7,40 @@ from django.db import connection
from baserow.core.handler import CoreHandler
from baserow.core.models import (
Group, GroupUser, GroupInvitation, Application, GROUP_USER_PERMISSION_ADMIN
Settings, Group, GroupUser, GroupInvitation, Application,
GROUP_USER_PERMISSION_ADMIN
)
from baserow.core.exceptions import (
UserNotInGroupError, ApplicationTypeDoesNotExist, GroupDoesNotExist,
GroupUserDoesNotExist, ApplicationDoesNotExist, UserInvalidGroupPermissionsError,
BaseURLHostnameNotAllowed, GroupInvitationEmailMismatch,
GroupInvitationDoesNotExist, GroupUserAlreadyExists
GroupInvitationDoesNotExist, GroupUserAlreadyExists, IsNotAdminError
)
from baserow.contrib.database.models import Database, Table
@pytest.mark.django_db
def test_get_settings():
settings = CoreHandler().get_settings()
assert isinstance(settings, Settings)
assert settings.allow_new_signups is True
@pytest.mark.django_db
def test_update_settings(data_fixture):
user_1 = data_fixture.create_user(is_staff=True)
user_2 = data_fixture.create_user()
with pytest.raises(IsNotAdminError):
CoreHandler().update_settings(user_2, allow_new_signups=False)
settings = CoreHandler().update_settings(user_1, allow_new_signups=False)
assert settings.allow_new_signups is False
settings = Settings.objects.all().first()
assert settings.allow_new_signups is False
@pytest.mark.django_db
def test_get_group(data_fixture):
user_1 = data_fixture.create_user()
@ -282,7 +305,7 @@ def test_get_group_invitation_by_token(data_fixture):
def test_get_group_invitation(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
user_3 = data_fixture.create_user()
data_fixture.create_user()
group_user = data_fixture.create_user_group(user=user)
data_fixture.create_user_group(
user=user_2,
@ -297,18 +320,9 @@ def test_get_group_invitation(data_fixture):
handler = CoreHandler()
with pytest.raises(GroupInvitationDoesNotExist):
handler.get_group_invitation(user=user, group_invitation_id=999999)
handler.get_group_invitation(group_invitation_id=999999)
with pytest.raises(UserNotInGroupError):
handler.get_group_invitation(user=user_3, group_invitation_id=invitation.id)
with pytest.raises(UserInvalidGroupPermissionsError):
handler.get_group_invitation(user=user_2, group_invitation_id=invitation.id)
invitation2 = handler.get_group_invitation(
user=user,
group_invitation_id=invitation.id
)
invitation2 = handler.get_group_invitation(group_invitation_id=invitation.id)
assert invitation.id == invitation2.id
assert invitation.invited_by_id == invitation2.invited_by_id
@ -596,25 +610,20 @@ def test_accept_group_invitation(data_fixture):
@pytest.mark.django_db
def test_get_application(data_fixture):
user_1 = data_fixture.create_user()
user_2 = data_fixture.create_user()
data_fixture.create_user()
application_1 = data_fixture.create_database_application(user=user_1)
handler = CoreHandler()
with pytest.raises(ApplicationDoesNotExist):
handler.get_application(user=user_1, application_id=0)
handler.get_application(application_id=0)
with pytest.raises(UserNotInGroupError):
handler.get_application(user=user_2, application_id=application_1.id)
application_1_copy = handler.get_application(
user=user_1, application_id=application_1.id
)
application_1_copy = handler.get_application(application_id=application_1.id)
assert application_1_copy.id == application_1.id
assert isinstance(application_1_copy, Application)
database_1_copy = handler.get_application(
user=user_1, application_id=application_1.id, base_queryset=Database.objects
application_id=application_1.id, base_queryset=Database.objects
)
assert database_1_copy.id == application_1.id
assert isinstance(database_1_copy, Database)

View file

@ -1,9 +1,10 @@
from io import BytesIO
import pytest
from baserow.core.utils import (
extract_allowed, set_allowed_attrs, to_pascal_case, to_snake_case,
remove_special_characters, dict_to_object, random_string, sha256_hash,
stream_size
stream_size, truncate_middle
)
@ -77,3 +78,19 @@ def test_sha256_hash():
def test_stream_size():
assert stream_size(BytesIO(b'test')) == 4
def test_truncate_middle():
assert truncate_middle('testtesttest', 13) == 'testtesttest'
assert truncate_middle('testtesttest', 12) == 'testtesttest'
assert truncate_middle('testabcdecho', 11) == 'test...echo'
assert truncate_middle('testabcdecho', 10) == 'test...cho'
assert truncate_middle('testabcdecho', 9) == 'tes...cho'
assert truncate_middle('testabcdecho', 8) == 'tes...ho'
assert truncate_middle('testabcdecho', 7) == 'te...ho'
assert truncate_middle('testabcdecho', 6) == 'te...o'
assert truncate_middle('testabcdecho', 5) == 't...o'
assert truncate_middle('testabcdecho', 4) == 't...'
with pytest.raises(ValueError):
truncate_middle('testtesttest', 3) == '...'

View file

@ -5,6 +5,8 @@ from freezegun import freeze_time
from itsdangerous.exc import SignatureExpired, BadSignature
from django.contrib.auth import get_user_model
from baserow.core.models import Group, GroupUser
from baserow.core.registries import plugin_registry
from baserow.contrib.database.models import (
@ -17,11 +19,14 @@ from baserow.core.exceptions import (
)
from baserow.core.handler import CoreHandler
from baserow.core.user.exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword
UserAlreadyExist, UserNotFound, InvalidPassword, DisabledSignupError
)
from baserow.core.user.handler import UserHandler
User = get_user_model()
@pytest.mark.django_db
def test_get_user(data_fixture):
user_1 = data_fixture.create_user(email='user1@localhost')
@ -42,12 +47,18 @@ def test_get_user(data_fixture):
@pytest.mark.django_db
def test_create_user():
def test_create_user(data_fixture):
plugin_mock = MagicMock()
plugin_registry.registry['mock'] = plugin_mock
user_handler = UserHandler()
data_fixture.update_settings(allow_new_signups=False)
with pytest.raises(DisabledSignupError):
user_handler.create_user('Test1', 'test@test.nl', 'password')
assert User.objects.all().count() == 0
data_fixture.update_settings(allow_new_signups=True)
user = user_handler.create_user('Test1', 'test@test.nl', 'password')
assert user.pk
assert user.first_name == 'Test1'

View file

@ -238,6 +238,20 @@ def test_upload_user_file(data_fixture, tmpdir):
assert UserFile.objects.all().count() == 7
image = Image.new('RGB', (1, 1), color='red')
image_bytes = BytesIO()
image.save(image_bytes, format='PNG')
user_file = handler.upload_user_file(
user,
'this_file_has_an_extreme_long_file_name_that_should_not_make_the_system_'
'fail_hard_when_trying_to_upload.png',
image_bytes,
storage=storage
)
assert user_file.original_name == 'this_file_has_an_extreme_long_f...hard_when_' \
'trying_to_upload.png'
@pytest.mark.django_db
@responses.activate

View file

@ -1,5 +1,6 @@
from faker import Faker
from .settings import SettingsFixtures
from .user import UserFixtures
from .user_file import UserFileFixtures
from .group import GroupFixtures
@ -10,6 +11,7 @@ from .field import FieldFixtures
from .token import TokenFixtures
class Fixtures(UserFixtures, UserFileFixtures, GroupFixtures, ApplicationFixtures,
TableFixtures, ViewFixtures, FieldFixtures, TokenFixtures):
class Fixtures(SettingsFixtures, UserFixtures, UserFileFixtures, GroupFixtures,
ApplicationFixtures, TableFixtures, ViewFixtures, FieldFixtures,
TokenFixtures):
fake = Faker()

7
backend/tests/fixtures/settings.py vendored Normal file
View file

@ -0,0 +1,7 @@
from baserow.core.models import Settings
class SettingsFixtures:
def update_settings(self, **kwargs):
settings, created = Settings.objects.update_or_create(defaults=kwargs)
return settings

View file

@ -1,5 +1,31 @@
# Changelog
## Released (2021-03-01)
* Redesigned the left sidebar.
* Fixed error when a very long user file name is provided when uploading.
* Upgraded DRF Spectacular dependency to the latest version.
* Added single select field form option validation.
* Changed all cookies to SameSite=lax.
* Fixed the "Ignored attempt to cancel a touchmove" error.
* Refactored the has_user everywhere such that the raise_error argument is used when
possible.
* Added Baserow Cloudron app.
* Fixed bug where a single select field without options could not be converted to
  another field.
* Fixed bug where the Editable component was not working if a parent had a
  user-select: none; property.
* Fail hard when the web-frontend can't reach the backend because of a network error.
* Use UTC time in the date picker.
* Refactored handler get_* methods so that they never check for permissions.
* Made it possible to configure SMTP settings via environment variables.
* Added field name to the public REST API docs.
* Made the public REST API docs compatible with smaller screens.
* Made it possible for the admin to disable new signups.
* Reduced the amount of queries when using the link row field.
* Respect the date format when converting to a date field.
* Added a field type filename contains filter.
## Released (2021-02-04)
* Upgraded web-frontend dependencies.

View file

@ -0,0 +1,21 @@
{
"id": "io.baserow.cloudronapp",
"title": "Baserow",
"author": "Bram Wiepjes",
"description": "file://DESCRIPTION.md",
"tagline": "Collaborate on any form of data",
"website": "https://baserow.io",
"contactEmail": "bram@baserow.io",
"icon": "file://logo.png",
"tags": ["no-code", "nocode", "database", "data", "collaborate", "airtable"],
"version": "1.0.0",
"healthCheckPath": "/_health",
"httpPort": 80,
"addons": {
"postgresql": {},
"sendmail": {},
"localstorage": {}
},
"memoryLimit": 2147483648,
"manifestVersion": 2
}

View file

@ -0,0 +1,3 @@
Baserow is an open source no-code database tool and Airtable alternative. Easily create
a relational database without any technical expertise. Build a table and define custom
fields like text, number, file and many more.

View file

@ -0,0 +1,54 @@
FROM cloudron/base:3.0.0@sha256:455c70428723e3a823198c57472785437eb6eab082e79b3ff04ea584faf46e92
RUN mkdir -p /app/code
WORKDIR /app/code
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
# We want to install Redis version 5.
RUN add-apt-repository ppa:chris-lea/redis-server
RUN apt-get update && \
apt install -y \
make curl gnupg2 nginx redis-server supervisor \
python3 build-essential libxslt-dev python3-dev python3-virtualenv \
python3-setuptools zlib1g-dev libffi-dev libssl-dev python3-pip \
&& rm -rf /var/cache/apt /var/lib/apt/lists
RUN service supervisor stop && service nginx stop
RUN rm -f /etc/nginx/sites-enabled/*
ADD start.sh /app/code
RUN git clone https://gitlab.com/bramw/baserow.git
RUN virtualenv -p python3 env
RUN env/bin/pip install --no-cache -r baserow/backend/requirements/base.txt
RUN (cd baserow/web-frontend && yarn install && yarn build)
RUN npm install -g mjml
RUN (mkdir -p /app/code/cloudron/cloudron && \
mkdir /app/data && \
touch /app/code/cloudron/cloudron/__init__.py)
ADD settings.py /app/code/cloudron/cloudron
ENV PYTHONPATH $PYTHONPATH:/app/code/baserow/backend/src:/app/code/cloudron
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
ENV TMPDIR=/run/temp
USER root
RUN chown -R cloudron:cloudron /app/code
RUN sed -i 's/daemonize no/daemonize yes\nbind 127.0.0.1/g' /etc/redis/redis.conf
RUN sed -i 's/dir \/var\/lib\/redis/dir \/app\/data\/redis/g' /etc/redis/redis.conf
RUN ln -sf /dev/stdout /var/log/redis/redis-server.log
ADD supervisor.conf /etc/supervisor/conf.d/supervisor.conf
RUN ln -sf /dev/stdout /var/log/supervisor/supervisord.log
RUN ln -sf /dev/stdout /app/code/supervisord.log
ADD nginx.conf /etc/nginx/sites-enabled/nginx.conf
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
CMD ["/app/code/start.sh"]

BIN
deploy/cloudron/logo.png Normal file

Binary file not shown.

After

(image error) Size: 6 KiB

View file

@ -0,0 +1,49 @@
client_body_temp_path /run/client_body;
proxy_temp_path /run/proxy_temp;
fastcgi_temp_path /run/fastcgi_temp;
scgi_temp_path /run/scgi_temp;
uwsgi_temp_path /run/uwsgi_temp;
server {
access_log /dev/stdout;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
include /etc/nginx/mime.types;
default_type application/octet-stream;
gzip on;
gzip_disable "msie6";
listen 80;
proxy_read_timeout 1800s;
client_max_body_size 0; # avoid HTTP 413 for large image uploads
# required to avoid HTTP 411: see Issue #1486 (https://github.com/dotcloud/docker/issues/1486)
chunked_transfer_encoding on;
location / {
proxy_pass http://127.0.0.1:3000;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
location ~ ^/(api|ws)/ {
proxy_pass http://127.0.0.1:8000;
proxy_pass_request_headers on;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
location /media/ {
root /app/data;
}
}

View file

@ -0,0 +1,26 @@
from baserow.config.settings.base import *
import os
MEDIA_ROOT = '/app/data/media'
MJML_BACKEND_MODE = 'cmd'
MJML_EXEC_CMD = 'mjml'
FROM_EMAIL = os.environ['CLOUDRON_MAIL_FROM']
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
EMAIL_USE_TLS = False
EMAIL_HOST = os.environ["CLOUDRON_MAIL_SMTP_SERVER"]
EMAIL_PORT = os.environ["CLOUDRON_MAIL_SMTP_PORT"]
EMAIL_HOST_USER = os.environ["CLOUDRON_MAIL_SMTP_USERNAME"]
EMAIL_HOST_PASSWORD = os.environ["CLOUDRON_MAIL_SMTP_PASSWORD"]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ['CLOUDRON_POSTGRESQL_DATABASE'],
'USER': os.environ['CLOUDRON_POSTGRESQL_USERNAME'],
'PASSWORD': os.environ['CLOUDRON_POSTGRESQL_PASSWORD'],
'HOST': os.environ['CLOUDRON_POSTGRESQL_HOST'],
'PORT': os.environ['CLOUDRON_POSTGRESQL_PORT'],
}
}

18
deploy/cloudron/start.sh Executable file
View file

@ -0,0 +1,18 @@
#!/bin/bash
set -eu
if [[ ! -f /app/data/.secret ]]; then
echo "export SECRET_KEY=$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)" > /app/data/.secret
fi
source /app/data/.secret
mkdir -p /app/data/redis
echo "==> Executing database migrations"
/app/code/env/bin/python /app/code/baserow/backend/src/baserow/manage.py migrate --settings=cloudron.settings
chown -R cloudron:cloudron /app/data
echo "==> Starting"
exec /usr/bin/supervisord --configuration /etc/supervisor/conf.d/supervisor.conf

View file

@ -0,0 +1,64 @@
[supervisord]
nodaemon = true
logfile=/dev/null
logfile_maxbytes=0
environment =
DJANGO_SETTINGS_MODULE='cloudron.settings',
REDIS_HOST='localhost',
PRIVATE_BACKEND_URL='http://localhost:8000',
PUBLIC_WEB_FRONTEND_URL='https://%(ENV_CLOUDRON_APP_DOMAIN)s',
PUBLIC_BACKEND_URL='https://%(ENV_CLOUDRON_APP_DOMAIN)s'
[program:redis]
command=redis-server /etc/redis/redis.conf
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
autostart=true
autorestart=true
[program:gunicorn]
user=cloudron
directory=/app/code/baserow
command=/app/code/env/bin/gunicorn -w 3 -b 127.0.0.1:8000 -k uvicorn.workers.UvicornWorker baserow.config.asgi:application --log-level=debug
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
autostart=true
autorestart=true
[program:worker]
user=cloudron
directory=/app/code/baserow
command=/app/code/env/bin/celery -A baserow worker -l INFO
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
autostart=true
autorestart=true
[program:nuxt]
user=cloudron
directory=/app/code/baserow/web-frontend
command=sh -c './node_modules/.bin/nuxt start --hostname 127.0.0.1 --config-file ./config/nuxt.config.demo.js'
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
autostart=true
autorestart=true
[program:nginx]
directory=/tmp
user=root
command=/usr/sbin/nginx -g "daemon off;"
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
autostart=true
autorestart=true
numprocs=1

Binary file not shown.

Before

(image error) Size: 134 KiB

After

(image error) Size: 119 KiB

View file

@ -5,7 +5,12 @@ Baserow is divided into two components, the **backend** and the
contains some documentation about those endpoints and how to use them. These endpoints
should never be used to show data on your own website because that would mean you have
to expose your credentials or JWT token. They should only be used to make changes
in your data. In the future there are going to be features that enable you to expose
in your data. You can publicly expose your data in a safe way by creating a
[database token](https://api.baserow.io/api/redoc/#operation/create_database_token)
set the permissions and follow the automatically generated API docs at
https://baserow.io/api/docs.
In the future there are going to be features that enable you to expose
your data publicly in a safe way.
## OpenAPI spec

View file

@ -113,3 +113,17 @@ are accepted.
* `INITIAL_TABLE_DATA_LIMIT` (default `null`): Indicates the initial table data limit.
If for example 100 is provided then it will not be possible to import a CSV file with
more than 100 rows.
* `REDIS_HOST` (default `redis`): The hostname of the Redis server.
* `REDIS_PORT` (default `6379`): The port of the Redis server.
* `REDIS_USER` (default ``): The username of the Redis server.
* `REDIS_PASSWORD` (default ``): The password of the Redis server.
* `REDIS_PROTOCOL` (default `redis`): The redis protocol. Can either be `redis` or
`rediss`.
* `EMAIL_SMTP` (default ``): Providing anything other than an empty string will enable
SMTP email.
* `EMAIL_SMTP_HOST` (default `localhost`): The hostname of the SMTP server.
* `EMAIL_SMPT_USE_TLS` (default ``): Providing anything other than an empty string will
enable connecting to the SMTP server via TLS.
* `EMAIL_SMTP_PORT` (default `25`): The port of the SMTP server.
* `EMAIL_SMTP_USER` (default ``): The username for the SMTP server.
* `EMAIL_SMTP_PASSWORD` (default ``): The password of the SMTP server.

View file

@ -0,0 +1,67 @@
# Installation on Cloudron
Cloudron is a complete solution for running apps on your server and keeping them
up-to-date and secure. If you don't have Cloudron installed on a server you can follow
the [installation instructions here](https://docs.cloudron.io/installation/). Once
you have Cloudron running you can follow the steps below to install the Baserow app.
> Basic experience with the Cloudron CLI is required.
## Install Cloudron CLI
The Cloudron CLI can be installed on Linux/Mac using the following command. More
information about installing can be found on their website at
[https://docs.cloudron.io/custom-apps/cli/](https://docs.cloudron.io/custom-apps/cli/).
```
$ sudo npm install -g cloudron
```
## Installing Baserow
If you have not already logged into your Cloudron platform, you can do so by
executing the following command.
```
$ cloudron login my.{YOUR_DOMAIN}
```
When you have successfully logged in, you need to clone the latest Baserow repository
to your machine. This contains the Cloudron manifest file that you need when installing
the app.
```
$ git clone https://gitlab.com/bramw/baserow.git
$ cd baserow/deploy/cloudron
```
After that you can install the Baserow Cloudron app by executing the following commands.
```
$ cloudron install -l baserow.{YOUR_DOMAIN} --image registry.gitlab.com/bramw/baserow/cloudron:1.0.0
App is being installed.
...
App is installed.
```
> All the available versions can be found here:
> [https://gitlab.com/bramw/baserow/container_registry/1692077](https://gitlab.com/bramw/baserow/container_registry/1692077)
When the installation has finished you can visit your domain and create a new account
from there.
## Updating
When a new Baserow version becomes available you can easily update to that version.
First you need to figure out what your app id is. You can do so by executing the
`cloudron list` command. After that you can execute the following command to update to
the latest version.
```
cloudron update --app {YOUR_APP_ID} --image registry.gitlab.com/bramw/baserow/cloudron:1.0.0
```
> Note that you must replace the image with the most recent image of Baserow. The
> latest version can be found here:
> [https://gitlab.com/bramw/baserow/container_registry/1692077](https://gitlab.com/bramw/baserow/container_registry/1692077)

View file

@ -29,6 +29,8 @@ Need some help with setting things up?
`docker-compose`.
* [Install on Ubuntu](./guides/installation/install-on-ubuntu.md): A step by step guide
on how to install Baserow on an Ubuntu server.
* [Install on Cloudron](guides/installation/install-on-cloudron.md): Instructions on
how to manually install Baserow on Cloudron.
## Development

View file

@ -1,4 +1,4 @@
FROM python:3.6
FROM python:3.7
ADD ./baserow /baserow
ADD ./{{ cookiecutter.project_slug }} /{{ cookiecutter.project_slug }}

View file

@ -10,6 +10,6 @@ stylelint:
lint: eslint stylelint
jest:
yarn run jest || exit;
yarn run jest-all || exit;
test: jest

View file

@ -0,0 +1,78 @@
import { Registerable } from '@baserow/modules/core/registry'
/**
* An admin type is visible in the sidebar under the admin menu item. All
 * registered admin types are visible in the sidebar to admins, and when an
 * admin clicks on one they are redirected to the route related to the admin type.
*/
export class AdminType extends Registerable {
/**
* The font awesome 5 icon name that is used as convenience for the user to
* recognize admin types. The icon will for example be displayed in the
* sidebar. If you for example want the database icon, you must return
* 'database' here. This will result in the classname 'fas fa-database'.
*/
getIconClass() {
return null
}
/**
* A human readable name of the admin type. This will be shown in the sidebar
* if the user is an admin.
*/
getName() {
return null
}
getRouteName() {
throw new Error('The route name of an admin type must be set.')
}
constructor() {
super()
this.type = this.getType()
this.iconClass = this.getIconClass()
this.name = this.getName()
this.routeName = this.getRouteName()
if (this.type === null) {
throw new Error('The type name of an admin type must be set.')
}
if (this.iconClass === null) {
throw new Error('The icon class of an admin type must be set.')
}
if (this.name === null) {
throw new Error('The name of an admin type must be set.')
}
}
/**
* @return object
*/
serialize() {
return {
type: this.type,
iconClass: this.iconClass,
name: this.name,
routeName: this.routeName,
}
}
}
export class SettingsAdminType extends AdminType {
static getType() {
return 'settings'
}
getIconClass() {
return 'cogs'
}
getName() {
return 'Settings'
}
getRouteName() {
return 'admin-settings'
}
}

View file

@ -0,0 +1,48 @@
.admin-settings {
padding: 30px;
}
.admin-settings__group {
&:not(:last-child) {
padding-bottom: 30px;
margin-bottom: 30px;
border-bottom: solid 1px $color-neutral-200;
}
}
.admin-settings__group-title {
font-size: 20px;
margin-bottom: 30px;
}
.admin-settings__item {
display: flex;
&:not(:last-child) {
margin-bottom: 30px;
}
}
.admin-settings__label {
flex: 0 0 25%;
max-width: 340px;
min-width: 200px;
margin-right: 40px;
}
.admin-settings__name {
font-size: 14px;
font-weight: 700;
margin-bottom: 12px;
}
.admin-settings__description {
font-size: 13px;
color: $color-neutral-600;
line-height: 160%;
}
.admin-settings__control {
min-width: 0;
width: 100%;
}

View file

@ -5,9 +5,8 @@
@import 'form';
@import 'box';
@import 'layout';
@import 'menu';
@import 'sidebar';
@import 'tree';
@import 'sidebar';
@import 'header';
@import 'scrollbar';
@import 'modal';
@ -61,3 +60,4 @@
@import 'group_member';
@import 'separator';
@import 'quote';
@import 'admin_settings';

View file

@ -7,11 +7,15 @@
&::after {
content: '';
width: calc(45% - #{$api-docs-nav-width * 0.45});
width: calc(50% - #{$api-docs-nav-width * 0.5});
background-color: $color-neutral-600;
z-index: $api-docs-background-z-index;
@include absolute(0, 0, 0, auto);
@media screen and (max-width: $api-docs-breakpoint) {
display: none;
}
}
}
@ -98,16 +102,30 @@
.api-docs__item {
display: flex;
margin: 40px 0;
@media screen and (max-width: $api-docs-breakpoint) {
display: block;
}
}
.api-docs__left {
width: 55%;
width: 50%;
padding: 0 34px;
@media screen and (max-width: $api-docs-breakpoint) {
width: 100%;
}
}
.api-docs__right {
width: 45%;
width: 50%;
padding: 0 20px;
@media screen and (max-width: $api-docs-breakpoint) {
width: 100%;
padding: 0 34px;
margin: 20px 0;
}
}
.api-docs__content {

View file

@ -3,12 +3,21 @@
font-size: 18px;
font-weight: 700;
margin: 20px 0;
@media screen and (max-width: $api-docs-breakpoint) {
color: $color-primary-900;
}
}
.api-docs__example {
width: 100%;
min-width: 0;
position: relative;
padding: 16px;
background-color: $color-neutral-100;
&.api-docs__example--with-padding {
padding: 16px;
}
}
.api-docs__copy {
@ -65,11 +74,41 @@
.api-docs__example-type {
width: 200px;
margin-bottom: 20px;
padding: 16px 16px 0 16px;
}
.api-docs__example-content-container {
display: flex;
}
.api-docs__example-content-side {
position: relative;
flex: 0 0 120px;
padding: 16px 0;
background-color: $color-neutral-200;
}
.api-docs__example-content-line {
@extend %ellipsis;
margin-top: 16px;
padding: 0 10px;
font-size: 13px;
font-family: monospace;
color: $color-neutral-700;
@include absolute(auto, 0, auto, 0);
}
.api-docs__example-content-wrapper {
padding: 16px;
width: 100%;
min-width: 0;
}
.api-docs__example-content {
margin: 0;
line-height: 160%;
line-height: 21px;
color: $color-neutral-700;
overflow-x: auto;
}

View file

@ -27,8 +27,13 @@
}
.api-docs__parameter-name {
flex: 0 140px;
margin-right: 20px;
flex: 0 200px;
margin: 0 20px 20px 0;
}
.api-docs__parameter-visible-name {
color: $color-neutral-700;
margin-left: 4px;
}
.api-docs__parameter-optional {

View file

@ -3,14 +3,21 @@
}
.field-single-select__dropdown-link {
display: block;
padding: 6px 10px;
display: flex;
align-items: center;
padding: 0 10px;
height: 32px;
&:hover {
text-decoration: none;
}
}
.field-single-select__dropdown-selected {
display: flex;
align-items: center;
}
.field-single-select__dropdown-option {
@extend %ellipsis;
@ -21,8 +28,4 @@
max-width: 100%;
@include fixed-height(20px, 12px);
&.field-single-select__dropdown-option--align-32 {
margin-top: 5px;
}
}

View file

@ -12,42 +12,33 @@
left: 0;
top: 0;
bottom: 0;
width: 52px;
width: 240px;
.layout--collapsed & {
width: 48px;
}
}
.layout__col-2 {
position: absolute;
z-index: $z-index-layout-col-2;
left: 52px;
top: 0;
bottom: 0;
width: 226px;
.layout--collapsed & {
display: none;
}
}
.layout__col-3 {
position: absolute;
z-index: $z-index-layout-col-3;
left: 278px;
left: 240px;
top: 0;
bottom: 0;
right: 0;
.layout--collapsed & {
left: 52px;
left: 48px;
}
}
.layout__col-3-scroll {
.layout__col-2-scroll {
@include absolute(0);
overflow: auto;
}
.layout__col-3-1 {
.layout__col-2-1 {
position: absolute;
z-index: $z-index-layout-col-3-1;
left: 0;
@ -56,7 +47,7 @@
height: 52px;
}
.layout__col-3-2 {
.layout__col-2-2 {
position: absolute;
z-index: $z-index-layout-col-3-2;
left: 0;
@ -64,11 +55,3 @@
right: 0;
bottom: 0;
}
.layout__uncollapse {
display: none;
.layout--collapsed & {
display: block;
}
}

View file

@ -1,64 +0,0 @@
.menu {
display: flex;
flex-direction: column;
justify-content: space-between;
background-color: $color-primary-600;
color: $white;
}
.menu__items {
list-style: none;
padding: 0;
margin: 0;
}
.menu__item {
margin: 10px;
}
.menu__link {
position: relative;
display: block;
text-decoration: none;
border-radius: 3px;
color: $white;
@include center-text(32px, 16px);
&:hover {
background-color: rgba(0, 0, 0, 0.1);
text-decoration: none;
}
&.active {
background-color: rgba(0, 0, 0, 0.3);
}
}
.menu__link-text {
display: none;
position: absolute;
left: 36px;
top: 50%;
margin-top: -10.5px;
background-color: $color-neutral-900;
border-radius: 3px;
padding: 0 4px;
white-space: nowrap;
font-weight: 400;
@include center-text(auto, 11px, 21px);
a:hover & {
display: block;
}
}
.menu__user-item {
border-radius: 100%;
background-color: $color-primary-500;
color: $white;
font-weight: 700;
@include center-text(32px, 13px);
}

View file

@ -1,63 +1,187 @@
.sidebar {
display: flex;
flex-direction: column;
justify-content: space-between;
background-color: $white;
border-right: 1px solid $color-neutral-200;
}
@include absolute(0);
.sidebar__content-wrapper {
overflow-y: auto;
background-color: $color-neutral-10;
border-right: solid 1px $color-neutral-200;
height: 100%;
.layout--collapsed & {
overflow: visible;
}
}
.sidebar__content {
padding: 12px;
.sidebar__inner {
position: relative;
min-height: 100%;
padding-bottom: 46px;
.layout--collapsed & {
padding-bottom: 56px;
}
}
.sidebar__footer {
flex-grow: 0;
flex-shrink: 0;
border-top: 1px solid $color-neutral-200;
}
.sidebar__collapse {
display: block;
padding: 0 16px;
color: $color-primary-900;
font-weight: bold;
@include fixed-height(47px, 14px);
.sidebar__user {
display: flex;
align-items: center;
width: 100%;
padding: 16px;
margin-bottom: 4px;
&:hover {
text-decoration: none;
background-color: $color-neutral-100;
text-decoration: none;
}
.layout--collapsed & {
padding: 8px;
}
}
.sidebar__title {
font-family: $logo-font-stack;
font-size: 20px;
font-weight: 700;
margin-bottom: 16px;
.sidebar__user-initials {
flex: 0 0 36px;
font-weight: bold;
color: $white;
background-color: $color-primary-500;
border-radius: 100%;
margin-right: 12px;
img {
max-width: 104px;
@include center-text(36px, 15px);
.layout--collapsed & {
flex-basis: 32px;
margin-right: 0;
@include center-text(32px, 12px);
}
}
.sidebar__group-title {
font-size: 14px;
font-weight: 700;
margin-bottom: 10px;
.sidebar__user-info {
width: 100%;
min-width: 0;
.layout--collapsed & {
display: none;
}
}
.sidebar__user-info-top {
display: flex;
width: 100%;
justify-items: center;
margin-bottom: 4px;
}
.sidebar__user-name {
@extend %ellipsis;
min-width: 0;
color: $color-primary-900;
}
.sidebar__user-icon {
flex: 0 0 20px;
text-align: center;
font-size: 12px;
color: $color-primary-900;
}
.sidebar__user-email {
@extend %ellipsis;
font-size: 12px;
color: $color-neutral-600;
}
.sidebar__nav {
padding: 0 10px;
.layout--collapsed & {
padding: 0 8px;
}
}
.sidebar__new-wrapper {
margin-top: 12px;
}
.sidebar__new {
font-size: 13px;
color: $color-neutral-300;
margin-left: 7px;
margin-left: 6px;
&:hover {
color: $color-neutral-500;
text-decoration: none;
}
}
.sidebar__foot {
@include absolute(auto, 0, 0, 0);
display: flex;
width: 100%;
padding: 0 16px 16px 16px;
align-items: center;
justify-content: space-between;
.layout--collapsed & {
flex-direction: column;
height: 56px;
padding: 0 8px 8px 8px;
}
}
.sidebar__collapse-link {
color: $color-neutral-700;
border-radius: 3px;
@include center-text(20px, 12px);
&:hover {
display: inline-block;
text-decoration: none;
background-color: $color-neutral-100;
}
}
.layout--collapsed {
// Some minor changes regarding the tree items within the collapsed sidebar.
.tree .sidebar__tree {
padding-left: 0;
}
.sidebar__action {
.tree__link {
text-align: center;
}
.tree__icon {
margin-right: 0;
}
.sidebar__item-name {
background-color: $color-neutral-900;
color: $white;
border-radius: 3px;
padding: 0 4px;
white-space: nowrap;
font-weight: 400;
display: none;
@include absolute(6px, auto, auto, 36px);
@include center-text(auto, 11px, 21px);
}
&:hover .sidebar__item-name {
display: block;
}
}
.sidebar__logo {
display: inline-block;
order: 2;
width: 18px;
overflow: hidden;
}
}

View file

@ -5,6 +5,7 @@
.tree__item & {
padding-left: 8px;
margin-top: 6px;
}
}
@ -35,16 +36,21 @@
.tree__action {
@extend %tree__size;
padding: 0 32px 0 6px;
padding: 0 6px;
border-radius: 3px;
&:hover {
&:not(.tree__action--disabled):hover {
background-color: $color-neutral-100;
}
.tree__item.active &:hover {
background-color: transparent;
}
&.tree__action--has-options,
&.tree__action--has-right-icon {
padding-right: 32px;
}
}
.tree__link {
@ -58,16 +64,28 @@
&:hover {
text-decoration: none;
}
&.tree__link--group {
font-weight: 600;
}
.tree__action--disabled &:hover {
cursor: inherit;
}
}
.tree__type {
.tree__icon {
@extend %tree__size;
text-align: center;
width: $fa-fw-width;
color: $color-neutral-300;
color: $color-neutral-700;
margin-right: 4px;
font-size: 11px;
&.tree__icon--type {
color: $color-neutral-300;
}
}
%tree_sub-size {
@ -141,7 +159,7 @@
}
}
.tree__options {
.tree__right-icon {
display: none;
position: absolute;
z-index: 1;
@ -149,14 +167,10 @@
top: 0;
text-align: center;
width: 32px;
color: $color-neutral-300;
color: $color-neutral-700;
height: inherit;
line-height: inherit;
&:hover {
color: $color-neutral-700;
}
:hover > & {
display: block;
}
@ -165,3 +179,13 @@
display: none;
}
}
.tree__options {
@extend .tree__right-icon;
color: $color-neutral-300;
&:hover {
color: $color-neutral-700;
}
}

View file

@ -67,6 +67,13 @@
display: block !important;
}
// Sometimes we forcefully want to reset the user-select property to initial
// because the element is for example being edited via a contenteditable attribute
// which does not work in combination with the user-select: none; property in Safari.
.forced-user-select-initial {
user-select: initial !important;
}
@keyframes spin {
0% {
transform: rotate(0);

View file

@ -19,6 +19,7 @@ $color-primary-700: #0f6499 !default;
$color-primary-800: #0a4970 !default;
$color-primary-900: #062e47 !default;
$color-neutral-10: #fcfcfc !default;
$color-neutral-50: #fafafa !default;
$color-neutral-100: #f5f5f5 !default;
$color-neutral-200: #d9dbde !default;
@ -86,13 +87,14 @@ $font-awesome-font-family: 'Font Awesome 5 Free', sans-serif !default;
$font-awesome-font-weight: 900 !default;
// API docs variables
$api-docs-nav-width: 240px !default;
$api-docs-nav-width: 220px !default;
$api-docs-header-height: 52px !default;
$api-docs-header-z-index: 4 !default;
$api-docs-databases-z-index: 5 !default;
$api-docs-nav-z-index: 3 !default;
$api-docs-body-z-index: 2 !default;
$api-docs-background-z-index: 1 !default;
$api-docs-breakpoint: 1100px !default;
// file field modal variables
$file-field-modal-head-height: 62px !default;

View file

@ -33,10 +33,15 @@ export default {
* @param vertical Bottom positions the context under the target.
* Top positions the context above the target.
* Over-bottom positions the context over and under the target.
* Over-top positions the context over and above the target
* @param horizontal Left aligns the context with the left side of the target.
* Right aligns the context with the right side of the target.
* @param offset The distance between the target element and the context.
* Over-top positions the context over and above the target.
* @param horizontal `left` aligns the context with the left side of the target.
* `right` aligns the context with the right side of the target.
* @param verticalOffset
* The offset indicates how many pixels the context is moved
* top from the original calculated position.
* @param horizontalOffset
* The offset indicates how many pixels the context is moved
* left from the original calculated position.
* @param value True if context must be shown, false if not and undefine
* will invert the current state.
*/
@ -44,7 +49,8 @@ export default {
target,
vertical = 'bottom',
horizontal = 'left',
offset = 10,
verticalOffset = 10,
horizontalOffset = 0,
value
) {
if (value === undefined) {
@ -52,7 +58,13 @@ export default {
}
if (value) {
this.show(target, vertical, horizontal, offset)
this.show(
target,
vertical,
horizontal,
verticalOffset,
horizontalOffset
)
} else {
this.hide()
}
@ -61,12 +73,24 @@ export default {
* Calculate the position, show the context menu and register a click event on the
* body to check if the user has clicked outside the context.
*/
show(target, vertical, horizontal, offset) {
show(target, vertical, horizontal, verticalOffset, horizontalOffset) {
const isElementOrigin = isDomElement(target)
const updatePosition = () => {
const css = isElementOrigin
? this.calculatePositionElement(target, vertical, horizontal, offset)
: this.calculatePositionFixed(target, vertical, horizontal, offset)
? this.calculatePositionElement(
target,
vertical,
horizontal,
verticalOffset,
horizontalOffset
)
: this.calculatePositionFixed(
target,
vertical,
horizontal,
verticalOffset,
horizontalOffset
)
// Set the calculated positions of the context.
for (const key in css) {
@ -132,7 +156,13 @@ export default {
* figure out the correct position, so in that case we force the element to be
* visible.
*/
calculatePositionElement(target, vertical, horizontal, offset) {
calculatePositionElement(
target,
vertical,
horizontal,
verticalOffset,
horizontalOffset
) {
const visible =
window.getComputedStyle(target).getPropertyValue('display') !== 'none'
@ -151,15 +181,29 @@ export default {
}
// Calculate if top, bottom, left and right positions are possible.
const canTop = targetRect.top - contextRect.height - offset > 0
const canTop = targetRect.top - contextRect.height - verticalOffset > 0
const canBottom =
window.innerHeight - targetRect.bottom - contextRect.height - offset > 0
const canOverTop = targetRect.bottom - contextRect.height - offset > 0
window.innerHeight -
targetRect.bottom -
contextRect.height -
verticalOffset >
0
const canOverTop =
targetRect.bottom - contextRect.height - verticalOffset > 0
const canOverBottom =
window.innerHeight - targetRect.bottom - contextRect.height - offset > 0
const canRight = targetRect.right - contextRect.width > 0
window.innerHeight -
targetRect.bottom -
contextRect.height -
verticalOffset >
0
const canRight =
targetRect.right - contextRect.width - horizontalOffset > 0
const canLeft =
window.innerWidth - targetRect.left - contextRect.width > 0
window.innerWidth -
targetRect.left -
contextRect.width -
horizontalOffset >
0
// If bottom, top, left or right doesn't fit, but their opposite does we switch to
// that.
@ -189,27 +233,29 @@ export default {
// Calculate the correct positions for horizontal and vertical values.
if (horizontal === 'left') {
positions.left = targetRect.left
positions.left = targetRect.left + horizontalOffset
}
if (horizontal === 'right') {
positions.right = window.innerWidth - targetRect.right
positions.right =
window.innerWidth - targetRect.right - horizontalOffset
}
if (vertical === 'bottom') {
positions.top = targetRect.bottom + offset
positions.top = targetRect.bottom + verticalOffset
}
if (vertical === 'top') {
positions.bottom = window.innerHeight - targetRect.top + offset
positions.bottom = window.innerHeight - targetRect.top + verticalOffset
}
if (vertical === 'over-bottom') {
positions.top = targetRect.top + offset
positions.top = targetRect.top + verticalOffset
}
if (vertical === 'over-top') {
positions.bottom = window.innerHeight - targetRect.bottom + offset
positions.bottom =
window.innerHeight - targetRect.bottom + verticalOffset
}
return positions

View file

@ -2,6 +2,7 @@
<span
ref="editable"
:contenteditable="editing"
:class="{ 'forced-user-select-initial': editing }"
@input="update"
@keydown="keydown"
@focusout="change"

View file

@ -8,7 +8,7 @@
</a>
</li>
<li>
<a @click="$refs.groupMembersModal.show()">
<a @click=";[$refs.groupMembersModal.show(), hide()]">
<i class="context__menu-icon fas fa-fw fa-users"></i>
Members
</a>

View file

@ -1,30 +1,215 @@
<template>
<div>
<div v-if="hasSelectedGroup">
<div class="sidebar__group-title">{{ selectedGroup.name }}</div>
<ul class="tree">
<SidebarApplication
v-for="application in applications"
:key="application.id"
:application="application"
></SidebarApplication>
</ul>
<div class="sidebar">
<div class="sidebar__inner">
<a
ref="createApplicationContextLink"
class="sidebar__new"
ref="userContextAnchor"
class="sidebar__user"
@click="
$refs.createApplicationContext.toggle(
$refs.createApplicationContextLink
$refs.userContext.toggle(
$refs.userContextAnchor,
'bottom',
'left',
isCollapsed ? -4 : -10,
isCollapsed ? 8 : 16
)
"
>
<i class="fas fa-plus"></i>
Create new
<div class="sidebar__user-initials">
{{ name | nameAbbreviation }}
</div>
<div class="sidebar__user-info">
<div class="sidebar__user-info-top">
<div class="sidebar__user-name">{{ name }}</div>
<div class="sidebar__user-icon">
<i class="fas fa-caret-down"></i>
</div>
</div>
<div class="sidebar__user-email">{{ email }}</div>
</div>
</a>
<CreateApplicationContext
ref="createApplicationContext"
:group="selectedGroup"
></CreateApplicationContext>
<Context ref="userContext">
<div class="context__menu-title">{{ name }}</div>
<ul class="context__menu">
<li>
<a
@click="
;[
$refs.settingsModal.show('password'),
$refs.userContext.hide(),
]
"
>
<i class="context__menu-icon fas fa-fw fa-cogs"></i>
Settings
</a>
<SettingsModal ref="settingsModal"></SettingsModal>
</li>
<li>
<a @click="logoff()">
<i class="context__menu-icon fas fa-fw fa-sign-out-alt"></i>
Logoff
</a>
</li>
</ul>
</Context>
<div class="sidebar__nav">
<ul class="tree">
<li
class="tree__item"
:class="{
active: $route.matched.some(({ name }) => name === 'dashboard'),
}"
>
<div class="tree__action sidebar__action">
<nuxt-link :to="{ name: 'dashboard' }" class="tree__link">
<i class="tree__icon fas fa-tachometer-alt"></i>
<span class="sidebar__item-name">Dashboard</span>
</nuxt-link>
</div>
</li>
<li v-if="isStaff" class="tree__item">
<div
class="tree__action sidebar__action"
:class="{ 'tree__action--disabled': isAdminPage }"
>
<a class="tree__link" @click.prevent="admin()">
<i class="tree__icon fas fa-users-cog"></i>
<span class="sidebar__item-name">Admin</span>
</a>
</div>
<ul v-show="isAdminPage" class="tree sidebar__tree">
<li
v-for="adminType in adminTypes"
:key="adminType.type"
class="tree__item"
:class="{
active: $route.matched.some(
({ name }) => name === adminType.routeName
),
}"
>
<div class="tree__action sidebar__action">
<nuxt-link
:to="{ name: adminType.routeName }"
class="tree__link"
>
<i
class="tree__icon fas"
:class="'fa-' + adminType.iconClass"
></i>
<span class="sidebar__item-name">{{ adminType.name }}</span>
</nuxt-link>
</div>
</li>
</ul>
</li>
<template v-if="hasSelectedGroup && !isCollapsed">
<li class="tree__item margin-top-2">
<div class="tree__action">
<a
ref="groupSelectToggle"
class="tree__link tree__link--group"
@click="
$refs.groupSelect.toggle(
$refs.groupSelectToggle,
'bottom',
'left',
0
)
"
>{{ selectedGroup.name }}</a
>
<GroupsContext ref="groupSelect"></GroupsContext>
</div>
</li>
<li class="tree__item">
<div class="tree__action">
<a class="tree__link" @click="$refs.groupMembersModal.show()">
<i class="tree__icon tree__icon--type fas fa-users"></i>
Invite others
</a>
<GroupMembersModal
ref="groupMembersModal"
:group="selectedGroup"
></GroupMembersModal>
</div>
</li>
<ul class="tree">
<SidebarApplication
v-for="application in applications"
:key="application.id"
:application="application"
></SidebarApplication>
</ul>
<li class="sidebar__new-wrapper">
<a
ref="createApplicationContextLink"
class="sidebar__new"
@click="
$refs.createApplicationContext.toggle(
$refs.createApplicationContextLink
)
"
>
<i class="fas fa-plus"></i>
Create new
</a>
</li>
<CreateApplicationContext
ref="createApplicationContext"
:group="selectedGroup"
></CreateApplicationContext>
</template>
<template v-else-if="!hasSelectedGroup && !isCollapsed">
<li v-if="groups.length === 0" class="tree_item margin-top-2">
<p>You dont have any groups.</p>
</li>
<li
v-for="(group, index) in groups"
:key="group.id"
class="tree__item"
:class="{ 'margin-top-2': index === 0 }"
>
<div class="tree__action tree__action--has-right-icon">
<a
class="tree__link tree__link--group"
@click="$store.dispatch('group/select', group)"
>{{ group.name }}</a
>
<i class="tree__right-icon fas fa-arrow-right"></i>
</div>
</li>
<li class="sidebar__new-wrapper">
<a class="sidebar__new" @click="$refs.createGroupModal.show()">
<i class="fas fa-plus"></i>
Create group
</a>
</li>
<CreateGroupModal ref="createGroupModal"></CreateGroupModal>
</template>
</ul>
</div>
<div class="sidebar__foot">
<div class="sidebar__logo">
<img
height="14"
src="@baserow/modules/core/static/img/logo.svg"
alt="Baserow logo"
/>
</div>
<a
class="sidebar__collapse-link"
@click="$store.dispatch('sidebar/toggleCollapsed')"
>
<i
class="fas"
:class="{
'fa-angle-double-right': isCollapsed,
'fa-angle-double-left': !isCollapsed,
}"
></i>
</a>
</div>
</div>
</div>
</template>
@ -32,14 +217,22 @@
<script>
import { mapGetters, mapState } from 'vuex'
import SettingsModal from '@baserow/modules/core/components/settings/SettingsModal'
import SidebarApplication from '@baserow/modules/core/components/sidebar/SidebarApplication'
import CreateApplicationContext from '@baserow/modules/core/components/application/CreateApplicationContext'
import GroupsContext from '@baserow/modules/core/components/group/GroupsContext'
import CreateGroupModal from '@baserow/modules/core/components/group/CreateGroupModal'
import GroupMembersModal from '@baserow/modules/core/components/group/GroupMembersModal'
export default {
name: 'Sidebar',
components: {
SettingsModal,
CreateApplicationContext,
SidebarApplication,
GroupsContext,
CreateGroupModal,
GroupMembersModal,
},
computed: {
/**
@ -51,14 +244,55 @@ export default {
this.selectedGroup
)
},
adminTypes() {
return this.$registry.getAll('admin')
},
/**
* Indicates whether the current user is visiting an admin page.
*/
isAdminPage() {
return Object.values(this.adminTypes).some((adminType) => {
return this.$route.matched.some(
({ name }) => name === adminType.routeName
)
})
},
...mapState({
allApplications: (state) => state.application.items,
groups: (state) => state.group.items,
selectedGroup: (state) => state.group.selected,
}),
...mapGetters({
isLoading: 'application/isLoading',
isStaff: 'auth/isStaff',
name: 'auth/getName',
email: 'auth/getUsername',
hasSelectedGroup: 'group/hasSelected',
isCollapsed: 'sidebar/isCollapsed',
}),
},
methods: {
logoff() {
this.$store.dispatch('auth/logoff')
this.$nuxt.$router.push({ name: 'login' })
},
/**
* Called when the user clicks on the admin menu. Because there isn't an
* admin page it will navigate to the route of the first registered admin
* type.
*/
admin() {
// If the user is already on an admin page we don't have to do anything because
// the link is disabled.
if (this.isAdminPage) {
return
}
const types = Object.values(this.adminTypes)
if (types.length > 0) {
this.$nuxt.$router.push({ name: types[0].routeName })
}
},
},
}
</script>

Some files were not shown because too many files have changed in this diff Show more