1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-13 16:49:07 +00:00

Merge branch 'develop' of gitlab.com:bramw/baserow

This commit is contained in:
Bram Wiepjes 2020-12-01 14:45:35 +01:00
commit d4b5df82b4
161 changed files with 5480 additions and 588 deletions
README.md
backend
Dockerfile.demoMakefile
requirements
setup.py
src/baserow
tests
changelog.md
docs
assets
getting-started
guides/installation
plugin-boilerplate/{{ cookiecutter.project_slug }}

View file

@ -113,3 +113,5 @@ The official repository can be found at https://gitlab.com/bramw/baserow.
The changelog can be found [here](./changelog.md).
Become a GitHub Sponsor [here](https://github.com/sponsors/bram2w).
Community chat via https://gitter.im/bramw-baserow/community.

View file

@ -1,7 +1,7 @@
FROM python:3.6
ADD . /backend
RUN mkdir -p /media
WORKDIR /backend
ENV PYTHONPATH $PYTHONPATH:/backend/src

View file

@ -6,7 +6,7 @@ install-dev-dependencies:
pip install -r requirements/dev.txt
lint:
flake8 src/baserow || exit;
flake8 src tests || exit;
test:
pytest tests || exit;

View file

@ -8,6 +8,7 @@ ipython==7.13.0
Faker==4.0.2
gunicorn==20.0.4
django-mjml==0.9.0
requests==2.23.0
requests==2.25.0
itsdangerous==1.1.0
drf-spectacular==0.9.12
Pillow==8.0.1

View file

@ -2,3 +2,4 @@ flake8==3.7.9
pytest-django>=3.5.0
pytest-env==0.6.2
freezegun==0.3.15
responses==0.12.0

View file

@ -29,7 +29,7 @@ setup(
author='Bram Wiepjes (Baserow)',
author_email='bram@baserow.io',
license='MIT',
description='Baserow: open source online database web frontend.',
description='Baserow: open source online database backend.',
long_description='',
platforms=['linux'],
package_dir={'': 'src'},

View file

@ -5,6 +5,7 @@ from drf_spectacular.views import SpectacularJSONAPIView, SpectacularRedocView
from baserow.core.registries import plugin_registry, application_type_registry
from .user import urls as user_urls
from .user_files import urls as user_files_urls
from .groups import urls as group_urls
from .applications import urls as application_urls
@ -19,6 +20,7 @@ urlpatterns = [
name='redoc'
),
path('user/', include(user_urls, namespace='user')),
path('user-files/', include(user_files_urls, namespace='user_files')),
path('groups/', include(group_urls, namespace='groups')),
path('applications/', include(application_urls, namespace='applications'))
] + application_type_registry.api_urls + plugin_registry.api_urls

View file

@ -4,8 +4,8 @@ from rest_framework.status import HTTP_400_BAD_REQUEST
ERROR_ALREADY_EXISTS = 'ERROR_EMAIL_ALREADY_EXISTS'
ERROR_USER_NOT_FOUND = 'ERROR_USER_NOT_FOUND'
ERROR_INVALID_OLD_PASSWORD = 'ERROR_INVALID_OLD_PASSWORD'
ERROR_DOMAIN_URL_IS_NOT_ALLOWED = (
'ERROR_DOMAIN_URL_IS_NOT_ALLOWED',
ERROR_HOSTNAME_IS_NOT_ALLOWED = (
'ERROR_HOSTNAME_IS_NOT_ALLOWED',
HTTP_400_BAD_REQUEST,
'Only the domain of the web frontend is allowed.'
'Only the hostname of the web frontend is allowed.'
)

View file

@ -20,7 +20,7 @@ from baserow.api.errors import BAD_TOKEN_SIGNATURE, EXPIRED_TOKEN_SIGNATURE
from baserow.api.schemas import get_error_schema
from baserow.core.user.handler import UserHandler
from baserow.core.user.exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLDomainNotAllowed
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLHostnameNotAllowed
)
from .serializers import (
@ -30,7 +30,7 @@ from .serializers import (
)
from .errors import (
ERROR_ALREADY_EXISTS, ERROR_USER_NOT_FOUND, ERROR_INVALID_OLD_PASSWORD,
ERROR_DOMAIN_URL_IS_NOT_ALLOWED
ERROR_HOSTNAME_IS_NOT_ALLOWED
)
from .schemas import create_user_response_schema, authenticate_user_schema
@ -171,7 +171,7 @@ class SendResetPasswordEmailView(APIView):
204: None,
400: get_error_schema([
'ERROR_REQUEST_BODY_VALIDATION',
'ERROR_DOMAIN_URL_IS_NOT_ALLOWED'
'ERROR_HOSTNAME_IS_NOT_ALLOWED'
])
},
auth=[None],
@ -179,7 +179,7 @@ class SendResetPasswordEmailView(APIView):
@transaction.atomic
@validate_body(SendResetPasswordEmailBodyValidationSerializer)
@map_exceptions({
BaseURLDomainNotAllowed: ERROR_DOMAIN_URL_IS_NOT_ALLOWED
BaseURLHostnameNotAllowed: ERROR_HOSTNAME_IS_NOT_ALLOWED
})
def post(self, request, data):
"""

View file

@ -0,0 +1,30 @@
from rest_framework.status import (
HTTP_400_BAD_REQUEST, HTTP_413_REQUEST_ENTITY_TOO_LARGE
)
# API error tuples in the shape (error code, HTTP status, human readable
# message). Messages may contain `{e...}` placeholders that are formatted with
# the mapped exception instance by the `map_exceptions` decorator.
ERROR_INVALID_FILE = (
    'ERROR_INVALID_FILE',
    HTTP_400_BAD_REQUEST,
    'No file has been provided or the file is invalid.'
)

ERROR_FILE_SIZE_TOO_LARGE = (
    'ERROR_FILE_SIZE_TOO_LARGE',
    HTTP_413_REQUEST_ENTITY_TOO_LARGE,
    # `max_size_mb` is read from the raised exception when formatted.
    'The provided file is too large. Max {e.max_size_mb}MB is allowed.'
)

ERROR_FILE_URL_COULD_NOT_BE_REACHED = (
    'ERROR_FILE_URL_COULD_NOT_BE_REACHED',
    HTTP_400_BAD_REQUEST,
    'The provided URL could not be reached.'
)

ERROR_INVALID_USER_FILE_NAME_ERROR = (
    'ERROR_INVALID_USER_FILE_NAME_ERROR',
    HTTP_400_BAD_REQUEST,
    'The user file name {e.name} is invalid.'
)

ERROR_USER_FILE_DOES_NOT_EXIST = (
    'ERROR_USER_FILE_DOES_NOT_EXIST',
    HTTP_400_BAD_REQUEST,
    'The user file {e.name_or_id} does not exist.'
)

View file

@ -0,0 +1,63 @@
from rest_framework import serializers
from drf_spectacular.utils import extend_schema_field
from drf_spectacular.types import OpenApiTypes
from django.conf import settings
from django.core.files.storage import default_storage
from baserow.core.models import UserFile
from baserow.core.user_files.handler import UserFileHandler
class UserFileUploadViaURLRequestSerializer(serializers.Serializer):
    """Request body for uploading a user file by providing a source URL."""

    url = serializers.URLField()
class UserFileURLAndThumbnailsSerializerMixin(serializers.Serializer):
    """
    Serializer mixin that exposes a computed `url` and `thumbnails` field for
    an object holding a user file name.
    """

    url = serializers.SerializerMethodField()
    thumbnails = serializers.SerializerMethodField()

    def get_instance_attr(self, instance, name):
        # Hook for subclasses whose instances are not model objects and need a
        # different attribute lookup (e.g. dict access).
        return getattr(instance, name)

    @extend_schema_field(OpenApiTypes.URI)
    def get_url(self, instance):
        # Resolve the file's storage path and let the configured storage
        # backend build the public URL.
        name = self.get_instance_attr(instance, 'name')
        path = UserFileHandler().user_file_path(name)
        url = default_storage.url(path)
        return url

    def get_thumbnails(self, instance):
        # Thumbnails only exist for image files; `None` signals their absence.
        if not self.get_instance_attr(instance, 'is_image'):
            return None

        name = self.get_instance_attr(instance, 'name')

        # One entry per configured thumbnail variant in settings.USER_THUMBNAILS,
        # keyed by the variant name.
        return {
            thumbnail_name: {
                'url': default_storage.url(
                    UserFileHandler().user_file_thumbnail_path(
                        name,
                        thumbnail_name
                    )
                ),
                'width': size[0],
                'height': size[1]
            }
            for thumbnail_name, size in settings.USER_THUMBNAILS.items()
        }
class UserFileSerializer(UserFileURLAndThumbnailsSerializerMixin,
                         serializers.ModelSerializer):
    """Serializes a `UserFile` model instance including its URL and thumbnails."""

    name = serializers.SerializerMethodField()

    class Meta:
        model = UserFile
        fields = ('size', 'mime_type', 'is_image', 'image_width', 'image_height',
                  'uploaded_at', 'url', 'thumbnails', 'name', 'original_name')

    @extend_schema_field(OpenApiTypes.STR)
    def get_name(self, instance):
        # Exposed via a method field so the OpenAPI string type can be declared
        # explicitly.
        return instance.name

View file

@ -0,0 +1,13 @@
from django.conf.urls import url

from .views import (
    UploadFileView, UploadViaURLView
)


# Application namespace for this URL module. Was 'baserow.api.user', a
# copy-paste from the user URL module; it must be unique per app to avoid
# colliding with the user namespace when reversing URLs.
app_name = 'baserow.api.user_files'


urlpatterns = [
    url(r'^upload-file/$', UploadFileView.as_view(), name='upload_file'),
    url(r'^upload-via-url/$', UploadViaURLView.as_view(), name='upload_via_url'),
]

View file

@ -0,0 +1,11 @@
from rest_framework.exceptions import ValidationError
from baserow.core.user_files.models import UserFile
from baserow.core.user_files.exceptions import InvalidUserFileNameError
def user_file_name_validator(value):
    """
    Serializer field validator that checks whether the provided value is a
    valid user file name.

    :param value: The user file name that must be validated.
    :raises ValidationError: If the name cannot be deconstructed into its parts.
    """

    try:
        UserFile.deconstruct_name(value)
    except InvalidUserFileNameError as exc:
        # Chain explicitly so the original deconstruction failure stays visible.
        raise ValidationError(
            'The user file name is invalid.', code='invalid'
        ) from exc

View file

@ -0,0 +1,91 @@
from django.db import transaction
from rest_framework.parsers import MultiPartParser
from drf_spectacular.utils import extend_schema
from drf_spectacular.plumbing import build_object_type
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.schemas import get_error_schema
from baserow.core.user_files.exceptions import (
InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached
)
from baserow.core.user_files.handler import UserFileHandler
from .serializers import UserFileSerializer, UserFileUploadViaURLRequestSerializer
from .errors import (
ERROR_INVALID_FILE, ERROR_FILE_SIZE_TOO_LARGE, ERROR_FILE_URL_COULD_NOT_BE_REACHED
)
class UploadFileView(APIView):
    """Endpoint that accepts a direct multipart file upload from the user."""

    permission_classes = (IsAuthenticated,)
    parser_classes = (MultiPartParser,)

    @extend_schema(
        tags=['User files'],
        operation_id='upload_file',
        description=(
            'Uploads a file to Baserow by uploading the file contents directly. A '
            '`file` multipart is expected containing the file contents.'
        ),
        request=build_object_type(),
        responses={
            200: UserFileSerializer,
            400: get_error_schema(['ERROR_INVALID_FILE', 'ERROR_FILE_SIZE_TOO_LARGE'])
        }
    )
    @transaction.atomic
    @map_exceptions({
        InvalidFileStreamError: ERROR_INVALID_FILE,
        FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE
    })
    def post(self, request):
        """Uploads a file by uploading the contents directly."""

        # The multipart parser places uploads in `request.FILES`; without a
        # `file` part there is nothing to store.
        if 'file' not in request.FILES:
            raise InvalidFileStreamError('No file was provided.')

        file = request.FILES.get('file')
        user_file = UserFileHandler().upload_user_file(request.user, file.name, file)

        serializer = UserFileSerializer(user_file)
        return Response(serializer.data)
class UploadViaURLView(APIView):
    """Endpoint that stores a user file by downloading it from a provided URL."""

    permission_classes = (IsAuthenticated,)

    @extend_schema(
        tags=['User files'],
        operation_id='upload_via_url',
        description=(
            'Uploads a file to Baserow by downloading it from the provided URL.'
        ),
        request=UserFileUploadViaURLRequestSerializer,
        responses={
            200: UserFileSerializer,
            400: get_error_schema([
                'ERROR_INVALID_FILE',
                'ERROR_FILE_SIZE_TOO_LARGE',
                'ERROR_FILE_URL_COULD_NOT_BE_REACHED'
            ])
        }
    )
    @transaction.atomic
    @map_exceptions({
        InvalidFileStreamError: ERROR_INVALID_FILE,
        FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE,
        FileURLCouldNotBeReached: ERROR_FILE_URL_COULD_NOT_BE_REACHED
    })
    @validate_body(UserFileUploadViaURLRequestSerializer)
    def post(self, request, data):
        """Uploads a user file by downloading it from the provided URL."""

        # `data` has already been validated by the request serializer.
        url = data['url']
        user_file = UserFileHandler().upload_user_file_by_url(request.user, url)

        serializer = UserFileSerializer(user_file)
        return Response(serializer.data)

View file

@ -53,7 +53,7 @@ def map_exceptions(mapping):
if len(value) > 1 and value[1] is not None:
status_code = value[1]
if len(value) > 2 and value[2] is not None:
detail = value[2]
detail = value[2].format(e=e)
exc = APIException({
'error': error,
@ -78,26 +78,26 @@ def validate_data(serializer_class, data):
:rtype: dict
"""
def add(details, key, error_list):
if 'key' not in details:
details[key] = []
for error in error_list:
details[key].append({
def serialize_errors_recursive(error):
if isinstance(error, dict):
return {
key: serialize_errors_recursive(errors)
for key, errors in error.items()
}
elif isinstance(error, list):
return [
serialize_errors_recursive(errors)
for errors in error
]
else:
return {
'error': force_text(error),
'code': error.code
})
}
serializer = serializer_class(data=data)
if not serializer.is_valid():
detail = {}
for key, errors in serializer.errors.items():
if isinstance(errors, dict):
detail[key] = {}
for group_key, group_errors in errors.items():
add(detail[key], group_key, group_errors)
else:
add(detail, key, errors)
detail = serialize_errors_recursive(serializer.errors)
raise RequestBodyValidationException(detail)
return serializer.data

View file

@ -1,5 +1,6 @@
import os
import datetime
from urllib.parse import urlparse, urljoin
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@ -10,7 +11,7 @@ SECRET_KEY = os.getenv('SECRET_KEY', 'CHANGE_THIS_TO_SOMETHING_SECRET_IN_PRODUCT
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['localhost', 'backend', 'sandbox']
ALLOWED_HOSTS = ['localhost']
INSTALLED_APPS = [
'django.contrib.auth',
@ -156,6 +157,7 @@ SPECTACULAR_SETTINGS = {
'SERVE_INCLUDE_SCHEMA': False,
'TAGS': [
{'name': 'User'},
{'name': 'User files'},
{'name': 'Groups'},
{'name': 'Applications'},
{'name': 'Database tables'},
@ -171,18 +173,26 @@ SPECTACULAR_SETTINGS = {
DATABASE_ROUTERS = ('baserow.contrib.database.database_routers.TablesDatabaseRouter',)
# The storage must always overwrite existing files.
DEFAULT_FILE_STORAGE = 'baserow.core.storage.OverwriteFileSystemStorage'
MJML_BACKEND_MODE = 'tcpserver'
MJML_TCPSERVERS = [
(os.getenv('MJML_SERVER_HOST', 'mjml'), os.getenv('MJML_SERVER_PORT', 28101)),
(os.getenv('MJML_SERVER_HOST', 'mjml'), int(os.getenv('MJML_SERVER_PORT', 28101))),
]
PUBLIC_BACKEND_DOMAIN = os.getenv('PUBLIC_BACKEND_DOMAIN', 'localhost:8000')
PUBLIC_BACKEND_URL = os.getenv('PUBLIC_BACKEND_URL', 'http://localhost:8000')
PUBLIC_WEB_FRONTEND_DOMAIN = os.getenv('PUBLIC_WEB_FRONTEND_DOMAIN', 'localhost:3000')
PUBLIC_WEB_FRONTEND_URL = os.getenv('PUBLIC_WEB_FRONTEND_URL', 'http://localhost:3000')
PRIVATE_BACKEND_URL = os.getenv('PRIVATE_BACKEND_URL', 'http://backend:8000')
PUBLIC_BACKEND_HOSTNAME = urlparse(PUBLIC_BACKEND_URL).hostname
PUBLIC_WEB_FRONTEND_HOSTNAME = urlparse(PUBLIC_WEB_FRONTEND_URL).hostname
PRIVATE_BACKEND_HOSTNAME = urlparse(PRIVATE_BACKEND_URL).hostname
if PUBLIC_BACKEND_DOMAIN:
ALLOWED_HOSTS.append(PUBLIC_BACKEND_DOMAIN)
if PUBLIC_BACKEND_HOSTNAME:
ALLOWED_HOSTS.append(PUBLIC_BACKEND_HOSTNAME)
if PRIVATE_BACKEND_HOSTNAME:
ALLOWED_HOSTS.append(PRIVATE_BACKEND_HOSTNAME)
FROM_EMAIL = os.getenv('FROM_EMAIL', 'no-reply@localhost')
RESET_PASSWORD_TOKEN_MAX_AGE = 60 * 60 * 48 # 48 hours
@ -192,3 +202,19 @@ ROW_PAGE_SIZE_LIMIT = 200 # Indicates how many rows can be requested at once.
INITIAL_TABLE_DATA_LIMIT = None
if 'INITIAL_TABLE_DATA_LIMIT' in os.environ:
INITIAL_TABLE_DATA_LIMIT = int(os.getenv('INITIAL_TABLE_DATA_LIMIT'))
# URL path under which uploaded media is exposed; also joined onto the public
# backend URL to produce the default MEDIA_URL.
MEDIA_URL_PATH = '/media/'
MEDIA_URL = os.getenv('MEDIA_URL', urljoin(PUBLIC_BACKEND_URL, MEDIA_URL_PATH))
MEDIA_ROOT = os.getenv('MEDIA_ROOT', '/media')

# Indicates the directory where the user files and user thumbnails are stored.
USER_FILES_DIRECTORY = 'user_files'
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
USER_FILE_SIZE_LIMIT = 1024 * 1024 * 20  # 20MB

# Configurable thumbnails that are going to be generated when a user uploads an
# image file. Values are [width, height]; NOTE(review): 'tiny' uses None for
# the width — presumably scaled to keep the aspect ratio; confirm against the
# thumbnail generation code.
USER_THUMBNAILS = {
    'tiny': [None, 21],
    'small': [48, 48]
}

View file

@ -1,4 +1,5 @@
# Development settings: everything from base, with debugging enabled.
from .base import *  # noqa: F403, F401

DEBUG = True

# Print outgoing emails to the console instead of sending them during
# development.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

View file

@ -1 +1,6 @@
# Test settings: everything from base, with fixed user file directories and a
# single small thumbnail variant so tests are fast and deterministic.
from .base import *  # noqa: F403, F401

USER_FILES_DIRECTORY = 'user_files'
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
USER_THUMBNAILS = {'tiny': [21, 21]}

View file

@ -1,6 +1,8 @@
from django.urls import include
from django.conf.urls import url
from django.http import HttpResponse
from django.conf import settings
from django.conf.urls.static import static
from baserow.core.registries import plugin_registry
@ -12,4 +14,7 @@ def health(request):
urlpatterns = [
url(r'^api/', include('baserow.api.urls', namespace='api')),
url(r'^_health$', health, name='health_check')
] + plugin_registry.urls
] + plugin_registry.urls + static(
settings.MEDIA_URL_PATH,
document_root=settings.MEDIA_ROOT
)

View file

@ -19,3 +19,19 @@ ERROR_FIELD_NOT_IN_TABLE = (
HTTP_400_BAD_REQUEST,
'The provided field does not belong in the related table.'
)
# API error tuples (error code, HTTP status, message). The `{e...}`
# placeholders are formatted with the mapped exception by `map_exceptions`.
ERROR_ORDER_BY_FIELD_NOT_FOUND = (
    'ERROR_ORDER_BY_FIELD_NOT_FOUND',
    HTTP_400_BAD_REQUEST,
    'The field {e.field_name} was not found in the table.'
)
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE = (
    'ERROR_ORDER_BY_FIELD_NOT_POSSIBLE',
    HTTP_400_BAD_REQUEST,
    # This error is about ordering, so the message must say "sorting"; it
    # previously incorrectly said "filtering".
    'It is not possible to order by {e.field_name} because the field type '
    '{e.field_type} does not support sorting.'
)
ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE = (
    'ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE',
    HTTP_400_BAD_REQUEST,
    'The field type {e.field_type} is not compatible with the primary field.'
)

View file

@ -5,6 +5,8 @@ from drf_spectacular.types import OpenApiTypes
from rest_framework import serializers
from baserow.api.user_files.validators import user_file_name_validator
from baserow.api.user_files.serializers import UserFileURLAndThumbnailsSerializerMixin
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.models import Field
@ -71,3 +73,30 @@ class LinkRowValueSerializer(serializers.Serializer):
source=value_field_name,
required=False
)
class FileFieldRequestSerializer(serializers.Serializer):
visible_name = serializers.CharField(
required=False,
help_text='A visually editable name for the field.'
)
name = serializers.CharField(
required=True,
validators=[user_file_name_validator],
help_text='Accepts the name of the already uploaded user file.'
)
class FileFieldResponseSerializer(UserFileURLAndThumbnailsSerializerMixin,
serializers.Serializer):
visible_name = serializers.CharField()
name = serializers.CharField()
size = serializers.IntegerField()
mime_type = serializers.CharField()
is_image = serializers.BooleanField()
image_width = serializers.IntegerField()
image_height = serializers.IntegerField()
uploaded_at = serializers.DateTimeField()
def get_instance_attr(self, instance, name):
return instance[name]

View file

@ -13,7 +13,9 @@ from baserow.api.decorators import map_exceptions
from baserow.api.pagination import PageNumberPagination
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.schemas import get_error_schema
from baserow.api.user_files.errors import ERROR_USER_FILE_DOES_NOT_EXIST
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
@ -21,6 +23,12 @@ from baserow.contrib.database.api.rows.serializers import (
example_pagination_row_serializer_class
)
from baserow.contrib.database.api.tokens.errors import ERROR_NO_PERMISSION_TO_TABLE
from baserow.contrib.database.api.fields.errors import (
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE, ERROR_ORDER_BY_FIELD_NOT_FOUND
)
from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotFound, OrderByFieldNotPossible
)
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.exceptions import TableDoesNotExist
from baserow.contrib.database.rows.handler import RowHandler
@ -64,6 +72,15 @@ class RowsView(APIView):
type=OpenApiTypes.STR,
description='If provided only rows with data that matches the search '
'query are going to be returned.'
),
OpenApiParameter(
name='order_by',
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
description='Optionally the rows can be ordered by provided field ids '
'separated by comma. By default a field is ordered in '
'ascending (A-Z) order, but by prepending the field with '
'a \'-\' it can be ordered descending (Z-A). '
)
],
tags=['Database table rows'],
@ -86,7 +103,9 @@ class RowsView(APIView):
'ERROR_USER_NOT_IN_GROUP',
'ERROR_REQUEST_BODY_VALIDATION',
'ERROR_PAGE_SIZE_LIMIT',
'ERROR_INVALID_PAGE'
'ERROR_INVALID_PAGE',
'ERROR_ORDER_BY_FIELD_NOT_FOUND',
'ERROR_ORDER_BY_FIELD_NOT_POSSIBLE'
]),
401: get_error_schema(['ERROR_NO_PERMISSION_TO_TABLE']),
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
@ -95,7 +114,9 @@ class RowsView(APIView):
@map_exceptions({
UserNotInGroupError: ERROR_USER_NOT_IN_GROUP,
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE
})
def get(self, request, table_id):
"""
@ -108,12 +129,16 @@ class RowsView(APIView):
model = table.get_model()
search = request.GET.get('search')
order_by = request.GET.get('order_by')
queryset = model.objects.all().enhance_by_fields().order_by('id')
if search:
queryset = queryset.search_all_fields(search)
if order_by:
queryset = queryset.order_by_fields_string(order_by)
paginator = PageNumberPagination(limit_page_size=settings.ROW_PAGE_SIZE_LIMIT)
page = paginator.paginate_queryset(queryset, request, self)
serializer_class = get_row_serializer_class(model, RowSerializer,
@ -162,7 +187,8 @@ class RowsView(APIView):
@map_exceptions({
UserNotInGroupError: ERROR_USER_NOT_IN_GROUP,
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST
})
def post(self, request, table_id):
"""
@ -293,7 +319,8 @@ class RowView(APIView):
UserNotInGroupError: ERROR_USER_NOT_IN_GROUP,
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST
})
def patch(self, request, table_id, row_id):
"""

View file

@ -195,6 +195,7 @@ class TableView(APIView):
table = TableHandler().update_table(
request.user,
TableHandler().get_table(request.user, table_id),
base_queryset=Table.objects.select_for_update(),
name=data['name']
)
serializer = TableSerializer(table)

View file

@ -154,7 +154,11 @@ class TokenView(APIView):
def patch(self, request, data, token_id):
"""Updates the values of a token."""
token = TokenHandler().get_token(request.user, token_id)
token = TokenHandler().get_token(
request.user,
token_id,
base_queryset=Token.objects.select_for_update()
)
permissions = data.pop('permissions', None)
rotate_key = data.pop('rotate_key', False)

View file

@ -91,10 +91,11 @@ class GridViewFieldOptionsFieldFix(OpenApiSerializerFieldExtension):
class GridViewSerializer(serializers.ModelSerializer):
field_options = GridViewFieldOptionsField(required=False)
filters_disabled = serializers.BooleanField(required=False)
class Meta:
model = GridView
fields = ('field_options',)
fields = ('field_options', 'filters_disabled')
class GridViewFieldOptionsSerializer(serializers.ModelSerializer):

View file

@ -92,7 +92,7 @@ class ViewSerializer(serializers.ModelSerializer):
class Meta:
model = View
fields = ('id', 'name', 'order', 'type', 'table', 'filter_type', 'filters',
'sortings')
'sortings', 'filters_disabled')
extra_kwargs = {
'id': {
'read_only': True
@ -128,14 +128,15 @@ class CreateViewSerializer(serializers.ModelSerializer):
class Meta:
model = View
fields = ('name', 'type', 'filter_type')
fields = ('name', 'type', 'filter_type', 'filters_disabled')
class UpdateViewSerializer(serializers.ModelSerializer):
class Meta:
model = View
fields = ('name', 'filter_type')
fields = ('name', 'filter_type', 'filters_disabled')
extra_kwargs = {
'name': {'required': False},
'filter_type': {'required': False}
'filter_type': {'required': False},
'filters_disabled': {'required': False},
}

View file

@ -475,7 +475,11 @@ class ViewFilterView(APIView):
"""Updates the view filter if the user belongs to the group."""
handler = ViewHandler()
view_filter = handler.get_filter(request.user, view_filter_id)
view_filter = handler.get_filter(
request.user,
view_filter_id,
base_queryset=ViewFilter.objects.select_for_update()
)
if 'field' in data:
# We can safely assume the field exists because the
@ -695,7 +699,11 @@ class ViewSortView(APIView):
"""Updates the view sort if the user belongs to the group."""
handler = ViewHandler()
view_sort = handler.get_sort(request.user, view_sort_id)
view_sort = handler.get_sort(
request.user,
view_sort_id,
base_queryset=ViewSort.objects.select_for_update()
)
if 'field' in data:
# We can safely assume the field exists because the

View file

@ -45,7 +45,8 @@ class DatabaseConfig(AppConfig):
from .fields.field_types import (
TextFieldType, LongTextFieldType, URLFieldType, NumberFieldType,
BooleanFieldType, DateFieldType, LinkRowFieldType, EmailFieldType
BooleanFieldType, DateFieldType, LinkRowFieldType, EmailFieldType,
FileFieldType
)
field_type_registry.register(TextFieldType())
field_type_registry.register(LongTextFieldType())
@ -55,9 +56,11 @@ class DatabaseConfig(AppConfig):
field_type_registry.register(BooleanFieldType())
field_type_registry.register(DateFieldType())
field_type_registry.register(LinkRowFieldType())
field_type_registry.register(FileFieldType())
from .fields.field_converters import LinkRowFieldConverter
from .fields.field_converters import LinkRowFieldConverter, FileFieldConverter
field_converter_registry.register(LinkRowFieldConverter())
field_converter_registry.register(FileFieldConverter())
from .views.view_types import GridViewType
view_type_registry.register(GridViewType())

View file

@ -42,3 +42,28 @@ class LinkRowTableNotInSameDatabase(Exception):
"""
Raised when the desired link row table is not in the same database as the table.
"""
class OrderByFieldNotFound(Exception):
    """The requested order by field does not exist in the table."""

    def __init__(self, field_name=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember which field was requested so the API layer can report it.
        self.field_name = field_name
class OrderByFieldNotPossible(Exception):
    """Ordering by the given field is not supported by its field type."""

    def __init__(self, field_name=None, field_type=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep both pieces of context for the API error message.
        self.field_name = field_name
        self.field_type = field_type
class IncompatiblePrimaryFieldTypeError(Exception):
    """The primary field cannot be converted to the requested field type."""

    def __init__(self, field_type=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The offending field type, exposed for API error formatting.
        self.field_type = field_type

View file

@ -1,5 +1,5 @@
from .registries import FieldConverter
from .models import LinkRowField
from .models import LinkRowField, FileField
class RecreateFieldConverter(FieldConverter):
@ -35,3 +35,18 @@ class LinkRowFieldConverter(RecreateFieldConverter):
from_field.link_row_table_id != to_field.link_row_table_id
)
)
class FileFieldConverter(RecreateFieldConverter):
    """Recreates the column whenever a field changes to or from a file field."""

    type = 'file'

    def is_applicable(self, from_model, from_field, to_field):
        # Applies exactly when one side of the conversion is a FileField and
        # the other is not (boolean XOR of the two isinstance checks).
        return isinstance(from_field, FileField) != isinstance(to_field, FileField)

View file

@ -6,24 +6,33 @@ from dateutil.parser import ParserError
from datetime import datetime, date
from django.db import models
from django.contrib.postgres.fields import JSONField
from django.core.validators import URLValidator, EmailValidator
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware
from rest_framework import serializers
from baserow.contrib.database.api.fields.serializers import LinkRowValueSerializer
from baserow.core.models import UserFile
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from baserow.contrib.database.api.fields.serializers import (
LinkRowValueSerializer, FileFieldRequestSerializer, FileFieldResponseSerializer
)
from baserow.contrib.database.api.fields.errors import (
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE, ERROR_LINK_ROW_TABLE_NOT_PROVIDED
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE, ERROR_LINK_ROW_TABLE_NOT_PROVIDED,
ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE
)
from .handler import FieldHandler
from .registries import FieldType
from .models import (
NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, TextField, LongTextField, URLField,
NumberField, BooleanField, DateField, LinkRowField, EmailField
NumberField, BooleanField, DateField, LinkRowField, EmailField, FileField
)
from .exceptions import (
LinkRowTableNotInSameDatabase, LinkRowTableNotProvided,
IncompatiblePrimaryFieldTypeError
)
from .exceptions import LinkRowTableNotInSameDatabase, LinkRowTableNotProvided
class TextFieldType(FieldType):
@ -287,9 +296,11 @@ class LinkRowFieldType(FieldType):
}
api_exceptions_map = {
LinkRowTableNotProvided: ERROR_LINK_ROW_TABLE_NOT_PROVIDED,
LinkRowTableNotInSameDatabase: ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE
LinkRowTableNotInSameDatabase: ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE,
IncompatiblePrimaryFieldTypeError: ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE
}
can_sort_in_view = False
can_order_by = False
can_be_primary_field = False
def enhance_queryset(self, queryset, field, name):
"""
@ -587,3 +598,101 @@ class EmailFieldType(FieldType):
)"""
return super().get_alter_column_type_function(connection, instance)
class FileFieldType(FieldType):
    """Field type that stores a list of user file references as JSON."""

    type = 'file'
    model_class = FileField

    def prepare_value_for_db(self, instance, value):
        """
        Validates the provided list of file objects and replaces it with the
        serialized versions of the matching ``UserFile`` rows, preserving the
        provided order.

        :param instance: The field instance the value belongs to.
        :param value: A list of dicts each containing at least a `name` key.
        :raises ValidationError: If the value is not a list of dicts with a
            string `name` (and optionally a string `visible_name`).
        :raises UserFileDoesNotExist: If a provided name has no matching file.
        :return: The list of serialized user files.
        """

        if value is None:
            return []

        if not isinstance(value, list):
            raise ValidationError('The provided value must be a list.')

        if len(value) == 0:
            return []

        # Validates the provided object and extract the names from it. We need the name
        # to validate if the file actually exists and to get the 'real' properties
        # from it. The original `isinstance(o, object)` check was vacuously true
        # for every value; each entry must actually be a dict for `.get` to work.
        provided_files = []
        for o in value:
            if not isinstance(o, dict) or not isinstance(o.get('name'), str):
                raise ValidationError('Every provided value must at least contain '
                                      'the file name as `name`.')

            if 'visible_name' in o and not isinstance(o['visible_name'], str):
                raise ValidationError('The provided `visible_name` must be a string.')

            provided_files.append(o)

        # Create a list of the serialized UserFiles in the originally provided order
        # because that is also the order we need to store the serialized versions in.
        user_files = []
        queryset = UserFile.objects.all().name(*[f['name'] for f in provided_files])
        for file in provided_files:
            try:
                user_file = next(
                    user_file
                    for user_file in queryset
                    if user_file.name == file['name']
                )
                serialized = user_file.serialize()
                serialized['visible_name'] = (
                    file.get('visible_name') or user_file.original_name
                )
            except StopIteration:
                raise UserFileDoesNotExist(
                    file['name'],
                    f"The provided file {file['name']} does not exist."
                )

            user_files.append(serialized)

        return user_files

    def get_serializer_field(self, instance, **kwargs):
        return serializers.ListSerializer(
            child=FileFieldRequestSerializer(),
            required=False,
            allow_null=True,
            **kwargs
        )

    def get_response_serializer_field(self, instance, **kwargs):
        return FileFieldResponseSerializer(many=True, required=False, **kwargs)

    def get_serializer_help_text(self, instance):
        return 'This field accepts an `array` containing objects with the name of ' \
               'the file. The response contains an `array` of more detailed objects ' \
               'related to the files.'

    def get_model_field(self, instance, **kwargs):
        # Use a callable default; a literal `[]` default would be shared
        # between all model instances (the classic mutable-default pitfall,
        # which Django flags as fields.E010).
        return JSONField(default=list, **kwargs)

    def random_value(self, instance, fake, cache):
        """
        Selects between 0 and 2 random user files and returns those serialized
        in a list.
        """

        count_name = f'field_{instance.id}_count'

        # Cache the total user file count so repeated calls avoid extra
        # COUNT queries.
        if count_name not in cache:
            cache[count_name] = UserFile.objects.all().count()

        values = []
        count = cache[count_name]

        if count == 0:
            return values

        for i in range(0, randrange(0, 3)):
            # Use a dedicated local instead of shadowing the `instance`
            # parameter, which is still needed for the cache key semantics.
            user_file = UserFile.objects.all()[randint(0, count - 1)]
            serialized = user_file.serialize()
            serialized['visible_name'] = serialized['name']
            values.append(serialized)

        return values

View file

@ -12,7 +12,7 @@ from baserow.contrib.database.views.handler import ViewHandler
from .exceptions import (
PrimaryFieldAlreadyExists, CannotDeletePrimaryField, CannotChangeFieldType,
FieldDoesNotExist
FieldDoesNotExist, IncompatiblePrimaryFieldTypeError
)
from .registries import field_type_registry, field_converter_registry
from .models import Field
@ -168,6 +168,10 @@ class FieldHandler:
# to remove all view filters.
if new_type_name and field_type.type != new_type_name:
field_type = field_type_registry.get(new_type_name)
if field.primary and not field_type.can_be_primary_field:
raise IncompatiblePrimaryFieldTypeError(new_type_name)
new_model_class = field_type.model_class
field.change_polymorphic_type_to(new_model_class)

View file

@ -239,3 +239,7 @@ class LinkRowField(Field):
class EmailField(Field):
    # Model for the email field type; behavior lives in EmailFieldType.
    pass


class FileField(Field):
    # Model for the file field type; the actual value is stored as JSON on the
    # generated table model (see FileFieldType.get_model_field).
    pass

View file

@ -36,8 +36,11 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
field_type_registry.register(ExampleFieldType())
"""
can_sort_in_view = True
"""Indicates whether is is possible to sort on a field in a view."""
can_order_by = True
"""Indicates whether it is possible to order by this field type."""
can_be_primary_field = True
"""Some field types cannot be the primary field."""
def prepare_value_for_db(self, instance, value):
"""

View file

@ -0,0 +1,23 @@
# Generated by Django 2.2.11 on 2020-10-25 22:30
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('database', '0016_token_tokenpermission'),
    ]

    operations = [
        migrations.AddField(
            model_name='view',
            name='filters_disabled',
            field=models.BooleanField(
                default=False,
                # NOTE(review): the implicitly concatenated help_text is
                # missing a space ("...the view" + "saved." -> "viewsaved.").
                # Migration 0018 corrects the text; this historical migration
                # is intentionally left byte-identical.
                help_text='Allows users to see results unfiltered '
                          'while still keeping the filters for the view'
                          'saved.'
            ),
        ),
    ]

View file

@ -0,0 +1,27 @@
# Generated by Django 2.2.11 on 2020-11-10 12:51
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('database', '0017_view_filters_disabled'),
    ]

    operations = [
        # `created` must be set once on insert (auto_now_add), not refreshed
        # on every save (auto_now), matching the Token model change.
        migrations.AlterField(
            model_name='token',
            name='created',
            field=models.DateTimeField(auto_now_add=True),
        ),
        # Fixes the missing space in the help text introduced by migration
        # 0017.
        migrations.AlterField(
            model_name='view',
            name='filters_disabled',
            field=models.BooleanField(
                default=False,
                help_text='Allows users to see results unfiltered while still keeping '
                          'the filters saved for the view.'
            ),
        ),
    ]

View file

@ -0,0 +1,31 @@
# Generated by Django 2.2.11 on 2020-11-16 08:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        ('database', '0018_auto_20201110_1251'),
    ]

    operations = [
        # FileField is a multi-table-inheritance child of Field: it adds no
        # columns of its own besides the automatic parent link.
        migrations.CreateModel(
            name='FileField',
            fields=[
                (
                    'field_ptr',
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to='database.Field'
                    )
                ),
            ],
            bases=('database.field',),
        ),
    ]

View file

@ -0,0 +1,76 @@
# Generated by Django 2.2.11 on 2020-11-16 08:53
from django.db import migrations, connections
from django.db.models import Exists, OuterRef, Q
from django.conf import settings
from baserow.contrib.database.table.models import Table as TableModel
from baserow.contrib.database.fields.models import Field as FieldModel
def forward(apps, schema_editor):
    """
    This migration fixes the not allowed situations where link row fields are primary
    fields or if a table doesn't have a primary field anymore (because it was
    deleted by the related link row field). In both cases a new primary text field is
    created because that is allowed.
    """

    Table = apps.get_model('database', 'Table')
    Field = apps.get_model('database', 'Field')
    LinkRowField = apps.get_model('database', 'LinkRowField')
    TextField = apps.get_model('database', 'TextField')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    text_field_content_type = ContentType.objects.get_for_model(TextField)

    # Check if there are tables without a primary field or where the primary field is
    # a link row field, which is not allowed.
    tables_without_primary = Table.objects.annotate(
        has_primary=Exists(Field.objects.filter(table=OuterRef('pk'), primary=True)),
        has_link_row_primary=Exists(
            LinkRowField.objects.filter(table=OuterRef('pk'), primary=True)
        )
    ).filter(Q(has_primary=False) | Q(has_link_row_primary=True))

    for table in tables_without_primary:
        # If the table has a link row field as primary field it needs to be marked as
        # normal field because they are not allowed to be primary.
        if table.has_link_row_primary:
            link_row_primary = LinkRowField.objects.get(table=table, primary=True)
            link_row_primary.primary = False
            link_row_primary.save()

        # It might be possible in the future that the get_model or db_column methods
        # are going to disappear. If that is the case then the creation of the field
        # cannot be executed, so we can skip that.
        if (
            not hasattr(TableModel, 'get_model') or
            not hasattr(FieldModel, 'db_column')
        ):
            continue

        # We now know for sure there isn't a primary field in the table, so we can
        # create a new primary text field because the table expects one.
        new_primary = TextField.objects.create(
            table=table,
            name='Primary (auto created)',
            order=0,
            content_type=text_field_content_type,
            primary=True
        )

        # Also add the matching column to the user table's actual schema so
        # the generated model and the database table stay in sync.
        connection = connections[settings.USER_TABLE_DATABASE]
        with connection.schema_editor() as tables_schema_editor:
            to_model = TableModel.get_model(table, field_ids=[new_primary.id])
            field_name = FieldModel.db_column.__get__(new_primary, FieldModel)
            model_field = to_model._meta.get_field(field_name)
            tables_schema_editor.add_field(to_model, model_field)
class Migration(migrations.Migration):

    dependencies = [
        ('database', '0019_filefield'),
    ]

    operations = [
        # Reverse is a no-op: the data repair cannot be meaningfully undone.
        migrations.RunPython(forward, migrations.RunPython.noop),
    ]

View file

@ -18,7 +18,7 @@ from .exceptions import (
class TableHandler:
def get_table(self, user, table_id):
def get_table(self, user, table_id, base_queryset=None):
"""
Selects a table with a given id from the database.
@ -26,14 +26,20 @@ class TableHandler:
:type user: User
:param table_id: The identifier of the table that must be returned.
:type table_id: int
:param base_queryset: The base queryset from where to select the table
object from. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises TableDoesNotExist: When the table with the provided id does not exist.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested table of the provided id.
:rtype: Table
"""
if not base_queryset:
base_queryset = Table.objects
try:
table = Table.objects.select_related('database__group').get(id=table_id)
table = base_queryset.select_related('database__group').get(id=table_id)
except Table.DoesNotExist:
raise TableDoesNotExist(f'The table with id {table_id} doe not exist.')

View file

@ -1,6 +1,11 @@
import re
from django.db import models
from baserow.core.mixins import OrderableMixin
from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotFound, OrderByFieldNotPossible
)
from baserow.contrib.database.fields.registries import field_type_registry
@ -59,6 +64,55 @@ class TableModelQuerySet(models.QuerySet):
return self.filter(search_queries) if len(search_queries) > 0 else self
def order_by_fields_string(self, order_string):
    """
    Orders the query by the given field order string. This string is often directly
    forwarded from a GET, POST or other user provided parameter. Multiple fields
    can be provided by separating the values by a comma. The field id is extracted
    from the string so it can either be provided as field_1, 1, id_1, etc.

    :param order_string: The field ids to order the queryset by separated by a
        comma. For example `field_1,2` which will order by field with id 1 first
        and then by field with id 2 second.
    :type order_string: str
    :raises OrderByFieldNotFound: when the provided field id is not found in the
        model.
    :raises OrderByFieldNotPossible: when it is not possible to order by the
        field's type.
    :return: The queryset ordered by the provided order_string.
    :rtype: QuerySet
    """

    order_by = order_string.split(',')

    # NOTE(review): str.split(',') never returns an empty list (''.split(',')
    # is ['']), so this guard cannot trigger; an empty order_string instead
    # fails below with int('') raising ValueError. Confirm intended behavior.
    if len(order_by) == 0:
        raise ValueError('At least one field must be provided.')

    for index, order in enumerate(order_by):
        # Strip everything except digits so `field_1`, `id_1`, `-1` etc. all
        # resolve to the numeric field id.
        field_id = int(re.sub("[^0-9]", "", str(order)))

        if field_id not in self.model._field_objects:
            raise OrderByFieldNotFound(order, f'Field {field_id} does not exist.')

        field_object = self.model._field_objects[field_id]
        field_type = field_object['type']
        field_name = field_object['name']

        if not field_object['type'].can_order_by:
            raise OrderByFieldNotPossible(
                field_name,
                field_type.type,
                f'It is not possible to order by field type {field_type.type}.',
            )

        # A leading `-` on the raw entry means descending order.
        order_by[index] = '{}{}'.format(
            '-' if order[:1] == '-' else '',
            field_name
        )

    # Append `id` as a final tiebreaker so rows with equal values keep a
    # deterministic order.
    order_by.append('id')
    return self.order_by(*order_by)
class TableModelManager(models.Manager):
def get_queryset(self):
@ -113,7 +167,8 @@ class Table(OrderableMixin, models.Model):
meta = type('Meta', (), {
'managed': False,
'db_table': f'database_table_{self.id}',
'app_label': app_label
'app_label': app_label,
'ordering': ['id']
})
attrs = {

View file

@ -24,7 +24,7 @@ class Token(models.Model):
help_text='The unique token key that can be used to authorize for the table '
'row endpoints.'
)
created = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(
User,
on_delete=models.CASCADE,

View file

@ -84,7 +84,11 @@ class ViewHandler:
# Figure out which model to use for the given view type.
view_type = view_type_registry.get(type_name)
model_class = view_type.model_class
allowed_fields = ['name', 'filter_type'] + view_type.allowed_fields
allowed_fields = [
'name',
'filter_type',
'filters_disabled'
] + view_type.allowed_fields
view_values = extract_allowed(kwargs, allowed_fields)
last_order = model_class.get_last_order(table)
@ -117,7 +121,11 @@ class ViewHandler:
raise UserNotInGroupError(user, group)
view_type = view_type_registry.get_by_model(view)
allowed_fields = ['name', 'filter_type'] + view_type.allowed_fields
allowed_fields = [
'name',
'filter_type',
'filters_disabled'
] + view_type.allowed_fields
view = set_allowed_attrs(kwargs, allowed_fields, view)
view.save()
@ -187,7 +195,7 @@ class ViewHandler:
# If the new field type does not support sorting then all sortings will be
# removed.
if not field_type.can_sort_in_view:
if not field_type.can_order_by:
field.viewsort_set.all().delete()
# Check which filters are not compatible anymore and remove those.
@ -218,6 +226,10 @@ class ViewHandler:
if not hasattr(model, '_field_objects'):
raise ValueError('A queryset of the table model is required.')
# If the filter are disabled we don't have to do anything with the queryset.
if view.filters_disabled:
return queryset
q_filters = Q()
for view_filter in view.viewfilter_set.all():
@ -247,7 +259,7 @@ class ViewHandler:
return queryset
def get_filter(self, user, view_filter_id):
def get_filter(self, user, view_filter_id, base_queryset=None):
"""
Returns an existing view filter by the given id.
@ -255,14 +267,20 @@ class ViewHandler:
:type user: User
:param view_filter_id: The id of the view filter.
:type view_filter_id: int
:param base_queryset: The base queryset from where to select the view filter
object. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises ViewFilterDoesNotExist: The the requested view does not exists.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested view filter instance.
:type: ViewFilter
"""
if not base_queryset:
base_queryset = ViewFilter.objects
try:
view_filter = ViewFilter.objects.select_related(
view_filter = base_queryset.select_related(
'view__table__database__group'
).get(
pk=view_filter_id
@ -464,7 +482,7 @@ class ViewHandler:
return queryset
def get_sort(self, user, view_sort_id):
def get_sort(self, user, view_sort_id, base_queryset=None):
"""
Returns an existing view sort with the given id.
@ -472,14 +490,20 @@ class ViewHandler:
:type user: User
:param view_sort_id: The id of the view sort.
:type view_sort_id: int
:param base_queryset: The base queryset from where to select the view sort
object from. This can for example be used to do a `select_related`.
:type base_queryset: Queryset
:raises ViewSortDoesNotExist: The the requested view does not exists.
:raises UserNotInGroupError: When the user does not belong to the related group.
:return: The requested view sort instance.
:type: ViewSort
"""
if not base_queryset:
base_queryset = ViewSort.objects
try:
view_sort = ViewSort.objects.select_related(
view_sort = base_queryset.select_related(
'view__table__database__group'
).get(
pk=view_sort_id
@ -529,7 +553,7 @@ class ViewHandler:
# Check if the field supports sorting.
field_type = field_type_registry.get_by_model(field.specific_class)
if not field_type.can_sort_in_view:
if not field_type.can_order_by:
raise ViewSortFieldNotSupported(f'The field {field.pk} does not support '
f'sorting.')
@ -586,7 +610,7 @@ class ViewHandler:
field_type = field_type_registry.get_by_model(field.specific_class)
if (
field.id != view_sort.field_id and
not field_type.can_sort_in_view
not field_type.can_order_by
):
raise ViewSortFieldNotSupported(f'The field {field.pk} does not support '
f'sorting.')

View file

@ -41,6 +41,11 @@ class View(OrderableMixin, PolymorphicContentTypeMixin, models.Model):
help_text='Indicates whether all the rows should apply to all filters (AND) '
'or to any filter (OR).'
)
filters_disabled = models.BooleanField(
default=False,
help_text='Allows users to see results unfiltered while still keeping '
'the filters saved for the view.'
)
class Meta:
ordering = ('order',)

View file

@ -7,10 +7,11 @@ from dateutil.parser import ParserError
from django.db.models import Q, IntegerField, BooleanField
from django.db.models.fields.related import ManyToManyField
from django.contrib.postgres.fields import JSONField
from baserow.contrib.database.fields.field_types import (
TextFieldType, LongTextFieldType, URLFieldType, NumberFieldType, DateFieldType,
LinkRowFieldType, BooleanFieldType, EmailFieldType
LinkRowFieldType, BooleanFieldType, EmailFieldType, FileFieldType
)
from .registries import ViewFilterType
@ -251,7 +252,8 @@ class EmptyViewFilterType(ViewFilterType):
BooleanFieldType.type,
DateFieldType.type,
LinkRowFieldType.type,
EmailFieldType.type
EmailFieldType.type,
FileFieldType.type
]
def get_filter(self, field_name, value, model_field):
@ -265,6 +267,10 @@ class EmptyViewFilterType(ViewFilterType):
q = Q(**{f'{field_name}__isnull': True})
q.add(Q(**{f'{field_name}': None}), Q.OR)
if isinstance(model_field, JSONField):
q.add(Q(**{f'{field_name}': []}), Q.OR)
q.add(Q(**{f'{field_name}': {}}), Q.OR)
# If the model field accepts an empty string as value we are going to add
# that to the or statement.
try:

View file

@ -52,9 +52,9 @@ class BaseEmailMessage(EmailMultiAlternatives):
def get_context(self):
return {
'public_backend_domain': settings.PUBLIC_BACKEND_DOMAIN,
'public_backend_hostname': settings.PUBLIC_BACKEND_HOSTNAME,
'public_backend_url': settings.PUBLIC_BACKEND_URL,
'public_web_frontend_domain': settings.PUBLIC_WEB_FRONTEND_DOMAIN,
'public_web_frontend_hostname': settings.PUBLIC_WEB_FRONTEND_HOSTNAME,
'public_web_frontend_url': settings.PUBLIC_WEB_FRONTEND_URL
}

View file

@ -0,0 +1,45 @@
from PIL import Image
from django.core.management.base import BaseCommand
from django.core.files.storage import default_storage
from baserow.core.user_files.models import UserFile
from baserow.core.user_files.handler import UserFileHandler
class Command(BaseCommand):
    help = 'Regenerates all the user file thumbnails based on the current settings. ' \
           'Existing files will be overwritten.'

    def handle(self, *args, **options):
        """
        Regenerates the thumbnails of all image user files. If the USER_THUMBNAILS
        setting ever changes then this file can be used to fix all the thumbnails.
        """

        i = 0
        handler = UserFileHandler()
        buffer_size = 100
        queryset = UserFile.objects.filter(is_image=True)
        count = queryset.count()

        while i < count:
            # Process the files in slices of `buffer_size` rows so we never
            # load the entire table into memory at once.
            user_files = queryset[i:min(count, i + buffer_size)]

            for user_file in user_files:
                i += 1
                full_path = handler.user_file_path(user_file)
                stream = default_storage.open(full_path)

                try:
                    image = Image.open(stream)
                    handler.generate_and_save_image_thumbnails(
                        image, user_file, storage=default_storage
                    )
                    image.close()
                # Silently skip files Pillow cannot read as an image.
                except IOError:
                    pass

                stream.close()

        self.stdout.write(self.style.SUCCESS(f"{i} thumbnails have been regenerated."))

View file

@ -0,0 +1,49 @@
# Generated by Django 2.2.11 on 2020-11-10 13:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserFile',
            fields=[
                (
                    'id',
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name='ID'
                    )
                ),
                ('original_name', models.CharField(max_length=255)),
                ('original_extension', models.CharField(max_length=64)),
                ('unique', models.CharField(max_length=32)),
                ('size', models.PositiveIntegerField()),
                ('mime_type', models.CharField(max_length=127, blank=True)),
                ('is_image', models.BooleanField(default=False)),
                ('image_width', models.PositiveSmallIntegerField(null=True)),
                ('image_height', models.PositiveSmallIntegerField(null=True)),
                ('uploaded_at', models.DateTimeField(auto_now_add=True)),
                ('sha256_hash', models.CharField(db_index=True, max_length=64)),
                (
                    'uploaded_by',
                    models.ForeignKey(
                        # SET_NULL keeps the file record when the uploading
                        # user account is deleted.
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to=settings.AUTH_USER_MODEL
                    )
                ),
            ],
            options={'ordering': ('id',)}
        ),
    ]

View file

@ -2,9 +2,13 @@ from django.db import models
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from baserow.core.user_files.models import UserFile
from .managers import GroupQuerySet
from .mixins import OrderableMixin, PolymorphicContentTypeMixin
__all__ = ['UserFile']
User = get_user_model()

View file

@ -0,0 +1,11 @@
from django.core.files.storage import FileSystemStorage
class OverwriteFileSystemStorage(FileSystemStorage):
    """File system storage that replaces existing files instead of renaming
    new uploads that collide with an existing name."""

    def _save(self, name, content):
        # Clear out any file already occupying the name so the new content
        # ends up at exactly the requested path.
        if self.exists(name):
            self.delete(name)

        return super()._save(name, content)

    def get_available_name(self, name, *args, **kwargs):
        # Every name is reported as available; collisions are resolved by
        # the deletion in `_save`.
        return name

View file

@ -6,7 +6,7 @@
<mj-text mj-class="title">Reset password</mj-text>
<mj-text mj-class="text">
A password reset was requested for your account ({{ user.username }}) on
Baserow ({{ public_web_frontend_domain }}). If you did not authorize this, you
Baserow ({{ public_web_frontend_hostname }}). If you did not authorize this, you
may simply ignore this email.
</mj-text>
<mj-text mj-class="text" padding-bottom="20px">

View file

@ -10,7 +10,7 @@ class InvalidPassword(Exception):
"""Raised when the provided password is incorrect."""
class BaseURLDomainNotAllowed(Exception):
class BaseURLHostnameNotAllowed(Exception):
"""
Raised when the provided base url is not allowed when requesting a password
reset email.

View file

@ -9,7 +9,7 @@ from baserow.core.handler import CoreHandler
from baserow.core.registries import plugin_registry
from .exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLDomainNotAllowed
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLHostnameNotAllowed
)
from .emails import ResetPasswordEmail
from .utils import normalize_email_address
@ -101,14 +101,15 @@ class UserHandler:
:type user: User
:param base_url: The base url of the frontend, where the user can reset his
password. The reset token is appended to the URL (base_url + '/TOKEN').
Only the PUBLIC_WEB_FRONTEND_DOMAIN is allowed as domain name.
Only the PUBLIC_WEB_FRONTEND_HOSTNAME is allowed as domain name.
:type base_url: str
"""
parsed_base_url = urlparse(base_url)
if parsed_base_url.netloc != settings.PUBLIC_WEB_FRONTEND_DOMAIN:
raise BaseURLDomainNotAllowed(f'The domain {parsed_base_url.netloc} is '
f'not allowed.')
if parsed_base_url.hostname != settings.PUBLIC_WEB_FRONTEND_HOSTNAME:
raise BaseURLHostnameNotAllowed(
f'The hostname {parsed_base_url.netloc} is not allowed.'
)
signer = self.get_reset_password_signer()
signed_user_id = signer.dumps(user.id)

View file

@ -0,0 +1,40 @@
import math
class InvalidFileStreamError(Exception):
    """Raised when the provided file stream is invalid."""
class FileSizeTooLargeError(Exception):
    """Raised when the provided file is too large."""

    def __init__(self, max_size_bytes, *args, **kwargs):
        # Expose the limit in whole megabytes so callers can build a
        # human-readable error message.
        bytes_per_mb = 1024 * 1024
        self.max_size_mb = math.floor(max_size_bytes / bytes_per_mb)
        super().__init__(*args, **kwargs)
class FileURLCouldNotBeReached(Exception):
    """Raised when the provided URL could not be reached."""


class InvalidUserFileNameError(Exception):
    """Raised when the provided user file name is invalid."""

    def __init__(self, name, *args, **kwargs):
        # Keep the offending name so callers can include it in responses.
        self.name = name
        super().__init__(*args, **kwargs)


class UserFileDoesNotExist(Exception):
    """Raised when a user file with the provided name or id does not exist."""

    def __init__(self, name_or_id, *args, **kwargs):
        # The identifier that failed to resolve (file name or primary key).
        self.name_or_id = name_or_id
        super().__init__(*args, **kwargs)


class MaximumUniqueTriesError(Exception):
    """
    Raised when the maximum tries has been exceeded while generating a unique user file
    string.
    """

View file

@ -0,0 +1,265 @@
import pathlib
import mimetypes
from os.path import join
from io import BytesIO
import requests
from requests.exceptions import RequestException
from PIL import Image, ImageOps
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from baserow.core.utils import sha256_hash, stream_size, random_string
from .exceptions import (
InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
MaximumUniqueTriesError
)
from .models import UserFile
class UserFileHandler:
    """Handles storing, deduplicating and thumbnailing of uploaded user
    files."""

    def user_file_path(self, user_file_name):
        """
        Generates the full user file path based on the provided file name. This path
        can be used with the storage.

        :param user_file_name: The user file name.
        :type user_file_name: str
        :return: The generated path.
        :rtype: str
        """

        # A UserFile instance may also be passed; use its generated name.
        if isinstance(user_file_name, UserFile):
            user_file_name = user_file_name.name

        return join(settings.USER_FILES_DIRECTORY, user_file_name)

    def user_file_thumbnail_path(self, user_file_name, thumbnail_name):
        """
        Generates the full user file thumbnail path based on the provided filename.
        This path can be used with the storage.

        :param user_file_name: The user file name.
        :type user_file_name: str
        :param thumbnail_name: The thumbnail type name.
        :type thumbnail_name: str
        :return: The generated path.
        :rtype: str
        """

        # A UserFile instance may also be passed; use its generated name.
        if isinstance(user_file_name, UserFile):
            user_file_name = user_file_name.name

        return join(settings.USER_THUMBNAILS_DIRECTORY, thumbnail_name, user_file_name)

    def generate_unique(self, sha256_hash, extension, length=32, max_tries=1000):
        """
        Generates a unique non existing string for a new user file.

        :param sha256_hash: The hash of the file name. Needed because they are
            required to be unique together.
        :type sha256_hash: str
        :param extension: The extension of the file name. Needed because they are
            required to be unique together.
        :type extension: str
        :param length: Indicates the amount of characters that the unique must contain.
        :type length: int
        :param max_tries: The maximum amount of tries to check if a unique already
            exists.
        :type max_tries: int
        :raises MaximumUniqueTriesError: When the maximum amount of tries has
            been exceeded.
        :return: The generated unique string
        :rtype: str
        """

        i = 0

        while True:
            # Bail out instead of looping forever when no free random string
            # can be found within the allowed number of attempts.
            if i > max_tries:
                raise MaximumUniqueTriesError(
                    f'Tried {max_tries} tokens, but none of them are unique.'
                )

            i += 1
            unique = random_string(length)

            # Uniqueness is only required per (sha256_hash, extension) pair.
            if not UserFile.objects.filter(
                sha256_hash=sha256_hash,
                original_extension=extension,
                unique=unique
            ).exists():
                return unique

    def generate_and_save_image_thumbnails(self, image, user_file, storage=None):
        """
        Generates the thumbnails based on the current settings and saves them to the
        provided storage. Note that existing files with the same name will be
        overwritten.

        :param image: The original Pillow image that serves as base when generating the
            the image.
        :type image: Image
        :param user_file: The user file for which the thumbnails must be generated
            and saved.
        :type user_file: UserFile
        :param storage: The storage where the thumbnails must be saved to.
        :type storage: Storage or None
        :raises ValueError: If the provided user file is not a valid image.
        """

        if not user_file.is_image:
            raise ValueError('The provided user file is not an image.')

        storage = storage or default_storage
        image_width = user_file.image_width
        image_height = user_file.image_height

        for name, size in settings.USER_THUMBNAILS.items():
            # Copy so the computed dimension below never mutates the
            # settings value.
            size_copy = size.copy()

            # If the width or height is None we want to keep the aspect ratio.
            if size_copy[0] is None and size_copy[1] is not None:
                size_copy[0] = round(image_width / image_height * size_copy[1])
            elif size_copy[1] is None and size_copy[0] is not None:
                size_copy[1] = round(image_height / image_width * size_copy[0])

            # NOTE(review): Image.ANTIALIAS is deprecated/removed in newer
            # Pillow releases (Image.LANCZOS); fine for the pinned Pillow 8.x.
            thumbnail = ImageOps.fit(image.copy(), size_copy, Image.ANTIALIAS)
            thumbnail_stream = BytesIO()
            thumbnail.save(thumbnail_stream, image.format)
            thumbnail_stream.seek(0)
            thumbnail_path = self.user_file_thumbnail_path(user_file, name)
            storage.save(thumbnail_path, thumbnail_stream)

            # Free the in-memory image data before the next iteration.
            del thumbnail
            del thumbnail_stream

    def upload_user_file(self, user, file_name, stream, storage=None):
        """
        Saves the provided uploaded file in the provided storage. If no storage is
        provided the default_storage will be used. An entry into the user file table
        is also created.

        :param user: The user on whose behalf the file is uploaded.
        :type user: User
        :param file_name: The provided file name when the file was uploaded.
        :type file_name: str
        :param stream: An IO stream containing the uploaded file.
        :type stream: IOBase
        :param storage: The storage where the file must be saved to.
        :type storage: Storage
        :raises InvalidFileStreamError: If the provided stream is invalid.
        :raises FileSizeToLargeError: If the provided content is too large.
        :return: The newly created user file.
        :rtype: UserFile
        """

        if not hasattr(stream, 'read'):
            raise InvalidFileStreamError('The provided stream is not readable.')

        size = stream_size(stream)

        if size > settings.USER_FILE_SIZE_LIMIT:
            raise FileSizeTooLargeError(
                settings.USER_FILE_SIZE_LIMIT,
                'The provided file is too large.'
            )

        storage = storage or default_storage
        # NOTE(review): `hash` shadows the builtin of the same name within
        # this method.
        hash = sha256_hash(stream)

        # Deduplicate: reuse an existing entry when the same name and
        # contents were uploaded before.
        try:
            return UserFile.objects.get(original_name=file_name, sha256_hash=hash)
        except UserFile.DoesNotExist:
            pass

        extension = pathlib.Path(file_name).suffix[1:].lower()
        mime_type = mimetypes.guess_type(file_name)[0] or ''
        unique = self.generate_unique(hash, extension)

        # By default the provided file is not an image.
        image = None
        is_image = False
        image_width = None
        image_height = None

        # Try to open the image with Pillow. If that succeeds we know the file is an
        # image.
        try:
            image = Image.open(stream)
            is_image = True
            image_width = image.width
            image_height = image.height
        except IOError:
            pass

        user_file = UserFile.objects.create(
            original_name=file_name,
            original_extension=extension,
            size=size,
            mime_type=mime_type,
            unique=unique,
            uploaded_by=user,
            sha256_hash=hash,
            is_image=is_image,
            image_width=image_width,
            image_height=image_height
        )

        # If the uploaded file is an image we need to generate the configurable
        # thumbnails for it. We want to generate them before the file is saved to the
        # storage because some storages close the stream after saving.
        if image:
            self.generate_and_save_image_thumbnails(image, user_file, storage=storage)

            # When all the thumbnails have been generated, the image can be deleted
            # from memory.
            del image

        # Save the file to the storage.
        full_path = self.user_file_path(user_file)
        storage.save(full_path, stream)

        # Close the stream because we don't need it anymore.
        stream.close()

        return user_file

    def upload_user_file_by_url(self, user, url, storage=None):
        """
        Uploads a user file by downloading it from the provided URL.

        :param user: The user on whose behalf the file is uploaded.
        :type user: User
        :param url: The URL where the file must be downloaded from.
        :type url: str
        :param storage: The storage where the file must be saved to.
        :type storage: Storage
        :raises FileURLCouldNotBeReached: If the file could not be downloaded from
            the URL.
        :return: The newly created user file.
        :rtype: UserFile
        """

        # The last URL segment is used as the uploaded file's name.
        file_name = url.split('/')[-1]

        try:
            response = requests.get(url, stream=True, timeout=10)

            if not response.ok:
                raise FileURLCouldNotBeReached('The response did not respond with an '
                                               'OK status code.')

            # Read one byte past the limit so upload_user_file can detect and
            # reject downloads that exceed USER_FILE_SIZE_LIMIT.
            content = response.raw.read(
                settings.USER_FILE_SIZE_LIMIT + 1,
                decode_content=True
            )
        except RequestException:
            raise FileURLCouldNotBeReached('The provided URL could not be reached.')

        file = SimpleUploadedFile(file_name, content)
        return UserFileHandler().upload_user_file(user, file_name, file, storage)

View file

@ -0,0 +1,15 @@
from django.db import models
from django.db.models import Q
class UserFileQuerySet(models.QuerySet):
    def name(self, *names):
        """Filter the queryset down to the user files matching any of the
        provided generated file names."""

        if len(names) == 0:
            raise ValueError('At least one name must be provided.')

        combined = Q()

        # OR together one lookup per name, each built from the parts that
        # the model extracts out of the generated file name.
        for file_name in names:
            combined |= Q(**self.model.deconstruct_name(file_name))

        return self.filter(combined)

View file

@ -0,0 +1,80 @@
import re
from django.db import models
from django.contrib.auth import get_user_model
from .exceptions import InvalidUserFileNameError
from .managers import UserFileQuerySet
User = get_user_model()


# Matches generated user file names of the form "<unique>_<sha256>.<ext>",
# capturing the three parts (see UserFile.name and UserFile.deconstruct_name).
deconstruct_user_file_regex = re.compile(
    r'([a-zA-Z0-9]*)_([a-zA-Z0-9]*)\.([a-zA-Z0-9]*)$'
)
class UserFile(models.Model):
    # The file name as provided at upload time.
    original_name = models.CharField(max_length=255)
    # The lowercased extension extracted from the original name.
    original_extension = models.CharField(max_length=64)
    # Random string making the generated name unique per (hash, extension).
    unique = models.CharField(max_length=32)
    # File size in bytes.
    size = models.PositiveIntegerField()
    mime_type = models.CharField(max_length=127, blank=True)
    # True when Pillow could open the uploaded file as an image.
    is_image = models.BooleanField(default=False)
    image_width = models.PositiveSmallIntegerField(null=True)
    image_height = models.PositiveSmallIntegerField(null=True)
    uploaded_at = models.DateTimeField(auto_now_add=True)
    # SET_NULL keeps the file record when the uploading user is deleted.
    uploaded_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    # Hash of the file contents, indexed for deduplication lookups.
    sha256_hash = models.CharField(max_length=64, db_index=True)

    objects = UserFileQuerySet.as_manager()

    class Meta:
        ordering = ('id',)

    def serialize(self):
        """
        Generates a serialized version that can be stored in other data sources. This
        is possible because the state of the UserFile never changes.

        :return: The serialized version.
        :rtype: dict
        """

        return {
            'name': self.name,
            'size': self.size,
            'mime_type': self.mime_type,
            'is_image': self.is_image,
            'image_width': self.image_width,
            'image_height': self.image_height,
            'uploaded_at': self.uploaded_at.isoformat()
        }

    @property
    def name(self):
        # The generated storage file name: "<unique>_<sha256>.<extension>".
        return f'{self.unique}_{self.sha256_hash}.{self.original_extension}'

    @staticmethod
    def deconstruct_name(name):
        """
        Extracts the model field name values from the provided file name and returns it
        as a mapping.

        :param name: The model generated file name.
        :type name: str
        :return: The field name and extracted value mapping.
        :rtype: dict
        """

        matches = deconstruct_user_file_regex.match(name)

        if not matches:
            raise InvalidUserFileNameError(
                name,
                'The provided name is not in the correct format.'
            )

        return {
            'unique': matches[1],
            'sha256_hash': matches[2],
            'original_extension': matches[3]
        }

View file

@ -1,6 +1,8 @@
import os
import re
import random
import string
import hashlib
from collections import namedtuple
@ -175,3 +177,39 @@ def random_string(length):
string.ascii_letters + string.digits
) for _ in range(length)
)
def sha256_hash(stream, block_size=65536):
    """
    Calculates a sha256 hash for the contents of the provided stream.

    :param stream: The stream of the content where to calculate the hash for.
    :type stream: IOStream
    :param block_size: The amount of bytes that are read each time.
    :type block_size: int
    :return: The calculated hash.
    :rtype: str
    """

    # Start from the beginning so the hash covers the entire content.
    stream.seek(0)
    digest = hashlib.sha256()

    chunk = stream.read(block_size)
    while chunk:
        digest.update(chunk)
        chunk = stream.read(block_size)

    # Rewind so callers can re-read the stream afterwards.
    stream.seek(0)
    return digest.hexdigest()
def stream_size(stream):
    """
    Calculates the total amount of bytes of the stream's content.

    :param stream: The stream of the content where to calculate the size for.
    :type stream: IOStream
    :return: The total size of the stream.
    :rtype: int
    """

    # Jump to the end to measure the total length, then rewind so callers
    # can still read the stream from the start.
    stream.seek(0, os.SEEK_END)
    total_bytes = stream.tell()
    stream.seek(0)
    return total_bytes

View file

@ -106,8 +106,9 @@ def test_validate_body():
validate_body(TemporarySerializer)(func)(*[object, request])
assert api_exception_1.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_1.value.detail['detail']['field_2'][0]['error'] == \
assert api_exception_1.value.detail['detail']['field_2'][0]['error'] == (
'This field is required.'
)
assert api_exception_1.value.detail['detail']['field_2'][0]['code'] == 'required'
assert api_exception_1.value.status_code == status.HTTP_400_BAD_REQUEST
@ -122,10 +123,12 @@ def test_validate_body():
validate_body(TemporarySerializer)(func)(*[object, request])
assert api_exception_2.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_2.value.detail['detail']['field_2'][0]['error'] == \
assert api_exception_2.value.detail['detail']['field_2'][0]['error'] == (
'"wrong" is not a valid choice.'
assert api_exception_2.value.detail['detail']['field_2'][0]['code'] == \
)
assert api_exception_2.value.detail['detail']['field_2'][0]['code'] == (
'invalid_choice'
)
assert api_exception_2.value.status_code == status.HTTP_400_BAD_REQUEST
request = Request(factory.post(
@ -156,8 +159,9 @@ def test_validate_body_custom_fields():
registry, 'serializer_class')(func)(*[object, request])
assert api_exception_1.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_1.value.detail['detail']['type'][0]['error'] == \
assert api_exception_1.value.detail['detail']['type'][0]['error'] == (
'This field is required.'
)
assert api_exception_1.value.detail['detail']['type'][0]['code'] == 'required'
assert api_exception_1.value.status_code == status.HTTP_400_BAD_REQUEST
@ -172,8 +176,9 @@ def test_validate_body_custom_fields():
validate_body_custom_fields(registry)(func)(*[object, request])
assert api_exception_2.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_2.value.detail['detail']['type'][0]['error'] == \
assert api_exception_2.value.detail['detail']['type'][0]['error'] == (
'"NOT_EXISTING" is not a valid choice.'
)
assert api_exception_2.value.detail['detail']['type'][0]['code'] == 'invalid_choice'
assert api_exception_2.value.status_code == status.HTTP_400_BAD_REQUEST
@ -188,8 +193,9 @@ def test_validate_body_custom_fields():
validate_body_custom_fields(registry)(func)(*[object, request])
assert api_exception_3.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_3.value.detail['detail']['name'][0]['error'] == \
assert api_exception_3.value.detail['detail']['name'][0]['error'] == (
'This field is required.'
)
assert api_exception_3.value.detail['detail']['name'][0]['code'] == 'required'
assert api_exception_3.value.status_code == status.HTTP_400_BAD_REQUEST
@ -206,8 +212,9 @@ def test_validate_body_custom_fields():
)(func)(*[object, request])
assert api_exception_4.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_4.value.detail['detail']['name'][0]['error'] == \
assert api_exception_4.value.detail['detail']['name'][0]['error'] == (
'This field is required.'
)
assert api_exception_4.value.detail['detail']['name'][0]['code'] == 'required'
assert api_exception_4.value.status_code == status.HTTP_400_BAD_REQUEST
@ -222,8 +229,9 @@ def test_validate_body_custom_fields():
validate_body_custom_fields(registry)(func)(*[object, request])
assert api_exception_5.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_5.value.detail['detail']['name'][0]['error'] == \
assert api_exception_5.value.detail['detail']['name'][0]['error'] == (
'A valid integer is required.'
)
assert api_exception_5.value.detail['detail']['name'][0]['code'] == 'invalid'
assert api_exception_5.value.status_code == status.HTTP_400_BAD_REQUEST

View file

@ -27,6 +27,12 @@ class TemporarySerializer(serializers.Serializer):
field_2 = serializers.ChoiceField(choices=('choice_1', 'choice_2'))
class TemporaryListSerializer(serializers.ListSerializer):
def __init__(self, *args, **kwargs):
kwargs['child'] = TemporarySerializer()
super().__init__(*args, **kwargs)
class TemporarySerializerWithList(serializers.Serializer):
field_3 = serializers.IntegerField()
field_4 = serializers.ListField(child=serializers.IntegerField())
@ -56,7 +62,7 @@ class TemporaryTypeRegistry(Registry):
def test_map_exceptions():
with pytest.raises(APIException) as api_exception_1:
with map_exceptions({ TemporaryException: 'ERROR_TEMPORARY' }):
with map_exceptions({TemporaryException: 'ERROR_TEMPORARY'}):
raise TemporaryException
assert api_exception_1.value.detail['error'] == 'ERROR_TEMPORARY'
@ -68,13 +74,15 @@ def test_map_exceptions():
TemporaryException: (
'ERROR_TEMPORARY_2',
HTTP_404_NOT_FOUND,
'Another message'
'Another message {e.message}'
)
}):
raise TemporaryException
e = TemporaryException()
e.message = 'test'
raise e
assert api_exception_2.value.detail['error'] == 'ERROR_TEMPORARY_2'
assert api_exception_2.value.detail['detail'] == 'Another message'
assert api_exception_2.value.detail['detail'] == 'Another message test'
assert api_exception_2.value.status_code == status.HTTP_404_NOT_FOUND
with pytest.raises(TemporaryException2):
@ -94,8 +102,9 @@ def test_validate_data():
validate_data(TemporarySerializer, {'field_1': 'test'})
assert api_exception_1.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_1.value.detail['detail']['field_2'][0]['error'] == \
assert api_exception_1.value.detail['detail']['field_2'][0]['error'] == (
'This field is required.'
)
assert api_exception_1.value.detail['detail']['field_2'][0]['code'] == 'required'
assert api_exception_1.value.status_code == status.HTTP_400_BAD_REQUEST
@ -106,10 +115,12 @@ def test_validate_data():
)
assert api_exception_2.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_2.value.detail['detail']['field_2'][0]['error'] == \
assert api_exception_2.value.detail['detail']['field_2'][0]['error'] == (
'"wrong" is not a valid choice.'
assert api_exception_2.value.detail['detail']['field_2'][0]['code'] == \
)
assert api_exception_2.value.detail['detail']['field_2'][0]['code'] == (
'invalid_choice'
)
assert api_exception_2.value.status_code == status.HTTP_400_BAD_REQUEST
validated_data = validate_data(
@ -128,19 +139,36 @@ def test_validate_data():
assert api_exception_1.value.status_code == status.HTTP_400_BAD_REQUEST
assert api_exception_1.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_1.value.detail['detail']['field_3'][0]['error'] == \
assert api_exception_1.value.detail['detail']['field_3'][0]['error'] == (
'A valid integer is required.'
)
assert api_exception_1.value.detail['detail']['field_3'][0]['code'] == 'invalid'
assert len(api_exception_1.value.detail['detail']['field_4']) == 2
assert api_exception_1.value.detail['detail']['field_4'][0][0]['error'] == \
assert api_exception_1.value.detail['detail']['field_4'][0][0]['error'] == (
'A valid integer is required.'
assert api_exception_1.value.detail['detail']['field_4'][0][0]['code'] == \
)
assert api_exception_1.value.detail['detail']['field_4'][0][0]['code'] == (
'invalid'
assert api_exception_1.value.detail['detail']['field_4'][1][0]['error'] == \
)
assert api_exception_1.value.detail['detail']['field_4'][1][0]['error'] == (
'A valid integer is required.'
assert api_exception_1.value.detail['detail']['field_4'][1][0]['code'] == \
)
assert api_exception_1.value.detail['detail']['field_4'][1][0]['code'] == (
'invalid'
)
with pytest.raises(APIException) as api_exception_3:
validate_data(
TemporaryListSerializer,
[{'something': 'nothing'}]
)
assert api_exception_3.value.status_code == status.HTTP_400_BAD_REQUEST
assert api_exception_3.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert len(api_exception_3.value.detail['detail']) == 1
assert api_exception_3.value.detail['detail'][0]['field_1'][0]['code'] == 'required'
assert api_exception_3.value.detail['detail'][0]['field_2'][0]['code'] == 'required'
def test_validate_data_custom_fields():
@ -152,8 +180,9 @@ def test_validate_data_custom_fields():
validate_data_custom_fields('NOT_EXISTING', registry, {})
assert api_exception.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception.value.detail['detail']['type'][0]['error'] == \
assert api_exception.value.detail['detail']['type'][0]['error'] == (
'"NOT_EXISTING" is not a valid choice.'
)
assert api_exception.value.detail['detail']['type'][0]['code'] == 'invalid_choice'
assert api_exception.value.status_code == status.HTTP_400_BAD_REQUEST
@ -161,8 +190,9 @@ def test_validate_data_custom_fields():
validate_data_custom_fields('temporary_2', registry, {})
assert api_exception_2.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_2.value.detail['detail']['name'][0]['error'] == \
assert api_exception_2.value.detail['detail']['name'][0]['error'] == (
'This field is required.'
)
assert api_exception_2.value.detail['detail']['name'][0]['code'] == 'required'
assert api_exception_2.value.status_code == status.HTTP_400_BAD_REQUEST
@ -170,8 +200,9 @@ def test_validate_data_custom_fields():
validate_data_custom_fields('temporary_2', registry, {'name': 'test1'})
assert api_exception_3.value.detail['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert api_exception_3.value.detail['detail']['name'][0]['error'] == \
assert api_exception_3.value.detail['detail']['name'][0]['error'] == (
'A valid integer is required.'
)
assert api_exception_3.value.detail['detail']['name'][0]['code'] == 'invalid'
assert api_exception_3.value.status_code == status.HTTP_400_BAD_REQUEST

View file

@ -0,0 +1,217 @@
import pytest
import responses
from unittest.mock import patch
from freezegun import freeze_time
from PIL import Image
from django.shortcuts import reverse
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.files.storage import FileSystemStorage
from rest_framework.status import (
HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_413_REQUEST_ENTITY_TOO_LARGE
)
from baserow.core.models import UserFile
@pytest.mark.django_db
def test_upload_file(api_client, data_fixture, tmpdir):
    """
    Integration test for the `upload_file` user-files endpoint: rejects
    missing/empty/non-file payloads and oversized files, stores a valid text
    file, deduplicates a second identical upload, and generates a tiny
    thumbnail for image uploads.
    """

    user, token = data_fixture.create_user_and_token(
        email='test@test.nl', password='password', first_name='Test1')

    # No `file` part in the multipart body at all.
    response = api_client.post(
        reverse('api:user_files:upload_file'),
        format='multipart',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    # `file` part present but empty.
    response = api_client.post(
        reverse('api:user_files:upload_file'),
        data={'file': ''},
        format='multipart',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    # Temporarily lower the size limit to 6 bytes so an 11 byte upload is
    # rejected; restore the original limit afterwards.
    old_limit = settings.USER_FILE_SIZE_LIMIT
    settings.USER_FILE_SIZE_LIMIT = 6
    response = api_client.post(
        reverse('api:user_files:upload_file'),
        data={'file': SimpleUploadedFile('test.txt', b'Hello World')},
        format='multipart',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    settings.USER_FILE_SIZE_LIMIT = old_limit
    assert response.status_code == HTTP_413_REQUEST_ENTITY_TOO_LARGE
    assert response.json()['error'] == 'ERROR_FILE_SIZE_TOO_LARGE'
    assert response.json()['detail'] == (
        'The provided file is too large. Max 0MB is allowed.'
    )

    # A plain string is not an uploaded file object.
    response = api_client.post(
        reverse('api:user_files:upload_file'),
        data={'file': 'not a file'},
        format='multipart',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    # Redirect the handler's storage to the pytest tmpdir so the test can
    # inspect the written files on disk.
    storage = FileSystemStorage(location=str(tmpdir), base_url='http://localhost')

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        # Freeze time so `uploaded_at` can be asserted exactly.
        with freeze_time('2020-01-01 12:00'):
            file = SimpleUploadedFile('test.txt', b'Hello World')
            response = api_client.post(
                reverse('api:user_files:upload_file'),
                data={'file': file},
                format='multipart',
                HTTP_AUTHORIZATION=f'JWT {token}'
            )

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['size'] == 11
    assert response_json['mime_type'] == 'text/plain'
    assert response_json['is_image'] is False
    assert response_json['image_width'] is None
    assert response_json['image_height'] is None
    assert response_json['uploaded_at'] == '2020-01-01T12:00:00Z'
    assert response_json['thumbnails'] is None
    assert response_json['original_name'] == 'test.txt'
    assert 'localhost:8000' in response_json['url']

    user_file = UserFile.objects.all().last()
    assert user_file.name == response_json['name']
    assert response_json['url'].endswith(response_json['name'])

    # The file must physically exist in the patched storage location.
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        file = SimpleUploadedFile('test.txt', b'Hello World')
        response_2 = api_client.post(
            reverse('api:user_files:upload_file'),
            data={'file': file},
            format='multipart',
            HTTP_AUTHORIZATION=f'JWT {token}'
        )

    # The old file should be provided.
    assert response_2.json()['name'] == response_json['name']
    assert response_json['original_name'] == 'test.txt'

    # Build an in-memory PNG (100x140) to exercise the image path.
    image = Image.new('RGB', (100, 140), color='red')
    file = SimpleUploadedFile('test.png', b'')
    image.save(file, format='PNG')
    file.seek(0)

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        response = api_client.post(
            reverse('api:user_files:upload_file'),
            data={'file': file},
            format='multipart',
            HTTP_AUTHORIZATION=f'JWT {token}'
        )

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['mime_type'] == 'image/png'
    assert response_json['is_image'] is True
    assert response_json['image_width'] == 100
    assert response_json['image_height'] == 140
    assert len(response_json['thumbnails']) == 1
    assert 'localhost:8000' in response_json['thumbnails']['tiny']['url']
    assert 'tiny' in response_json['thumbnails']['tiny']['url']
    assert response_json['thumbnails']['tiny']['width'] == 21
    assert response_json['thumbnails']['tiny']['height'] == 21
    assert response_json['original_name'] == 'test.png'

    # Both the original and its 21x21 `tiny` thumbnail must be on disk.
    user_file = UserFile.objects.all().last()
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()
    file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
    assert file_path.isfile()
    thumbnail = Image.open(file_path.open('rb'))
    assert thumbnail.height == 21
    assert thumbnail.width == 21
@pytest.mark.django_db
@responses.activate
def test_upload_file_via_url(api_client, data_fixture, tmpdir):
    """
    Integration test for the `upload_via_url` user-files endpoint: validates
    the request body, handles unreachable URLs, enforces the size limit and
    stores a file fetched from a (mocked) remote URL. Outgoing HTTP is
    intercepted by the `responses` library.
    """

    user, token = data_fixture.create_user_and_token(
        email='test@test.nl', password='password', first_name='Test1'
    )

    # Missing `url` in the body.
    response = api_client.post(
        reverse('api:user_files:upload_via_url'),
        data={},
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_REQUEST_BODY_VALIDATION'

    # Malformed URL.
    response = api_client.post(
        reverse('api:user_files:upload_via_url'),
        data={'url': 'NOT_A_URL'},
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_REQUEST_BODY_VALIDATION'

    # Valid URL but not registered with `responses`, so the request fails.
    response = api_client.post(
        reverse('api:user_files:upload_via_url'),
        data={'url': 'http://localhost/test2.txt'},
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_FILE_URL_COULD_NOT_BE_REACHED'

    # Mock the remote file (11 bytes of plain text) for later requests.
    responses.add(
        responses.GET,
        'http://localhost/test.txt',
        body=b'Hello World',
        status=200,
        content_type="text/plain",
        stream=True,
    )

    # Temporarily lower the size limit so the 11 byte download is rejected.
    old_limit = settings.USER_FILE_SIZE_LIMIT
    settings.USER_FILE_SIZE_LIMIT = 6
    response = api_client.post(
        reverse('api:user_files:upload_via_url'),
        data={'url': 'http://localhost/test.txt'},
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_413_REQUEST_ENTITY_TOO_LARGE
    assert response.json()['error'] == 'ERROR_FILE_SIZE_TOO_LARGE'
    settings.USER_FILE_SIZE_LIMIT = old_limit

    # Redirect storage to the pytest tmpdir so the stored file can be checked.
    storage = FileSystemStorage(location=str(tmpdir), base_url='http://localhost')

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        response = api_client.post(
            reverse('api:user_files:upload_via_url'),
            data={'url': 'http://localhost/test.txt'},
            HTTP_AUTHORIZATION=f'JWT {token}'
        )

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['size'] == 11
    assert response_json['mime_type'] == 'text/plain'
    assert response_json['is_image'] is False
    assert response_json['image_width'] is None
    assert response_json['image_height'] is None
    assert response_json['thumbnails'] is None
    assert response_json['original_name'] == 'test.txt'
    assert 'localhost:8000' in response_json['url']

    # The downloaded file must physically exist in the patched storage.
    user_file = UserFile.objects.all().last()
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()

View file

@ -106,7 +106,7 @@ def test_send_reset_password_email(data_fixture, client, mailoutbox):
)
response_json = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json['error'] == 'ERROR_DOMAIN_URL_IS_NOT_ALLOWED'
assert response_json['error'] == 'ERROR_HOSTNAME_IS_NOT_ALLOWED'
assert len(mailoutbox) == 0
response = client.post(

View file

@ -168,7 +168,7 @@ def test_update_field(api_client, data_fixture):
user_2, token_2 = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(user=user_2)
text = data_fixture.create_text_field(table=table)
text = data_fixture.create_text_field(table=table, primary=True)
text_2 = data_fixture.create_text_field(table=table_2)
url = reverse('api:database:fields:item', kwargs={'field_id': text_2.id})
@ -192,6 +192,22 @@ def test_update_field(api_client, data_fixture):
assert response.status_code == HTTP_404_NOT_FOUND
assert response.json()['error'] == 'ERROR_FIELD_DOES_NOT_EXIST'
# The primary field is not compatible with a link row field so that should result
# in an error.
url = reverse('api:database:fields:item', kwargs={'field_id': text.id})
response = api_client.patch(
url,
{'type': 'link_row'},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()['error'] == 'ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE'
assert (
response.json()['detail'] ==
'The field type link_row is not compatible with the primary field.'
)
url = reverse('api:database:fields:item', kwargs={'field_id': text.id})
response = api_client.patch(
url,
@ -275,9 +291,9 @@ def test_update_field(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json['name'] == 'Test 2'
assert response_json['type'] == 'boolean'
assert not 'number_type' in response_json
assert not 'number_decimal_places' in response_json
assert not 'number_negative' in response_json
assert 'number_type' not in response_json
assert 'number_decimal_places' not in response_json
assert 'number_negative' not in response_json
@pytest.mark.django_db

View file

@ -2,13 +2,14 @@ import pytest
from faker import Faker
from pytz import timezone
from datetime import date, datetime
from freezegun import freeze_time
from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST
from django.shortcuts import reverse
from baserow.contrib.database.fields.models import (
LongTextField, URLField, DateField, EmailField
LongTextField, URLField, DateField, EmailField, FileField
)
@ -104,10 +105,10 @@ def test_long_text_field_type(api_client, data_fixture):
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json[f'field_{field_id}'] == None
assert response_json[f'field_{field_id}'] is None
row = model.objects.all().last()
assert row.long_text_2 == None
assert row.long_text_2 is None
response = api_client.post(
reverse('api:database:rows:list', kwargs={'table_id': table.id}),
@ -117,10 +118,10 @@ def test_long_text_field_type(api_client, data_fixture):
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json[f'field_{field_id}'] == None
assert response_json[f'field_{field_id}'] is None
row = model.objects.all().last()
assert row.long_text_2 == None
assert row.long_text_2 is None
url = reverse('api:database:fields:item', kwargs={'field_id': field_id})
response = api_client.delete(url, HTTP_AUTHORIZATION=f'JWT {token}')
@ -262,8 +263,9 @@ def test_date_field_type(api_client, data_fixture):
assert response.status_code == HTTP_400_BAD_REQUEST
assert response_json['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
assert response_json['detail'][f'field_{date_field_id}'][0]['code'] == 'invalid'
assert response_json['detail'][f'field_{date_time_field_id}'][0]['code'] == \
'invalid'
assert response_json['detail'][f'field_{date_time_field_id}'][0]['code'] == (
'invalid'
)
response = api_client.post(
reverse('api:database:rows:list', kwargs={'table_id': table.id}),
@ -381,3 +383,263 @@ def test_email_field_type(api_client, data_fixture):
response = api_client.delete(email, HTTP_AUTHORIZATION=f'JWT {token}')
assert response.status_code == HTTP_204_NO_CONTENT
assert EmailField.objects.all().count() == 0
@pytest.mark.django_db
def test_file_field_type(api_client, data_fixture):
    """
    Integration test for the `file` field type: creating/renaming the field,
    writing rows with file values (including validation of malformed and
    unknown file names), the serialized file representation (thumbnails,
    visible_name, hidden internal attributes), partial updates, and that both
    the rows endpoint and the grid view endpoint return absolute file URLs.
    """

    user, token = data_fixture.create_user_and_token(
        email='test@test.nl', password='password', first_name='Test1'
    )
    table = data_fixture.create_database_table(user=user)
    grid = data_fixture.create_grid_view(table=table)
    # Freeze time so `uploaded_at` of user_file_1 can be asserted exactly.
    with freeze_time('2020-01-01 12:00'):
        user_file_1 = data_fixture.create_user_file(
            original_name='test.txt',
            original_extension='txt',
            unique='sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA',
            size=10,
            mime_type='text/plain',
            is_image=True,
            image_width=1920,
            image_height=1080,
            sha256_hash=(
                'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
            ),
        )
    user_file_2 = data_fixture.create_user_file()
    user_file_3 = data_fixture.create_user_file()

    # Create the file field.
    response = api_client.post(
        reverse('api:database:fields:list', kwargs={'table_id': table.id}),
        {'name': 'File', 'type': 'file'},
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['type'] == 'file'
    assert FileField.objects.all().count() == 1
    field_id = response_json['id']

    # Renaming the field must work without touching its type.
    response = api_client.patch(
        reverse('api:database:fields:item', kwargs={'field_id': field_id}),
        {'name': 'File2'},
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK

    # A row created without a value defaults to an empty file list.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json[f'field_{field_id}'] == []

    # An explicitly empty list is also accepted.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        {
            f'field_{field_id}': []
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json[f'field_{field_id}'] == []

    # Each entry must contain a `name` key.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        {
            f'field_{field_id}': [{'without_name': 'test'}]
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json['error'] == 'ERROR_REQUEST_BODY_VALIDATION'

    # The `name` must match the generated user-file name format.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        {
            f'field_{field_id}': [{'name': 'an__invalid__name.jpg'}]
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
    assert (
        response_json['detail'][f'field_{field_id}'][0]['name'][0]['code'] == 'invalid'
    )

    # A well-formed name that does not belong to an existing user file.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        {
            f'field_{field_id}': [{'name': 'not_existing.jpg'}]
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json['error'] == 'ERROR_USER_FILE_DOES_NOT_EXIST'
    assert response_json['detail'] == 'The user file not_existing.jpg does not exist.'

    # Valid row referencing user_file_1. The `is_image` value in the request
    # must be ignored; the serialized value comes from the stored user file.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        {
            f'field_{field_id}': [
                {
                    'name': user_file_1.name,
                    'is_image': True
                }
            ]
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert (
        response_json[f'field_{field_id}'][0]['visible_name'] ==
        user_file_1.original_name
    )
    # Stored name is `{unique}_{sha256_hash}.{original_extension}`.
    assert response_json[f'field_{field_id}'][0]['name'] == (
        'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA_'
        'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e.txt'
    )
    assert response_json[f'field_{field_id}'][0]['size'] == 10
    assert response_json[f'field_{field_id}'][0]['mime_type'] == 'text/plain'
    assert response_json[f'field_{field_id}'][0]['is_image'] is True
    assert response_json[f'field_{field_id}'][0]['image_width'] == 1920
    assert response_json[f'field_{field_id}'][0]['image_height'] == 1080
    assert response_json[f'field_{field_id}'][0]['uploaded_at'] == (
        '2020-01-01T12:00:00+00:00'
    )
    assert 'localhost:8000' in response_json[f'field_{field_id}'][0]['url']
    assert len(response_json[f'field_{field_id}'][0]['thumbnails']) == 1
    assert (
        'localhost:8000' in
        response_json[f'field_{field_id}'][0]['thumbnails']['tiny']['url']
    )
    assert (
        'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA_'
        'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e.txt' in
        response_json[f'field_{field_id}'][0]['thumbnails']['tiny']['url']
    )
    assert (
        'tiny' in response_json[f'field_{field_id}'][0]['thumbnails']['tiny']['url']
    )
    assert response_json[f'field_{field_id}'][0]['thumbnails']['tiny']['width'] == 21
    assert response_json[f'field_{field_id}'][0]['thumbnails']['tiny']['height'] == 21
    # Internal user-file attributes must not leak into the row serializer.
    assert 'original_name' not in response_json
    assert 'original_extension' not in response_json
    assert 'sha256_hash' not in response_json

    # Replace the value with two files; the second one gets a custom
    # visible_name.
    response = api_client.patch(
        reverse('api:database:rows:item', kwargs={
            'table_id': table.id,
            'row_id': response_json['id']
        }),
        {
            f'field_{field_id}': [
                {'name': user_file_3.name},
                {'name': user_file_2.name, 'visible_name': 'new_name_1.txt'}
            ]
        },
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert response_json[f'field_{field_id}'][0]['name'] == user_file_3.name
    assert (
        response_json[f'field_{field_id}'][0]['visible_name'] ==
        user_file_3.original_name
    )
    assert 'localhost:8000' in response_json[f'field_{field_id}'][0]['url']
    assert response_json[f'field_{field_id}'][0]['is_image'] is False
    assert response_json[f'field_{field_id}'][0]['image_width'] is None
    assert response_json[f'field_{field_id}'][0]['image_height'] is None
    assert response_json[f'field_{field_id}'][0]['thumbnails'] is None
    assert response_json[f'field_{field_id}'][1]['name'] == user_file_2.name
    assert response_json[f'field_{field_id}'][1]['visible_name'] == 'new_name_1.txt'

    # A patch without the field must leave the value untouched.
    response = api_client.patch(
        reverse('api:database:rows:item', kwargs={
            'table_id': table.id,
            'row_id': response_json['id']
        }),
        {},
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK

    response = api_client.get(
        reverse('api:database:rows:item', kwargs={
            'table_id': table.id,
            'row_id': response_json['id']
        }),
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert response_json[f'field_{field_id}'][0]['name'] == user_file_3.name
    assert (
        response_json[f'field_{field_id}'][0]['visible_name'] ==
        user_file_3.original_name
    )
    assert 'localhost:8000' in response_json[f'field_{field_id}'][0]['url']
    assert response_json[f'field_{field_id}'][1]['name'] == user_file_2.name
    assert response_json[f'field_{field_id}'][1]['visible_name'] == 'new_name_1.txt'

    # The rows list endpoint must expose the same values with absolute URLs.
    response = api_client.get(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
        format='json',
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json['results']) == 3
    assert response_json['results'][0][f'field_{field_id}'] == []
    assert response_json['results'][1][f'field_{field_id}'] == []
    assert (
        response_json['results'][2][f'field_{field_id}'][0]['name'] == user_file_3.name
    )
    assert (
        'localhost:8000' in response_json['results'][2][f'field_{field_id}'][0]['url']
    )
    assert (
        response_json['results'][2][f'field_{field_id}'][1]['name'] == user_file_2.name
    )

    # We also need to check if the grid view returns the correct url because the
    # request context must be provided there in order to work.
    url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
    response = api_client.get(
        url,
        **{'HTTP_AUTHORIZATION': f'JWT {token}'}
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json['results']) == 3
    assert response_json['results'][0][f'field_{field_id}'] == []
    assert response_json['results'][1][f'field_{field_id}'] == []
    assert (
        response_json['results'][2][f'field_{field_id}'][0]['name'] == user_file_3.name
    )
    assert (
        'localhost:8000' in response_json['results'][2][f'field_{field_id}'][0]['url']
    )
    assert (
        response_json['results'][2][f'field_{field_id}'][1]['name'] == user_file_2.name
    )

View file

@ -44,7 +44,7 @@ def test_get_table_serializer(data_fixture):
serializer_instance = serializer_class(data={'color': None})
assert serializer_instance.is_valid()
assert serializer_instance.data['color'] == None
assert serializer_instance.data['color'] is None
# number field
serializer_instance = serializer_class(data={'horsepower': 120})
@ -53,7 +53,7 @@ def test_get_table_serializer(data_fixture):
serializer_instance = serializer_class(data={'horsepower': None})
assert serializer_instance.is_valid()
assert serializer_instance.data['horsepower'] == None
assert serializer_instance.data['horsepower'] is None
serializer_instance = serializer_class(data={'horsepower': 'abc'})
assert not serializer_instance.is_valid()
@ -66,11 +66,11 @@ def test_get_table_serializer(data_fixture):
# boolean field
serializer_instance = serializer_class(data={'for_sale': True})
assert serializer_instance.is_valid()
assert serializer_instance.data['for_sale'] == True
assert serializer_instance.data['for_sale'] is True
serializer_instance = serializer_class(data={'for_sale': False})
assert serializer_instance.is_valid()
assert serializer_instance.data['for_sale'] == False
assert serializer_instance.data['for_sale'] is False
serializer_instance = serializer_class(data={'for_sale': None})
assert not serializer_instance.is_valid()
@ -95,7 +95,7 @@ def test_get_table_serializer(data_fixture):
serializer_instance = serializer_class(data={'price': None})
assert serializer_instance.is_valid()
assert serializer_instance.data['price'] == None
assert serializer_instance.data['price'] is None
# not existing value
serializer_instance = serializer_class(data={'NOT_EXISTING': True})
@ -148,12 +148,15 @@ def test_get_example_row_serializer_class():
request_serializer = get_example_row_serializer_class()
response_serializer = get_example_row_serializer_class(add_id=True)
assert len(request_serializer._declared_fields) == \
len(field_type_registry.registry.values())
assert len(response_serializer._declared_fields) == \
len(request_serializer._declared_fields) + 1
assert len(response_serializer._declared_fields) == \
len(field_type_registry.registry.values()) + 1
assert len(request_serializer._declared_fields) == (
len(field_type_registry.registry.values())
)
assert len(response_serializer._declared_fields) == (
len(request_serializer._declared_fields) + 1
)
assert len(response_serializer._declared_fields) == (
len(field_type_registry.registry.values()) + 1
)
assert isinstance(response_serializer._declared_fields['id'],
serializers.IntegerField)

View file

@ -8,6 +8,7 @@ from rest_framework.status import (
from django.shortcuts import reverse
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.tokens.handler import TokenHandler
@ -178,6 +179,52 @@ def test_list_rows(api_client, data_fixture):
assert len(response_json['results']) == 1
assert response_json['results'][0]['id'] == row_4.id
url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
response = api_client.get(
f'{url}?order_by=field_999999',
format='json',
HTTP_AUTHORIZATION=f'JWT {jwt_token}'
)
assert response.status_code == HTTP_400_BAD_REQUEST
response_json = response.json()
assert response_json['error'] == 'ERROR_ORDER_BY_FIELD_NOT_FOUND'
assert response_json['detail'] == (
'The field field_999999 was not found in the table.'
)
number_field_type = field_type_registry.get('number')
old_can_order_by = number_field_type.can_order_by
number_field_type.can_order_by = False
url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
response = api_client.get(
f'{url}?order_by=-field_{field_2.id}',
format='json',
HTTP_AUTHORIZATION=f'JWT {jwt_token}'
)
assert response.status_code == HTTP_400_BAD_REQUEST
response_json = response.json()
assert response_json['error'] == 'ERROR_ORDER_BY_FIELD_NOT_POSSIBLE'
assert response_json['detail'] == (
f'It is not possible to order by field_{field_2.id} because the field type '
f'number does not support filtering.'
)
number_field_type.can_order_by = old_can_order_by
url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
response = api_client.get(
f'{url}?order_by=-field_{field_2.id}',
format='json',
HTTP_AUTHORIZATION=f'JWT {jwt_token}'
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json['count'] == 4
assert len(response_json['results']) == 4
assert response_json['results'][0]['id'] == row_4.id
assert response_json['results'][1]['id'] == row_3.id
assert response_json['results'][2]['id'] == row_2.id
assert response_json['results'][3]['id'] == row_1.id
@pytest.mark.django_db
def test_create_row(api_client, data_fixture):
@ -262,8 +309,8 @@ def test_create_row(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json_row_1[f'field_{text_field.id}'] == 'white'
assert not response_json_row_1[f'field_{number_field.id}']
assert response_json_row_1[f'field_{boolean_field.id}'] == False
assert response_json_row_1[f'field_{text_field_2.id}'] == None
assert response_json_row_1[f'field_{boolean_field.id}'] is False
assert response_json_row_1[f'field_{text_field_2.id}'] is None
response = api_client.post(
reverse('api:database:rows:list', kwargs={'table_id': table.id}),
@ -279,7 +326,7 @@ def test_create_row(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json_row_2[f'field_{text_field.id}'] == 'white'
assert not response_json_row_2[f'field_{number_field.id}']
assert response_json_row_2[f'field_{boolean_field.id}'] == False
assert response_json_row_2[f'field_{boolean_field.id}'] is False
assert response_json_row_2[f'field_{text_field_2.id}'] == ''
response = api_client.post(
@ -325,29 +372,29 @@ def test_create_row(api_client, data_fixture):
row_1 = rows[0]
assert row_1.id == response_json_row_1['id']
assert getattr(row_1, f'field_{text_field.id}') == 'white'
assert getattr(row_1, f'field_{number_field.id}') == None
assert getattr(row_1, f'field_{boolean_field.id}') == False
assert getattr(row_1, f'field_{text_field_2.id}') == None
assert getattr(row_1, f'field_{number_field.id}') is None
assert getattr(row_1, f'field_{boolean_field.id}') is False
assert getattr(row_1, f'field_{text_field_2.id}') is None
row_2 = rows[1]
assert row_2.id == response_json_row_2['id']
assert getattr(row_2, f'field_{text_field.id}') == 'white'
assert getattr(row_2, f'field_{number_field.id}') == None
assert getattr(row_2, f'field_{boolean_field.id}') == False
assert getattr(row_1, f'field_{text_field_2.id}') == None
assert getattr(row_2, f'field_{number_field.id}') is None
assert getattr(row_2, f'field_{boolean_field.id}') is False
assert getattr(row_1, f'field_{text_field_2.id}') is None
row_3 = rows[2]
assert row_3.id == response_json_row_3['id']
assert getattr(row_3, f'field_{text_field.id}') == 'Green'
assert getattr(row_3, f'field_{number_field.id}') == 120
assert getattr(row_3, f'field_{boolean_field.id}') == True
assert getattr(row_3, f'field_{boolean_field.id}') is True
assert getattr(row_3, f'field_{text_field_2.id}') == 'Not important'
row_4 = rows[3]
assert row_4.id == response_json_row_4['id']
assert getattr(row_4, f'field_{text_field.id}') == 'Purple'
assert getattr(row_4, f'field_{number_field.id}') == 240
assert getattr(row_4, f'field_{boolean_field.id}') == True
assert getattr(row_4, f'field_{boolean_field.id}') is True
assert getattr(row_4, f'field_{text_field_2.id}') == ''
@ -455,7 +502,7 @@ def test_get_row(api_client, data_fixture):
assert response_json['id'] == row_1.id
assert response_json[f'field_{text_field.id}'] == 'Green'
assert response_json[f'field_{number_field.id}'] == 120
assert response_json[f'field_{boolean_field.id}'] == False
assert response_json[f'field_{boolean_field.id}'] is False
url = reverse('api:database:rows:item', kwargs={
'table_id': table.id,
@ -471,7 +518,7 @@ def test_get_row(api_client, data_fixture):
assert response_json['id'] == row_2.id
assert response_json[f'field_{text_field.id}'] == 'Purple'
assert response_json[f'field_{number_field.id}'] == 240
assert response_json[f'field_{boolean_field.id}'] == True
assert response_json[f'field_{boolean_field.id}'] is True
@pytest.mark.django_db
@ -600,12 +647,12 @@ def test_update_row(api_client, data_fixture):
assert response_json_row_1['id'] == row_1.id
assert response_json_row_1[f'field_{text_field.id}'] == 'Green'
assert response_json_row_1[f'field_{number_field.id}'] == 120
assert response_json_row_1[f'field_{boolean_field.id}'] == True
assert response_json_row_1[f'field_{boolean_field.id}'] is True
row_1.refresh_from_db()
assert getattr(row_1, f'field_{text_field.id}') == 'Green'
assert getattr(row_1, f'field_{number_field.id}') == 120
assert getattr(row_1, f'field_{boolean_field.id}') == True
assert getattr(row_1, f'field_{boolean_field.id}') is True
response = api_client.patch(
url,
@ -636,7 +683,7 @@ def test_update_row(api_client, data_fixture):
row_1.refresh_from_db()
assert getattr(row_1, f'field_{text_field.id}') == 'Orange'
assert getattr(row_1, f'field_{number_field.id}') == 120
assert getattr(row_1, f'field_{boolean_field.id}') == True
assert getattr(row_1, f'field_{boolean_field.id}') is True
url = reverse('api:database:rows:item', kwargs={
'table_id': table.id,
@ -657,12 +704,12 @@ def test_update_row(api_client, data_fixture):
assert response_json_row_2['id'] == row_2.id
assert response_json_row_2[f'field_{text_field.id}'] == 'Blue'
assert response_json_row_2[f'field_{number_field.id}'] == 50
assert response_json_row_2[f'field_{boolean_field.id}'] == False
assert response_json_row_2[f'field_{boolean_field.id}'] is False
row_2.refresh_from_db()
assert getattr(row_2, f'field_{text_field.id}') == 'Blue'
assert getattr(row_2, f'field_{number_field.id}') == 50
assert getattr(row_2, f'field_{boolean_field.id}') == False
assert getattr(row_2, f'field_{boolean_field.id}') is False
url = reverse('api:database:rows:item', kwargs={
'table_id': table.id,
@ -681,14 +728,14 @@ def test_update_row(api_client, data_fixture):
response_json_row_2 = response.json()
assert response.status_code == HTTP_200_OK
assert response_json_row_2['id'] == row_2.id
assert response_json_row_2[f'field_{text_field.id}'] == None
assert response_json_row_2[f'field_{number_field.id}'] == None
assert response_json_row_2[f'field_{boolean_field.id}'] == False
assert response_json_row_2[f'field_{text_field.id}'] is None
assert response_json_row_2[f'field_{number_field.id}'] is None
assert response_json_row_2[f'field_{boolean_field.id}'] is False
row_2.refresh_from_db()
assert getattr(row_2, f'field_{text_field.id}') == None
assert getattr(row_2, f'field_{number_field.id}') == None
assert getattr(row_2, f'field_{boolean_field.id}') == False
assert getattr(row_2, f'field_{text_field.id}') is None
assert getattr(row_2, f'field_{number_field.id}') is None
assert getattr(row_2, f'field_{boolean_field.id}') is False
table_3 = data_fixture.create_database_table(user=user)
decimal_field = data_fixture.create_number_field(

View file

@ -16,7 +16,7 @@ def test_list_tables(api_client, data_fixture):
database_2 = data_fixture.create_database_application()
table_1 = data_fixture.create_database_table(database=database, order=2)
table_2 = data_fixture.create_database_table(database=database, order=1)
table_3 = data_fixture.create_database_table(database=database_2)
data_fixture.create_database_table(database=database_2)
url = reverse('api:database:tables:list', kwargs={'database_id': database.id})
response = api_client.get(url, HTTP_AUTHORIZATION=f'JWT {token}')
@ -384,7 +384,7 @@ def test_get_database_application_with_tables(api_client, data_fixture):
database = data_fixture.create_database_application(user=user)
table_1 = data_fixture.create_database_table(database=database, order=0)
table_2 = data_fixture.create_database_table(database=database, order=1)
table_3 = data_fixture.create_database_table()
data_fixture.create_database_table()
url = reverse('api:applications:item', kwargs={'application_id': database.id})
response = api_client.get(

View file

@ -151,10 +151,8 @@ def test_get_token(api_client, data_fixture):
database_1 = data_fixture.create_database_application(group=group_1)
database_2 = data_fixture.create_database_application(group=group_1)
table_1 = data_fixture.create_database_table(database=database_1,
create_table=False)
table_2 = data_fixture.create_database_table(database=database_1,
create_table=False)
data_fixture.create_database_table(database=database_1, create_table=False)
data_fixture.create_database_table(database=database_1, create_table=False)
table_3 = data_fixture.create_database_table(database=database_2,
create_table=False)
@ -278,8 +276,7 @@ def test_update_token(api_client, data_fixture):
database_3 = data_fixture.create_database_application()
table_1 = data_fixture.create_database_table(database=database_1,
create_table=False)
table_2 = data_fixture.create_database_table(database=database_1,
create_table=False)
data_fixture.create_database_table(database=database_1, create_table=False)
table_3 = data_fixture.create_database_table(database=database_2,
create_table=False)
table_4 = data_fixture.create_database_table(database=database_3,
@ -610,4 +607,3 @@ def test_delete_token(api_client, data_fixture):
assert Token.objects.all().count() == 2
assert TokenPermission.objects.all().count() == 0

View file

@ -296,7 +296,7 @@ def test_list_filtered_rows(api_client, data_fixture):
f'field_{number_field.id}': 100,
f'field_{boolean_field.id}': True
})
row_4 = model.objects.create(**{
model.objects.create(**{
f'field_{text_field.id}': 'Purple',
f'field_{number_field.id}': 1000,
f'field_{boolean_field.id}': False
@ -361,7 +361,7 @@ def test_patch_grid_view(api_client, data_fixture):
email='test@test.nl', password='password', first_name='Test1')
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(table=table)
unknown_field = data_fixture.create_text_field()
data_fixture.create_text_field()
grid = data_fixture.create_grid_view(table=table)
# The second field is deliberately created after the creation of the grid field
# so that the GridViewFieldOptions entry is not created. This should

View file

@ -18,7 +18,12 @@ def test_list_views(api_client, data_fixture):
table_2 = data_fixture.create_database_table()
view_1 = data_fixture.create_grid_view(table=table_1, order=1)
view_2 = data_fixture.create_grid_view(table=table_1, order=3)
view_3 = data_fixture.create_grid_view(table=table_1, order=2, filter_type='OR')
view_3 = data_fixture.create_grid_view(
table=table_1,
order=2,
filter_type='OR',
filters_disabled=True
)
data_fixture.create_grid_view(table=table_2, order=1)
response = api_client.get(
@ -34,14 +39,17 @@ def test_list_views(api_client, data_fixture):
assert response_json[0]['id'] == view_1.id
assert response_json[0]['type'] == 'grid'
assert response_json[0]['filter_type'] == 'AND'
assert response_json[0]['filters_disabled'] is False
assert response_json[1]['id'] == view_3.id
assert response_json[1]['type'] == 'grid'
assert response_json[1]['filter_type'] == 'OR'
assert response_json[1]['filters_disabled'] is True
assert response_json[2]['id'] == view_2.id
assert response_json[2]['type'] == 'grid'
assert response_json[2]['filter_type'] == 'AND'
assert response_json[2]['filters_disabled'] is False
response = api_client.get(
reverse('api:database:views:list', kwargs={'table_id': table_2.id}), **{
@ -74,7 +82,7 @@ def test_list_views_including_filters(api_client, data_fixture):
filter_1 = data_fixture.create_view_filter(view=view_1, field=field_1)
filter_2 = data_fixture.create_view_filter(view=view_1, field=field_2)
filter_3 = data_fixture.create_view_filter(view=view_2, field=field_1)
filter_4 = data_fixture.create_view_filter(view=view_3, field=field_3)
data_fixture.create_view_filter(view=view_3, field=field_3)
response = api_client.get(
'{}'.format(reverse(
@ -124,7 +132,7 @@ def test_list_views_including_sortings(api_client, data_fixture):
sort_1 = data_fixture.create_view_sort(view=view_1, field=field_1)
sort_2 = data_fixture.create_view_sort(view=view_1, field=field_2)
sort_3 = data_fixture.create_view_sort(view=view_2, field=field_1)
sort_4 = data_fixture.create_view_sort(view=view_3, field=field_3)
data_fixture.create_view_sort(view=view_3, field=field_3)
response = api_client.get(
'{}'.format(reverse(
@ -202,7 +210,8 @@ def test_create_view(api_client, data_fixture):
{
'name': 'Test 1',
'type': 'grid',
'filter_type': 'OR'
'filter_type': 'OR',
'filters_disabled': True
},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
@ -211,12 +220,14 @@ def test_create_view(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json['type'] == 'grid'
assert response_json['filter_type'] == 'OR'
assert response_json['filters_disabled'] is True
grid = GridView.objects.filter()[0]
assert response_json['id'] == grid.id
assert response_json['name'] == grid.name
assert response_json['order'] == grid.order
assert response_json['filter_type'] == grid.filter_type
assert response_json['filters_disabled'] == grid.filters_disabled
assert 'filters' not in response_json
assert 'sortings' not in response_json
@ -227,7 +238,8 @@ def test_create_view(api_client, data_fixture):
{
'name': 'Test 2',
'type': 'grid',
'filter_type': 'AND'
'filter_type': 'AND',
'filters_disabled': False
},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
@ -237,9 +249,28 @@ def test_create_view(api_client, data_fixture):
assert response_json['name'] == 'Test 2'
assert response_json['type'] == 'grid'
assert response_json['filter_type'] == 'AND'
assert response_json['filters_disabled'] is False
assert response_json['filters'] == []
assert response_json['sortings'] == []
response = api_client.post(
'{}'.format(reverse('api:database:views:list', kwargs={'table_id': table.id})),
{
'name': 'Test 3',
'type': 'grid'
},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json['name'] == 'Test 3'
assert response_json['type'] == 'grid'
assert response_json['filter_type'] == 'AND'
assert response_json['filters_disabled'] is False
assert 'filters' not in response_json
assert 'sortings' not in response_json
@pytest.mark.django_db
def test_get_view(api_client, data_fixture):
@ -280,6 +311,7 @@ def test_get_view(api_client, data_fixture):
assert response_json['type'] == 'grid'
assert response_json['table']['id'] == table.id
assert response_json['filter_type'] == 'AND'
assert not response_json['filters_disabled']
assert 'filters' not in response_json
assert 'sortings' not in response_json
@ -352,15 +384,20 @@ def test_update_view(api_client, data_fixture):
assert response_json['id'] == view.id
assert response_json['name'] == 'Test 1'
assert response_json['filter_type'] == 'AND'
assert not response_json['filters_disabled']
view.refresh_from_db()
assert view.name == 'Test 1'
assert view.filter_type == 'AND'
assert not view.filters_disabled
url = reverse('api:database:views:item', kwargs={'view_id': view.id})
response = api_client.patch(
url,
{'filter_type': 'OR'},
{
'filter_type': 'OR',
'filters_disabled': True,
},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
@ -368,11 +405,13 @@ def test_update_view(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json['id'] == view.id
assert response_json['filter_type'] == 'OR'
assert response_json['filters_disabled']
assert 'filters' not in response_json
assert 'sortings' not in response_json
view.refresh_from_db()
assert view.filter_type == 'OR'
assert view.filters_disabled
filter_1 = data_fixture.create_view_filter(view=view)
url = reverse('api:database:views:item', kwargs={'view_id': view.id})
@ -386,6 +425,7 @@ def test_update_view(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json['id'] == view.id
assert response_json['filter_type'] == 'AND'
assert response_json['filters_disabled'] is True
assert response_json['filters'][0]['id'] == filter_1.id
assert response_json['sortings'] == []
@ -430,8 +470,8 @@ def test_list_view_filters(api_client, data_fixture):
view_3 = data_fixture.create_grid_view(table=table_2, order=1)
filter_1 = data_fixture.create_view_filter(view=view_1, field=field_1)
filter_2 = data_fixture.create_view_filter(view=view_1, field=field_2)
filter_3 = data_fixture.create_view_filter(view=view_2, field=field_1)
filter_4 = data_fixture.create_view_filter(view=view_3, field=field_3)
data_fixture.create_view_filter(view=view_2, field=field_1)
data_fixture.create_view_filter(view=view_3, field=field_3)
response = api_client.get(
reverse(
@ -776,7 +816,7 @@ def test_update_view_filter(api_client, data_fixture):
'api:database:views:filter_item',
kwargs={'view_filter_id': filter_1.id}
),
{'type': 'equal',},
{'type': 'equal'},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
@ -796,7 +836,7 @@ def test_update_view_filter(api_client, data_fixture):
'api:database:views:filter_item',
kwargs={'view_filter_id': filter_1.id}
),
{'value': 'test 3',},
{'value': 'test 3'},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
@ -817,7 +857,7 @@ def test_update_view_filter(api_client, data_fixture):
'api:database:views:filter_item',
kwargs={'view_filter_id': filter_1.id}
),
{'value': '',},
{'value': ''},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)
@ -874,11 +914,11 @@ def test_list_view_sortings(api_client, data_fixture):
field_2 = data_fixture.create_text_field(table=table_1)
field_3 = data_fixture.create_text_field(table=table_2)
view_1 = data_fixture.create_grid_view(table=table_1, order=1)
view_2 = data_fixture.create_grid_view(table=table_1, order=2)
data_fixture.create_grid_view(table=table_1, order=2)
view_3 = data_fixture.create_grid_view(table=table_2, order=1)
sort_1 = data_fixture.create_view_sort(view=view_1, field=field_1)
sort_2 = data_fixture.create_view_sort(view=view_1, field=field_2)
sort_4 = data_fixture.create_view_sort(view=view_3, field=field_3)
data_fixture.create_view_sort(view=view_3, field=field_3)
response = api_client.get(
reverse(
@ -1233,7 +1273,7 @@ def test_update_view_sort(api_client, data_fixture):
'api:database:views:sort_item',
kwargs={'view_sort_id': sort_1.id}
),
{'order': 'DESC',},
{'order': 'DESC'},
format='json',
HTTP_AUTHORIZATION=f'JWT {token}'
)

View file

@ -33,5 +33,6 @@ def test_lenient_schema_editor():
connection,
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
) as schema_editor:
assert schema_editor.alert_column_type_function == \
assert schema_editor.alert_column_type_function == (
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
)

View file

@ -8,7 +8,7 @@ from baserow.contrib.database.fields.models import (
)
from baserow.contrib.database.fields.exceptions import (
FieldTypeDoesNotExist, PrimaryFieldAlreadyExists, CannotDeletePrimaryField,
FieldDoesNotExist
FieldDoesNotExist, IncompatiblePrimaryFieldTypeError
)
@ -165,6 +165,15 @@ def test_update_field(data_fixture):
with pytest.raises(FieldTypeDoesNotExist):
handler.update_field(user=user, field=field, new_type_name='NOT_EXISTING')
# The link row field is not compatible with a primary field so an exception
# is expected.
field.primary = True
field.save()
with pytest.raises(IncompatiblePrimaryFieldTypeError):
handler.update_field(user=user, field=field, new_type_name='link_row')
field.primary = False
field.save()
# Change some values of the text field and test if they have been changed.
field = handler.update_field(user=user, field=field, name='Text field',
text_default='Default value')
@ -186,12 +195,12 @@ def test_update_field(data_fixture):
assert field.name == 'Number field'
assert field.number_type == 'INTEGER'
assert field.number_negative == False
assert field.number_negative is False
assert not hasattr(field, 'text_default')
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{field.id}') == None
assert getattr(rows[0], f'field_{field.id}') is None
assert getattr(rows[1], f'field_{field.id}') == 100
assert getattr(rows[2], f'field_{field.id}') == 10
@ -203,11 +212,11 @@ def test_update_field(data_fixture):
assert field.name == 'Price field'
assert field.number_type == 'DECIMAL'
assert field.number_decimal_places == 2
assert field.number_negative == True
assert field.number_negative is True
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{field.id}') == None
assert getattr(rows[0], f'field_{field.id}') is None
assert getattr(rows[1], f'field_{field.id}') == Decimal('100.00')
assert getattr(rows[2], f'field_{field.id}') == Decimal('10.00')
@ -223,9 +232,9 @@ def test_update_field(data_fixture):
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{field.id}') == False
assert getattr(rows[1], f'field_{field.id}') == False
assert getattr(rows[2], f'field_{field.id}') == False
assert getattr(rows[0], f'field_{field.id}') is False
assert getattr(rows[1], f'field_{field.id}') is False
assert getattr(rows[2], f'field_{field.id}') is False
@pytest.mark.django_db

View file

@ -1,4 +1,5 @@
import pytest
import json
from pytz import timezone
from datetime import date
from faker import Faker
@ -7,9 +8,12 @@ from decimal import Decimal
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from baserow.core.user_files.exceptions import (
InvalidUserFileNameError, UserFileDoesNotExist
)
from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import (
LongTextField, URLField, DateField, EmailField
LongTextField, URLField, DateField, EmailField, FileField
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
@ -178,7 +182,7 @@ def test_long_text_field_type(data_fixture):
def test_url_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(user=user, database=table.database)
data_fixture.create_database_table(user=user, database=table.database)
field = data_fixture.create_text_field(table=table, order=1, name='name')
field_handler = FieldHandler()
@ -207,32 +211,32 @@ def test_url_field_type(data_fixture):
'url': 'httpss'
}, model=model)
row_0 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'http://test.nl',
'url': 'https://baserow.io',
'number': 5
}, model=model)
row_1 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'http;//',
'url': 'http://localhost',
'number': 10
}, model=model)
row_2 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'bram@test.nl',
'url': 'http://www.baserow.io'
}, model=model)
row_3 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'NOT A URL',
'url': 'http://www.baserow.io/blog/building-a-database'
}, model=model)
row_4 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'ftps://www.complex.website.com?querystring=test&something=else',
'url': ''
}, model=model)
row_5 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'url': None,
}, model=model)
row_6 = row_handler.create_row(user=user, table=table, values={}, model=model)
row_handler.create_row(user=user, table=table, values={}, model=model)
# Convert to text field to a url field so we can check how the conversion of values
# went.
@ -289,7 +293,7 @@ def test_date_field_type_prepare_value(data_fixture):
with pytest.raises(ValidationError):
assert d.prepare_value_for_db(f, 'TEST')
assert d.prepare_value_for_db(f, None) == None
assert d.prepare_value_for_db(f, None) is None
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
@ -339,15 +343,15 @@ def test_date_field_type(data_fixture):
date_field_2 = field_handler.create_field(user=user, table=table, type_name='date',
name='Datetime', date_include_time=True)
assert date_field_1.date_include_time == False
assert date_field_2.date_include_time == True
assert date_field_1.date_include_time is False
assert date_field_2.date_include_time is True
assert len(DateField.objects.all()) == 2
model = table.get_model(attribute_names=True)
row = row_handler.create_row(user=user, table=table, values={}, model=model)
assert row.date == None
assert row.datetime == None
assert row.date is None
assert row.datetime is None
row = row_handler.create_row(user=user, table=table, values={
'date': '2020-4-1',
@ -361,7 +365,7 @@ def test_date_field_type(data_fixture):
'datetime': make_aware(datetime(2020, 4, 1, 12, 30, 30), amsterdam)
}, model=model)
row.refresh_from_db()
assert row.date == None
assert row.date is None
assert row.datetime == datetime(2020, 4, 1, 10, 30, 30, tzinfo=timezone('UTC'))
date_field_1 = field_handler.update_field(user=user, field=date_field_1,
@ -369,17 +373,17 @@ def test_date_field_type(data_fixture):
date_field_2 = field_handler.update_field(user=user, field=date_field_2,
date_include_time=False)
assert date_field_1.date_include_time == True
assert date_field_2.date_include_time == False
assert date_field_1.date_include_time is True
assert date_field_2.date_include_time is False
model = table.get_model(attribute_names=True)
rows = model.objects.all()
assert rows[0].date == None
assert rows[0].datetime == None
assert rows[0].date is None
assert rows[0].datetime is None
assert rows[1].date == datetime(2020, 4, 1, tzinfo=timezone('UTC'))
assert rows[1].datetime == date(2020, 4, 1)
assert rows[2].date == None
assert rows[2].date is None
assert rows[2].datetime == date(2020, 4, 1)
field_handler.delete_field(user=user, field=date_field_1)
@ -392,7 +396,7 @@ def test_date_field_type(data_fixture):
def test_email_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(user=user, database=table.database)
data_fixture.create_database_table(user=user, database=table.database)
field = data_fixture.create_text_field(table=table, order=1, name='name')
field_handler = FieldHandler()
@ -416,32 +420,32 @@ def test_email_field_type(data_fixture):
'email': 'invalid@email'
}, model=model)
row_0 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'a.very.STRANGE@email.address.coM',
'email': 'test@test.nl',
'number': 5
}, model=model)
row_1 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'someuser',
'email': 'some@user.com',
'number': 10
}, model=model)
row_2 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'http://www.baserow.io',
'email': 'bram@test.nl'
}, model=model)
row_3 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'NOT AN EMAIL',
'email': 'something@example.com'
}, model=model)
row_4 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'name': 'testing@nowhere.org',
'email': ''
}, model=model)
row_5 = row_handler.create_row(user=user, table=table, values={
row_handler.create_row(user=user, table=table, values={
'email': None,
}, model=model)
row_6 = row_handler.create_row(user=user, table=table, values={}, model=model)
row_handler.create_row(user=user, table=table, values={}, model=model)
# Convert the text field to a url field so we can check how the conversion of
# values went.
@ -481,3 +485,137 @@ def test_email_field_type(data_fixture):
field_handler.delete_field(user=user, field=field_2)
assert len(EmailField.objects.all()) == 2
@pytest.mark.django_db
def test_file_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
user_file_1 = data_fixture.create_user_file()
user_file_2 = data_fixture.create_user_file()
user_file_3 = data_fixture.create_user_file()
field_handler = FieldHandler()
row_handler = RowHandler()
file = field_handler.create_field(user=user, table=table, type_name='file',
name='File')
assert FileField.objects.all().count() == 1
model = table.get_model(attribute_names=True)
with pytest.raises(ValidationError):
row_handler.create_row(user=user, table=table, values={
'file': 'not_a_json'
}, model=model)
with pytest.raises(ValidationError):
row_handler.create_row(user=user, table=table, values={
'file': {}
}, model=model)
with pytest.raises(ValidationError):
row_handler.create_row(user=user, table=table, values={
'file': [{'no_name': 'test'}]
}, model=model)
with pytest.raises(InvalidUserFileNameError):
row_handler.create_row(user=user, table=table, values={
'file': [{'name': 'wrongfilename.jpg'}]
}, model=model)
with pytest.raises(UserFileDoesNotExist):
row_handler.create_row(user=user, table=table, values={
'file': [{'name': 'file_name.jpg'}]
}, model=model)
row = row_handler.create_row(user=user, table=table, values={
'file': [{'name': user_file_1.name}]
}, model=model)
assert row.file[0]['visible_name'] == user_file_1.original_name
del row.file[0]['visible_name']
assert row.file[0] == user_file_1.serialize()
row = row_handler.create_row(user=user, table=table, values={
'file': [
{'name': user_file_2.name},
{'name': user_file_1.name},
{'name': user_file_1.name}
]
}, model=model)
assert row.file[0]['visible_name'] == user_file_2.original_name
assert row.file[1]['visible_name'] == user_file_1.original_name
assert row.file[2]['visible_name'] == user_file_1.original_name
del row.file[0]['visible_name']
del row.file[1]['visible_name']
del row.file[2]['visible_name']
assert row.file[0] == user_file_2.serialize()
assert row.file[1] == user_file_1.serialize()
assert row.file[2] == user_file_1.serialize()
row = row_handler.create_row(user=user, table=table, values={
'file': [
{'name': user_file_1.name},
{'name': user_file_3.name},
{'name': user_file_2.name}
]
}, model=model)
assert row.file[0]['visible_name'] == user_file_1.original_name
assert row.file[1]['visible_name'] == user_file_3.original_name
assert row.file[2]['visible_name'] == user_file_2.original_name
del row.file[0]['visible_name']
del row.file[1]['visible_name']
del row.file[2]['visible_name']
assert row.file[0] == user_file_1.serialize()
assert row.file[1] == user_file_3.serialize()
assert row.file[2] == user_file_2.serialize()
row = row_handler.update_row(user=user, table=table, row_id=row.id, values={
'file': [
{'name': user_file_1.name, 'visible_name': 'not_original.jpg'},
]
}, model=model)
assert row.file[0]['visible_name'] == 'not_original.jpg'
del row.file[0]['visible_name']
assert row.file[0] == user_file_1.serialize()
assert model.objects.all().count() == 3
field_handler.delete_field(user=user, field=file)
assert FileField.objects.all().count() == 0
model.objects.all().delete()
text = field_handler.create_field(user=user, table=table, type_name='text',
name='Text')
model = table.get_model(attribute_names=True)
row = row_handler.create_row(user=user, table=table, values={
'text': 'Some random text'
}, model=model)
row_handler.create_row(user=user, table=table, values={
'text': '["Not compatible"]'
}, model=model)
row_handler.create_row(user=user, table=table, values={
'text': json.dumps(user_file_1.serialize())
}, model=model)
file = field_handler.update_field(user=user, table=table, field=text,
new_type_name='file', name='File')
model = table.get_model(attribute_names=True)
results = model.objects.all()
assert results[0].file == []
assert results[1].file == []
assert results[2].file == []
row_handler.update_row(user=user, table=table, row_id=row.id, values={
'file': [
{'name': user_file_1.name, 'visible_name': 'not_original.jpg'},
]
}, model=model)
field_handler.update_field(user=user, table=table, field=file,
new_type_name='text', name='text')
model = table.get_model(attribute_names=True)
results = model.objects.all()
assert results[0].text is None
assert results[1].text is None
assert results[2].text is None

View file

@ -42,10 +42,10 @@ def test_link_row_field_type(data_fixture):
cars_primary_field = field_handler.create_field(
user=user, table=cars_table, type_name='text', name='Name', primary=True
)
cars_row_1 = row_handler.create_row(user=user, table=cars_table, values={
row_handler.create_row(user=user, table=cars_table, values={
f'field_{cars_primary_field.id}': 'BMW'
})
cars_row_2 = row_handler.create_row(user=user, table=cars_table, values={
row_handler.create_row(user=user, table=cars_table, values={
f'field_{cars_primary_field.id}': 'Audi'
})
@ -339,13 +339,13 @@ def test_link_row_enhance_queryset(data_fixture, django_assert_num_queries):
customers_row_2 = row_handler.create_row(user=user, table=customers_table)
customers_row_3 = row_handler.create_row(user=user, table=customers_table)
row = row_handler.create_row(user=user, table=example_table, values={
row_handler.create_row(user=user, table=example_table, values={
f'field_{link_row_field.id}': [customers_row_1.id, customers_row_2.id],
})
row_2 = row_handler.create_row(user=user, table=example_table, values={
row_handler.create_row(user=user, table=example_table, values={
f'field_{link_row_field.id}': [customers_row_1.id],
})
row_3 = row_handler.create_row(user=user, table=example_table, values={
row_handler.create_row(user=user, table=example_table, values={
f'field_{link_row_field.id}': [customers_row_3.id],
})
@ -545,7 +545,7 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):
database=database)
grid = data_fixture.create_grid_view(table=example_table)
example_primary = data_fixture.create_text_field(
data_fixture.create_text_field(
name='Name',
table=example_table,
primary=True

View file

@ -191,7 +191,7 @@ def test_delete_row(data_fixture):
handler = RowHandler()
model = table.get_model()
row = handler.create_row(user=user, table=table)
row_2 = handler.create_row(user=user, table=table)
handler.create_row(user=user, table=table)
with pytest.raises(UserNotInGroupError):
handler.delete_row(user=user_2, table=table, row_id=row.id)

View file

@ -28,6 +28,14 @@ def test_get_database_table(data_fixture):
with pytest.raises(TableDoesNotExist):
handler.get_table(user=user, table_id=99999)
# If the error is raised we know for sure that the base query has resolved.
with pytest.raises(AttributeError):
handler.get_table(
user=user,
table_id=table.id,
base_queryset=Table.objects.prefetch_related('UNKNOWN')
)
table_copy = handler.get_table(user=user, table_id=table.id)
assert table_copy.id == table.id

View file

@ -5,6 +5,9 @@ from unittest.mock import MagicMock
from django.db import models
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotPossible, OrderByFieldNotFound
)
@pytest.mark.django_db
@ -193,3 +196,90 @@ def test_search_all_fields_queryset(data_fixture):
results = model.objects.all().search_all_fields('white car')
assert len(results) == 0
@pytest.mark.django_db
def test_order_by_fields_string_queryset(data_fixture):
table = data_fixture.create_database_table(name='Cars')
table_2 = data_fixture.create_database_table(database=table.database)
name_field = data_fixture.create_text_field(table=table, order=0, name='Name')
color_field = data_fixture.create_text_field(table=table, order=1, name='Color')
price_field = data_fixture.create_number_field(table=table, order=2, name='Price')
description_field = data_fixture.create_long_text_field(
table=table, order=3, name='Description'
)
link_field = data_fixture.create_link_row_field(table=table,
link_row_table=table_2)
model = table.get_model(attribute_names=True)
row_1 = model.objects.create(
name='BMW',
color='Blue',
price=10000,
description='Sports car.'
)
row_2 = model.objects.create(
name='Audi',
color='Orange',
price=20000,
description='This is the most expensive car we have.'
)
row_3 = model.objects.create(
name='Volkswagen',
color='White',
price=5000,
description='A very old car.'
)
row_4 = model.objects.create(
name='Volkswagen',
color='Green',
price=4000,
description='Strange color.'
)
with pytest.raises(ValueError):
model.objects.all().order_by_fields_string('xxxx')
with pytest.raises(ValueError):
model.objects.all().order_by_fields_string('')
with pytest.raises(ValueError):
model.objects.all().order_by_fields_string('id')
with pytest.raises(OrderByFieldNotFound):
model.objects.all().order_by_fields_string('field_99999')
with pytest.raises(OrderByFieldNotPossible):
model.objects.all().order_by_fields_string(f'field_{link_field.id}')
results = model.objects.all().order_by_fields_string(
f'-field_{price_field.id}'
)
assert results[0].id == row_2.id
assert results[1].id == row_1.id
assert results[2].id == row_3.id
assert results[3].id == row_4.id
results = model.objects.all().order_by_fields_string(
f'field_{name_field.id},-field_{price_field.id}'
)
assert results[0].id == row_2.id
assert results[1].id == row_1.id
assert results[2].id == row_3.id
assert results[3].id == row_4.id
results = model.objects.all().order_by_fields_string(
f'-field_{price_field.id}'
)
assert results[0].id == row_2.id
assert results[1].id == row_1.id
assert results[2].id == row_3.id
assert results[3].id == row_4.id
results = model.objects.all().order_by_fields_string(
f'{description_field.id},-field_{color_field.id}'
)
assert results[0].id == row_3.id
assert results[1].id == row_1.id
assert results[2].id == row_4.id
assert results[3].id == row_2.id

View file

@ -18,11 +18,11 @@ from baserow.contrib.database.tokens.exceptions import (
@pytest.mark.django_db
def test_get_by_key(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
data_fixture.create_user()
group_1 = data_fixture.create_group(user=user)
group_2 = data_fixture.create_group()
token = data_fixture.create_token(user=user, group=group_1)
token_2 = data_fixture.create_token(user=user, group=group_2)
data_fixture.create_token(user=user, group=group_2)
handler = TokenHandler()
@ -92,7 +92,7 @@ def test_generate_token(data_fixture):
@pytest.mark.django_db
def test_create_token(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
data_fixture.create_user()
group_1 = data_fixture.create_group(user=user)
group_2 = data_fixture.create_group()
@ -114,23 +114,23 @@ def test_create_token(data_fixture):
assert permissions[0].token_id == token.id
assert permissions[0].type == 'create'
assert permissions[0].database_id == None
assert permissions[0].table_id == None
assert permissions[0].database_id is None
assert permissions[0].table_id is None
assert permissions[1].token_id == token.id
assert permissions[1].type == 'read'
assert permissions[1].database_id == None
assert permissions[1].table_id == None
assert permissions[1].database_id is None
assert permissions[1].table_id is None
assert permissions[2].token_id == token.id
assert permissions[2].type == 'update'
assert permissions[2].database_id == None
assert permissions[2].table_id == None
assert permissions[2].database_id is None
assert permissions[2].table_id is None
assert permissions[3].token_id == token.id
assert permissions[3].type == 'delete'
assert permissions[3].database_id == None
assert permissions[3].table_id == None
assert permissions[3].database_id is None
assert permissions[3].table_id is None
@pytest.mark.django_db
@ -202,16 +202,16 @@ def test_update_token_permission(data_fixture):
handler.update_token_permissions(user, token=token_1, create=True, read=True,
update=True, delete=True)
assert TokenPermission.objects.all().count() == 4
permission_1_1 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='create', database__isnull=True, table__isnull=True
)
permission_1_2 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='read', database__isnull=True, table__isnull=True
)
permission_1_3 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='update', database__isnull=True, table__isnull=True
)
permission_1_4 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='delete', database__isnull=True, table__isnull=True
)
@ -230,13 +230,13 @@ def test_update_token_permission(data_fixture):
permission_2_2 = TokenPermission.objects.get(
token=token_1, type='read', database_id=database_2.id, table__isnull=True
)
permission_2_3 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='read', database__isnull=True, table_id=table_2.id
)
permission_2_4 = TokenPermission.objects.get(
token=token_1, type='update', database__isnull=True, table_id=table_1.id
)
permission_2_5 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='delete', database__isnull=True, table__isnull=True
)
@ -252,7 +252,7 @@ def test_update_token_permission(data_fixture):
permission_3_1 = TokenPermission.objects.get(
token=token_1, type='create', database_id=database_1.id, table__isnull=True
)
permission_3_2 = TokenPermission.objects.get(
TokenPermission.objects.get(
token=token_1, type='create', database__isnull=True, table_id=table_2.id
)
permission_3_3 = TokenPermission.objects.get(

View file

@ -265,7 +265,7 @@ def test_contains_filter_type(data_fixture):
f'field_{text_field.id}': 'My name is John Doe.',
f'field_{long_text_field.id}': 'Long text that is not empty.',
})
row_2 = model.objects.create(**{
model.objects.create(**{
f'field_{text_field.id}': '',
f'field_{long_text_field.id}': '',
})
@ -487,7 +487,7 @@ def test_higher_than_filter_type(data_fixture):
f'field_{integer_field.id}': 10,
f'field_{decimal_field.id}': 20.20,
})
row_2 = model.objects.create(**{
model.objects.create(**{
f'field_{integer_field.id}': None,
f'field_{decimal_field.id}': None,
})
@ -633,7 +633,7 @@ def test_lower_than_filter_type(data_fixture):
f'field_{integer_field.id}': 10,
f'field_{decimal_field.id}': 20.20,
})
row_2 = model.objects.create(**{
model.objects.create(**{
f'field_{integer_field.id}': None,
f'field_{decimal_field.id}': None,
})
@ -782,11 +782,11 @@ def test_date_equal_filter_type(data_fixture):
f'field_{date_field.id}': date(2019, 1, 1),
f'field_{date_time_field.id}': make_aware(datetime(2020, 6, 17, 1, 30, 5), utc)
})
row_3 = model.objects.create(**{
model.objects.create(**{
f'field_{date_field.id}': None,
f'field_{date_time_field.id}': None
})
row_4 = model.objects.create(**{
model.objects.create(**{
f'field_{date_field.id}': date(2010, 1, 1),
f'field_{date_time_field.id}': make_aware(datetime(2010, 2, 4, 2, 45, 45), utc)
})
@ -1016,6 +1016,7 @@ def test_empty_filter_type(data_fixture):
date_include_time=True
)
boolean_field = data_fixture.create_boolean_field(table=table)
file_field = data_fixture.create_file_field(table=table)
tmp_table = data_fixture.create_database_table(database=table.database)
tmp_field = data_fixture.create_text_field(table=tmp_table, primary=True)
@ -1038,6 +1039,7 @@ def test_empty_filter_type(data_fixture):
f'field_{date_field.id}': None,
f'field_{date_time_field.id}': None,
f'field_{boolean_field.id}': False,
f'field_{file_field.id}': []
})
row_2 = model.objects.create(**{
f'field_{text_field.id}': 'Value',
@ -1047,6 +1049,7 @@ def test_empty_filter_type(data_fixture):
f'field_{date_field.id}': date(2020, 6, 17),
f'field_{date_time_field.id}': make_aware(datetime(2020, 6, 17, 1, 30, 0), utc),
f'field_{boolean_field.id}': True,
f'field_{file_field.id}': [{'name': 'test_file.png'}]
})
getattr(row_2, f'field_{link_row_field.id}').add(tmp_row.id)
row_3 = model.objects.create(**{
@ -1057,6 +1060,9 @@ def test_empty_filter_type(data_fixture):
f'field_{date_field.id}': date(1970, 1, 1),
f'field_{date_time_field.id}': make_aware(datetime(1970, 1, 1, 0, 0, 0), utc),
f'field_{boolean_field.id}': True,
f'field_{file_field.id}': [
{'name': 'test_file.png'}, {'name': 'another_file.jpg'}
]
})
getattr(row_3, f'field_{link_row_field.id}').add(tmp_row.id)
@ -1096,6 +1102,10 @@ def test_empty_filter_type(data_fixture):
filter.save()
assert handler.apply_filters(grid_view, model.objects.all()).get().id == row.id
filter.field = file_field
filter.save()
assert handler.apply_filters(grid_view, model.objects.all()).get().id == row.id
@pytest.mark.django_db
def test_not_empty_filter_type(data_fixture):
@ -1116,6 +1126,7 @@ def test_not_empty_filter_type(data_fixture):
date_include_time=True
)
boolean_field = data_fixture.create_boolean_field(table=table)
file_field = data_fixture.create_file_field(table=table)
tmp_table = data_fixture.create_database_table(database=table.database)
tmp_field = data_fixture.create_text_field(table=tmp_table, primary=True)
@ -1130,7 +1141,7 @@ def test_not_empty_filter_type(data_fixture):
model = table.get_model()
utc = timezone('UTC')
row = model.objects.create(**{
model.objects.create(**{
f'field_{text_field.id}': '',
f'field_{long_text_field.id}': '',
f'field_{integer_field.id}': None,
@ -1138,6 +1149,7 @@ def test_not_empty_filter_type(data_fixture):
f'field_{date_field.id}': None,
f'field_{date_time_field.id}': None,
f'field_{boolean_field.id}': False,
f'field_{file_field.id}': []
})
row_2 = model.objects.create(**{
f'field_{text_field.id}': 'Value',
@ -1147,6 +1159,7 @@ def test_not_empty_filter_type(data_fixture):
f'field_{date_field.id}': date(2020, 6, 17),
f'field_{date_time_field.id}': make_aware(datetime(2020, 6, 17, 1, 30, 0), utc),
f'field_{boolean_field.id}': True,
f'field_{file_field.id}': [{'name': 'test_file.png'}]
})
getattr(row_2, f'field_{link_row_field.id}').add(tmp_row.id)
@ -1185,3 +1198,7 @@ def test_not_empty_filter_type(data_fixture):
filter.field = boolean_field
filter.save()
assert handler.apply_filters(grid_view, model.objects.all()).get().id == row_2.id
filter.field = file_field
filter.save()
assert handler.apply_filters(grid_view, model.objects.all()).get().id == row_2.id

View file

@ -36,6 +36,7 @@ def test_get_view(data_fixture):
assert view.id == grid.id
assert view.name == grid.name
assert view.filter_type == 'AND'
assert not view.filters_disabled
assert isinstance(view, View)
view = handler.get_view(user=user, view_id=grid.id, view_model=GridView)
@ -43,6 +44,7 @@ def test_get_view(data_fixture):
assert view.id == grid.id
assert view.name == grid.name
assert view.filter_type == 'AND'
assert not view.filters_disabled
assert isinstance(view, GridView)
# If the error is raised we know for sure that the query has resolved.
@ -58,6 +60,7 @@ def test_create_view(data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(user=user)
handler = ViewHandler()
handler.create_view(user=user, table=table, type_name='grid', name='Test grid')
@ -70,6 +73,32 @@ def test_create_view(data_fixture):
assert grid.order == 1
assert grid.table == table
assert grid.filter_type == 'AND'
assert not grid.filters_disabled
handler.create_view(user=user, table=table, type_name='grid',
name='Something else', filter_type='OR', filters_disabled=True)
assert View.objects.all().count() == 2
assert GridView.objects.all().count() == 2
grid = GridView.objects.all().last()
assert grid.name == 'Something else'
assert grid.order == 2
assert grid.table == table
assert grid.filter_type == 'OR'
assert grid.filters_disabled
grid = handler.create_view(user=user, table=table_2, type_name='grid', name='Name',
filter_type='OR', filters_disabled=False)
assert View.objects.all().count() == 3
assert GridView.objects.all().count() == 3
assert grid.name == 'Name'
assert grid.order == 1
assert grid.table == table_2
assert grid.filter_type == 'OR'
assert not grid.filters_disabled
with pytest.raises(UserNotInGroupError):
handler.create_view(user=user_2, table=table, type_name='grid', name='')
@ -97,11 +126,14 @@ def test_update_view(data_fixture):
grid.refresh_from_db()
assert grid.name == 'Test 1'
assert grid.filter_type == 'AND'
assert not grid.filters_disabled
handler.update_view(user=user, view=grid, filter_type='OR')
handler.update_view(user=user, view=grid, filter_type='OR', filters_disabled=True)
grid.refresh_from_db()
assert grid.filter_type == 'OR'
assert grid.filters_disabled
@pytest.mark.django_db
@ -134,24 +166,36 @@ def test_update_grid_view_field_options(data_fixture):
field_3 = data_fixture.create_text_field()
with pytest.raises(ValueError):
ViewHandler().update_grid_view_field_options(grid_view=grid_view, field_options={
'strange_format': {'height': 150},
})
ViewHandler().update_grid_view_field_options(
grid_view=grid_view,
field_options={
'strange_format': {'height': 150},
}
)
with pytest.raises(UnrelatedFieldError):
ViewHandler().update_grid_view_field_options(grid_view=grid_view, field_options={
99999: {'width': 150},
})
ViewHandler().update_grid_view_field_options(
grid_view=grid_view,
field_options={
99999: {'width': 150},
}
)
with pytest.raises(UnrelatedFieldError):
ViewHandler().update_grid_view_field_options(grid_view=grid_view, field_options={
field_3.id: {'width': 150},
})
ViewHandler().update_grid_view_field_options(
grid_view=grid_view,
field_options={
field_3.id: {'width': 150},
}
)
ViewHandler().update_grid_view_field_options(grid_view=grid_view, field_options={
str(field_1.id): {'width': 150},
field_2.id: {'width': 250}
})
ViewHandler().update_grid_view_field_options(
grid_view=grid_view,
field_options={
str(field_1.id): {'width': 150},
field_2.id: {'width': 250}
}
)
options_4 = grid_view.get_field_options()
assert len(options_4) == 2
@ -182,9 +226,9 @@ def test_field_type_changed(data_fixture):
table_2 = data_fixture.create_database_table(user=user, database=table.database)
text_field = data_fixture.create_text_field(table=table)
grid_view = data_fixture.create_grid_view(table=table)
contains_filter = data_fixture.create_view_filter(view=grid_view, field=text_field,
type='contains', value='test')
sort = data_fixture.create_view_sort(view=grid_view, field=text_field, order='ASC')
data_fixture.create_view_filter(view=grid_view, field=text_field,
type='contains', value='test')
data_fixture.create_view_sort(view=grid_view, field=text_field, order='ASC')
field_handler = FieldHandler()
long_text_field = field_handler.update_field(user=user, field=text_field,
@ -192,7 +236,8 @@ def test_field_type_changed(data_fixture):
assert ViewFilter.objects.all().count() == 1
assert ViewSort.objects.all().count() == 1
field_handler.update_field(user=user, field=long_text_field, new_type_name='number')
field_handler.update_field(user=user, field=long_text_field,
new_type_name='number')
assert ViewFilter.objects.all().count() == 0
assert ViewSort.objects.all().count() == 1
@ -334,6 +379,14 @@ def test_apply_filters(data_fixture):
assert rows[0].id == row_2.id
assert rows[1].id == row_4.id
grid_view.filters_disabled = True
grid_view.save()
rows = view_handler.apply_filters(grid_view, model.objects.all())
assert rows[0].id == row_1.id
assert rows[1].id == row_2.id
assert rows[2].id == row_3.id
assert rows[3].id == row_4.id
@pytest.mark.django_db
def test_get_filter(data_fixture):
@ -349,6 +402,13 @@ def test_get_filter(data_fixture):
with pytest.raises(UserNotInGroupError):
handler.get_filter(user=user_2, view_filter_id=equal_filter.id)
with pytest.raises(AttributeError):
handler.get_filter(
user=user,
view_filter_id=equal_filter.id,
base_queryset=ViewFilter.objects.prefetch_related('UNKNOWN')
)
filter = handler.get_filter(user=user, view_filter_id=equal_filter.id)
assert filter.id == equal_filter.id
@ -409,7 +469,7 @@ def test_create_filter(data_fixture):
tmp_field = Field.objects.get(pk=text_field.id)
view_filter_2 = handler.create_filter(user=user, view=grid_view, field=tmp_field,
type_name='equal', value='test')
type_name='equal', value='test')
assert view_filter_2.view_id == grid_view.id
assert view_filter_2.field_id == text_field.id
assert view_filter_2.type == 'equal'
@ -618,6 +678,13 @@ def test_get_sort(data_fixture):
with pytest.raises(UserNotInGroupError):
handler.get_sort(user=user_2, view_sort_id=equal_sort.id)
with pytest.raises(AttributeError):
handler.get_sort(
user=user,
view_sort_id=equal_sort.id,
base_queryset=ViewSort.objects.prefetch_related('UNKNOWN')
)
sort = handler.get_sort(user=user, view_sort_id=equal_sort.id)
assert sort.id == equal_sort.id
@ -717,7 +784,7 @@ def test_update_sort(data_fixture):
assert updated_sort.field_id == text_field.id
assert updated_sort.view_id == grid_view.id
view_sort_2 = data_fixture.create_view_sort(view=grid_view, field=long_text_field)
data_fixture.create_view_sort(view=grid_view, field=long_text_field)
with pytest.raises(ViewSortFieldAlreadyExist):
handler.update_sort(user=user, view_sort=view_sort, order='ASC',

View file

@ -35,9 +35,9 @@ def test_base_email_message():
email = SimpleResetPasswordEmail(['test@baserow.io'])
context = email.get_context()
assert 'public_backend_domain' in context
assert 'public_backend_url' in context
assert 'public_web_frontend_domain' in context
assert 'public_backend_hostname' in context
assert 'public_web_frontend_url' in context
assert 'public_web_frontend_hostname' in context
assert email.get_from_email() == 'no-reply@localhost'
assert email.get_subject() == 'Reset password'

View file

@ -12,7 +12,7 @@ def test_group_user_get_next_order(data_fixture):
group_user_1 = data_fixture.create_user_group(order=0)
group_user_2_1 = data_fixture.create_user_group(order=10)
group_user_2_2 = data_fixture.create_user_group(user=group_user_2_1.user, order=11)
data_fixture.create_user_group(user=group_user_2_1.user, order=11)
assert GroupUser.get_last_order(group_user_1.user) == 1
assert GroupUser.get_last_order(group_user_2_1.user) == 12

View file

@ -1,6 +1,9 @@
from io import BytesIO
from baserow.core.utils import (
extract_allowed, set_allowed_attrs, to_pascal_case, to_snake_case,
remove_special_characters, dict_to_object, random_string
remove_special_characters, dict_to_object, random_string, sha256_hash,
stream_size
)
@ -61,3 +64,16 @@ def test_dict_to_object():
def test_random_string():
assert len(random_string(32)) == 32
assert random_string(32) != random_string(32)
def test_sha256_hash():
assert sha256_hash(BytesIO(b'test')) == (
'9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'
)
assert sha256_hash(BytesIO(b'Hello World')) == (
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
)
def test_stream_size():
assert stream_size(BytesIO(b'test')) == 4

View file

@ -11,10 +11,11 @@ from baserow.contrib.database.models import (
)
from baserow.contrib.database.views.models import GridViewFieldOptions
from baserow.core.user.exceptions import (
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLDomainNotAllowed
UserAlreadyExist, UserNotFound, InvalidPassword, BaseURLHostnameNotAllowed
)
from baserow.core.user.handler import UserHandler
@pytest.mark.django_db
def test_get_user(data_fixture):
user_1 = data_fixture.create_user(email='user1@localhost')
@ -80,7 +81,7 @@ def test_send_reset_password_email(data_fixture, mailoutbox):
user = data_fixture.create_user(email='test@localhost')
handler = UserHandler()
with pytest.raises(BaseURLDomainNotAllowed):
with pytest.raises(BaseURLHostnameNotAllowed):
handler.send_reset_password_email(user, 'http://test.nl/reset-password')
signer = handler.get_reset_password_signer()

View file

@ -0,0 +1,306 @@
import pytest
import responses
import string
from freezegun import freeze_time
from PIL import Image
from io import BytesIO
from django.conf import settings
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
from baserow.core.models import UserFile
from baserow.core.user_files.exceptions import (
InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
MaximumUniqueTriesError
)
from baserow.core.user_files.handler import UserFileHandler
@pytest.mark.django_db
def test_user_file_path(data_fixture):
handler = UserFileHandler()
assert handler.user_file_path('test.jpg') == 'user_files/test.jpg'
assert handler.user_file_path('another_file.png') == 'user_files/another_file.png'
user_file = data_fixture.create_user_file()
assert handler.user_file_path(user_file) == f'user_files/{user_file.name}'
@pytest.mark.django_db
def test_user_file_thumbnail_path(data_fixture):
handler = UserFileHandler()
assert handler.user_file_thumbnail_path(
'test.jpg',
'tiny'
) == 'thumbnails/tiny/test.jpg'
assert handler.user_file_thumbnail_path(
'another_file.png',
'small'
) == 'thumbnails/small/another_file.png'
user_file = data_fixture.create_user_file()
assert handler.user_file_thumbnail_path(
user_file,
'tiny'
) == f'thumbnails/tiny/{user_file.name}'
@pytest.mark.django_db
def test_generate_unique(data_fixture):
user = data_fixture.create_user()
handler = UserFileHandler()
assert len(handler.generate_unique('test', 'txt', 32)) == 32
assert len(handler.generate_unique('test', 'txt', 10)) == 10
assert (
handler.generate_unique('test', 'txt', 32) !=
handler.generate_unique('test', 'txt', 32)
)
unique = handler.generate_unique('test', 'txt', 32)
assert not UserFile.objects.filter(unique=unique).exists()
for char in string.ascii_letters + string.digits:
data_fixture.create_user_file(uploaded_by=user, unique=char,
original_extension='txt', sha256_hash='test')
with pytest.raises(MaximumUniqueTriesError):
handler.generate_unique('test', 'txt', 1, 3)
handler.generate_unique('test2', 'txt', 1, 3)
handler.generate_unique('test', 'txt2', 1, 3)
@pytest.mark.django_db
def test_upload_user_file(data_fixture, tmpdir):
user = data_fixture.create_user()
storage = FileSystemStorage(location=str(tmpdir), base_url='http://localhost')
handler = UserFileHandler()
with pytest.raises(InvalidFileStreamError):
handler.upload_user_file(
user,
'test.txt',
'NOT A STREAM!',
storage=storage
)
with pytest.raises(InvalidFileStreamError):
handler.upload_user_file(
user,
'test.txt',
None,
storage=storage
)
old_limit = settings.USER_FILE_SIZE_LIMIT
settings.USER_FILE_SIZE_LIMIT = 6
with pytest.raises(FileSizeTooLargeError):
handler.upload_user_file(
user,
'test.txt',
ContentFile(b'Hello World')
)
settings.USER_FILE_SIZE_LIMIT = old_limit
with freeze_time('2020-01-01 12:00'):
user_file = handler.upload_user_file(
user,
'test.txt',
ContentFile(b'Hello World'),
storage=storage
)
assert user_file.original_name == 'test.txt'
assert user_file.original_extension == 'txt'
assert len(user_file.unique) == 32
assert user_file.size == 11
assert user_file.mime_type == 'text/plain'
assert user_file.uploaded_by_id == user.id
assert user_file.uploaded_at.year == 2020
assert user_file.uploaded_at.month == 1
assert user_file.uploaded_at.day == 1
assert user_file.is_image is False
assert user_file.image_width is None
assert user_file.image_height is None
assert user_file.sha256_hash == (
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
)
file_path = tmpdir.join('user_files', user_file.name)
assert file_path.isfile()
assert file_path.open().read() == 'Hello World'
user_file = handler.upload_user_file(
user,
'another.txt',
BytesIO(b'Hello'),
storage=storage
)
assert user_file.original_name == 'another.txt'
assert user_file.original_extension == 'txt'
assert user_file.mime_type == 'text/plain'
assert user_file.size == 5
assert user_file.sha256_hash == (
'185f8db32271fe25f561a6fc938b2e264306ec304eda518007d1764826381969'
)
file_path = tmpdir.join('user_files', user_file.name)
assert file_path.isfile()
assert file_path.open().read() == 'Hello'
assert (
handler.upload_user_file(
user,
'another.txt',
ContentFile(b'Hello'),
storage=storage
).id == user_file.id
)
assert handler.upload_user_file(
user,
'another_name.txt',
ContentFile(b'Hello'),
storage=storage
).id != user_file.id
image = Image.new('RGB', (100, 140), color='red')
image_bytes = BytesIO()
image.save(image_bytes, format='PNG')
user_file = handler.upload_user_file(
user,
'some image.png',
image_bytes,
storage=storage
)
assert user_file.mime_type == 'image/png'
assert user_file.is_image is True
assert user_file.image_width == 100
assert user_file.image_height == 140
file_path = tmpdir.join('user_files', user_file.name)
assert file_path.isfile()
file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
assert file_path.isfile()
thumbnail = Image.open(file_path.open('rb'))
assert thumbnail.height == 21
assert thumbnail.width == 21
old_thumbnail_settings = settings.USER_THUMBNAILS
settings.USER_THUMBNAILS = {'tiny': [None, 100]}
image = Image.new('RGB', (1920, 1080), color='red')
image_bytes = BytesIO()
image.save(image_bytes, format='PNG')
user_file = handler.upload_user_file(
user,
'red.png',
image_bytes,
storage=storage
)
file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
assert file_path.isfile()
thumbnail = Image.open(file_path.open('rb'))
assert thumbnail.width == 178
assert thumbnail.height == 100
image = Image.new('RGB', (400, 400), color='red')
image_bytes = BytesIO()
image.save(image_bytes, format='PNG')
user_file = handler.upload_user_file(
user,
'red2.png',
image_bytes,
storage=storage
)
file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
assert file_path.isfile()
thumbnail = Image.open(file_path.open('rb'))
assert thumbnail.width == 100
assert thumbnail.height == 100
settings.USER_THUMBNAILS = {'tiny': [21, None]}
image = Image.new('RGB', (1400, 1000), color='red')
image_bytes = BytesIO()
image.save(image_bytes, format='PNG')
user_file = handler.upload_user_file(
user,
'red3.png',
image_bytes,
storage=storage
)
file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
assert file_path.isfile()
thumbnail = Image.open(file_path.open('rb'))
assert thumbnail.width == 21
assert thumbnail.height == 15
settings.USER_THUMBNAILS = old_thumbnail_settings
assert UserFile.objects.all().count() == 7
@pytest.mark.django_db
@responses.activate
def test_upload_user_file_by_url(data_fixture, tmpdir):
user = data_fixture.create_user()
storage = FileSystemStorage(location=str(tmpdir), base_url='http://localhost')
handler = UserFileHandler()
responses.add(
responses.GET,
'http://localhost/test.txt',
body=b'Hello World',
status=200,
content_type="text/plain",
stream=True,
)
responses.add(
responses.GET,
'http://localhost/not-found.pdf',
body=b'Hello World',
status=404,
content_type="application/pdf",
stream=True,
)
with pytest.raises(FileURLCouldNotBeReached):
handler.upload_user_file_by_url(
user,
'http://localhost/test2.txt',
storage=storage
)
with freeze_time('2020-01-01 12:00'):
user_file = handler.upload_user_file_by_url(
user,
'http://localhost/test.txt',
storage=storage
)
with pytest.raises(FileURLCouldNotBeReached):
handler.upload_user_file_by_url(
user,
'http://localhost/not-found.pdf',
storage=storage
)
assert user_file.original_name == 'test.txt'
assert user_file.original_extension == 'txt'
assert len(user_file.unique) == 32
assert user_file.size == 11
assert user_file.mime_type == 'text/plain'
assert user_file.uploaded_by_id == user.id
assert user_file.uploaded_at.year == 2020
assert user_file.uploaded_at.month == 1
assert user_file.uploaded_at.day == 1
assert user_file.is_image is False
assert user_file.image_width is None
assert user_file.image_height is None
assert user_file.sha256_hash == (
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
)
file_path = tmpdir.join('user_files', user_file.name)
assert file_path.isfile()
assert file_path.open().read() == 'Hello World'

View file

@ -0,0 +1,32 @@
import pytest
from baserow.core.models import UserFile
from baserow.core.user_files.exceptions import InvalidUserFileNameError
@pytest.mark.django_db
def test_user_file_name(data_fixture):
user_file = data_fixture.create_user_file()
user_file_2 = data_fixture.create_user_file()
user_file_3 = data_fixture.create_user_file()
with pytest.raises(InvalidUserFileNameError):
UserFile.objects.all().name('wrong.jpg')
queryset = UserFile.objects.all().name(user_file.name)
assert len(queryset) == 1
assert queryset[0].id == user_file.id
queryset = UserFile.objects.all().name(user_file_2.name)
assert len(queryset) == 1
assert queryset[0].id == user_file_2.id
queryset = UserFile.objects.all().name(user_file.name, user_file_2.name)
assert len(queryset) == 2
assert queryset[0].id == user_file.id
assert queryset[1].id == user_file_2.id
queryset = UserFile.objects.all().name(user_file_3.name, user_file.name)
assert len(queryset) == 2
assert queryset[0].id == user_file.id
assert queryset[1].id == user_file_3.id

View file

@ -0,0 +1,79 @@
import pytest
from baserow.core.user_files.exceptions import InvalidUserFileNameError
from baserow.core.models import UserFile
@pytest.mark.django_db
def test_serialize_user_file():
user_file = UserFile.objects.create(
original_name='test.txt',
original_extension='txt',
unique='sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA',
size=10,
mime_type='plain/text',
is_image=True,
image_width=100,
image_height=100,
sha256_hash='a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
)
assert user_file.serialize() == {
'name': 'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA_'
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e.txt',
'size': 10,
'mime_type': 'plain/text',
'is_image': True,
'image_width': 100,
'image_height': 100,
'uploaded_at': user_file.uploaded_at.isoformat()
}
@pytest.mark.django_db
def test_user_file_name():
user_file = UserFile.objects.create(
original_name='test.txt',
original_extension='txt',
unique='sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA',
size=0,
mime_type='plain/text',
is_image=True,
image_width=0,
image_height=0,
sha256_hash='a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
)
assert user_file.name == (
'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA_'
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e.txt'
)
@pytest.mark.django_db
def test_user_file_deconstruct_name():
with pytest.raises(InvalidUserFileNameError):
UserFile.deconstruct_name('something.jpg')
with pytest.raises(InvalidUserFileNameError):
UserFile.deconstruct_name('something__test.jpg')
with pytest.raises(InvalidUserFileNameError):
UserFile.deconstruct_name('something_testjpg')
with pytest.raises(InvalidUserFileNameError):
UserFile.deconstruct_name('nothing_test.-')
assert UserFile.deconstruct_name('random_hash.jpg') == {
'unique': 'random',
'sha256_hash': 'hash',
'original_extension': 'jpg'
}
assert UserFile.deconstruct_name(
'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA_'
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e.txt'
) == {
'unique': 'sdafi6WtHfnDrU6S1lQKh9PdC7PeafCA',
'sha256_hash': (
'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e'
),
'original_extension': 'txt'
}

View file

@ -1,6 +1,7 @@
from faker import Faker
from .user import UserFixtures
from .user_file import UserFileFixtures
from .group import GroupFixtures
from .application import ApplicationFixtures
from .table import TableFixtures
@ -9,6 +10,6 @@ from .field import FieldFixtures
from .token import TokenFixtures
class Fixtures(UserFixtures, GroupFixtures, ApplicationFixtures, TableFixtures,
ViewFixtures, FieldFixtures, TokenFixtures):
class Fixtures(UserFixtures, UserFileFixtures, GroupFixtures, ApplicationFixtures,
TableFixtures, ViewFixtures, FieldFixtures, TokenFixtures):
fake = Faker()

View file

@ -1,7 +1,8 @@
from django.db import connection
from baserow.contrib.database.fields.models import (
TextField, LongTextField, NumberField, BooleanField, DateField, LinkRowField
TextField, LongTextField, NumberField, BooleanField, DateField, LinkRowField,
FileField
)
@ -116,3 +117,20 @@ class FieldFixtures:
self.create_model_field(kwargs['table'], field)
return field
def create_file_field(self, user=None, create_field=True, **kwargs):
if 'table' not in kwargs:
kwargs['table'] = self.create_database_table(user=user)
if 'name' not in kwargs:
kwargs['name'] = self.fake.name()
if 'order' not in kwargs:
kwargs['order'] = 0
field = FileField.objects.create(**kwargs)
if create_field:
self.create_model_field(kwargs['table'], field)
return field

35
backend/tests/fixtures/user_file.py vendored Normal file
View file

@ -0,0 +1,35 @@
import pathlib
import mimetypes
from baserow.core.models import UserFile
from baserow.core.utils import random_string
class UserFileFixtures:
def create_user_file(self, **kwargs):
if 'original_name' not in kwargs:
kwargs['original_name'] = self.fake.file_name()
if 'original_extension' not in kwargs:
kwargs['original_extension'] = pathlib.Path(
kwargs['original_name']
).suffix[1:].lower()
if 'unique' not in kwargs:
kwargs['unique'] = random_string(32)
if 'size' not in kwargs:
kwargs['size'] = 100
if 'mime_type' not in kwargs:
kwargs['mime_type'] = (
mimetypes.guess_type(kwargs['original_name'])[0] or ''
)
if 'uploaded_by' not in kwargs:
kwargs['uploaded_by'] = self.create_user()
if 'sha256_hash' not in kwargs:
kwargs['sha256_hash'] = random_string(64)
return UserFile.objects.create(**kwargs)

View file

@ -1,5 +1,19 @@
# Changelog
* Added select_for_update where it was still missing.
* Fixed API docs scrollbar size issue.
* Also lint the backend tests.
* Implemented a switch to disable all filters without deleting them.
* Made it possible to order by fields via the rows listing endpoint.
* Added community chat to the readme.
* Made the cookies strict and secure.
* Removed the redundant _DOMAIN variables.
* Set un-secure lax cookie when public web frontend url isn't over a secure connection.
* Fixed bug where the sort choose field item didn't have a hover effect.
* Implemented a file field and user files upload.
* Made it impossible for the `link_row` field to be a primary field because that can
cause the primary field to be deleted.
## Released (2020-11-02)
* Highlight the row of a selected cell.
@ -11,6 +25,7 @@
* Added confirmation modals when the user wants to delete a group, application, table,
view or field.
* Fixed bug in the web-frontend URL validation where a '*' was invalidates.
* Made it possible to publicly expose the table data via a REST API.
## Released (2020-10-06)

Binary file not shown.

Before

(image error) Size: 126 KiB

After

(image error) Size: 134 KiB

View file

@ -93,9 +93,6 @@ are accepted.
* `MJML_SERVER_HOST` (default `mjml`): The hostname of the MJML TCP server. In the
development environment we use the `liminspace/mjml-tcpserver:latest` image.
* `MJML_SERVER_PORT` (default `28101`): The port of the MJML TCP server.
* `PUBLIC_BACKEND_DOMAIN` (default `localhost:8000`): The publicly accessible domain
name of the backend. For the development environment this is localhost:8000, but if
you change the port to 9000, for example, it will be `localhost:9000`.
* `PUBLIC_BACKEND_URL` (default `http://localhost:8000`): The publicly accessible URL
of the backend. For the development environment this is `http://localhost:8000`, but
if you change the port to 9000 it will be `http://localhost:9000`.
@ -107,9 +104,6 @@ are accepted.
development environment the backend container be accessed via the `backend` hostname
and because the server is also running on port 8000 inside the container, the private
backend URL should be `http://backend:8000`.
* `PUBLIC_WEB_FRONTEND_DOMAIN` (default `localhost:3000`): The publicly accessible
domain name of the web-frontend. For the development environment this is
localhost:3000, but if you change the port to 4000 it would be `localhost:4000`.
* `PUBLIC_WEB_FRONTEND_URL` (default `http://localhost:3000`): The publicly accessible
URL of the web-frontend. For the development environment this is
`http://localhost:3000`, but again you can change the port to whatever you wish. This

View file

@ -0,0 +1,17 @@
server {
listen 80;
server_name "*YOUR_DOMAIN*";
autoindex off;
gzip on;
gzip_disable "msie6";
location / {
root /baserow/media;
}
location /user_files {
root /baserow/media;
add_header Content-disposition "attachment; filename=$1";
}
}

View file

@ -2,7 +2,16 @@
nodaemon = true
[program:gunicorn]
environment = DJANGO_SETTINGS_MODULE="baserow.config.settings.base",DATABASE_PASSWORD="yourpassword",DATABASE_HOST="localhost",SECRET_KEY="SOMETHING_SECRET"
environment =
DJANGO_SETTINGS_MODULE="baserow.config.settings.base",
DATABASE_HOST="localhost",
DATABASE_PASSWORD="yourpassword",
SECRET_KEY="SOMETHING_SECRET",
PRIVATE_BACKEND_URL='http://localhost:8000',
PUBLIC_WEB_FRONTEND_URL='https://FRONTEND_DOMAIN',
PUBLIC_BACKEND_URL='https://BACKEND_DOMAIN',
MEDIA_ROOT='/baserow/media',
MEDIA_URL='https://MEDIA_DOMAIN'
command = /baserow/backend/env/bin/gunicorn -w 5 -b 127.0.0.1:8000 baserow.config.wsgi:application --log-level=debug --chdir=/baserow
stdout_logfile=/var/log/baserow/backend.log
stderr_logfile=/var/log/baserow/backend.error

View file

@ -1,8 +1,11 @@
[supervisord]
nodaemon = true
environment=PRIVATE_BACKEND_URL='http://localhost',PUBLIC_WEB_FRONTEND_URL='https://FRONTEND_DOMAIN',PUBLIC_BACKEND_URL='https://BACKEND_DOMAIN',PUBLIC_WEB_FRONTEND_DOMAIN='FRONTEND_DOMAIN',PUBLIC_BACKEND_DOMAIN='BACKEND_DOMAIN'
[program:nuxt]
environment =
PRIVATE_BACKEND_URL='http://localhost:8000',
PUBLIC_WEB_FRONTEND_URL='https://FRONTEND_DOMAIN',
PUBLIC_BACKEND_URL='https://BACKEND_DOMAIN'
directory = /baserow/web-frontend
command = sh -c './node_modules/.bin/nuxt start --hostname 127.0.0.1 --config-file ./config/nuxt.config.demo.js'
stdout_logfile=/var/log/baserow/frontend.log

View file

@ -74,10 +74,15 @@ it for when you need it later.
## Install dependencies for & setup Baserow
In order to use the Baserow application, we will need to create a virtual environment
and install some more dependencies like: NodeJS, Yarn, Python 3.
In order to use the Baserow application, we will need to create a media directory for
the uploaded user files, a virtual environment and install some more dependencies
like: NodeJS, Yarn, Python 3.
```bash
# Create uploaded user files and media directory
$ mkdir media
$ chmod 0755 media
# Install python3, pip & virtualenv
$ apt install python3 python3-pip virtualenv libpq-dev libmysqlclient-dev -y
@ -111,8 +116,8 @@ $ yarn install
$ ./node_modules/nuxt/bin/nuxt.js build --config-file config/nuxt.config.demo.js
```
## Install NGINX
Baserow uses NGINX as a reverse proxy for it's frontend and backend. Through that, you
can easily add SSL Certificates and add more applications to your server if you want
to.
@ -127,6 +132,7 @@ $ service nginx start
```
## Setup NGINX
If you're unfamiliar with NGINX: NGINX uses so called "virtualhosts" to direct web
traffic from outside of your network to the correct application on your server. These
virtual hosts are defined in `.conf` files which are put into the
@ -136,7 +142,8 @@ the `server_name` value in both of the files. The server name is the domain unde
which you want Baserow to be reachable.
Make sure that in the following commands you replace `api.domain.com` with your own
backend domain and that you replace `baserow.domain.com` with your frontend domain.
backend domain, that you replace `baserow.domain.com` with your frontend domain and
replace `media.baserow.com` with your domain to serve the user files.
```bash
# Move virtualhost files to /etc/nginx/sites-enabled/
@ -147,62 +154,14 @@ $ rm /etc/nginx/sites-enabled/default
# Change the server_name values
$ sed -i 's/\*YOUR_DOMAIN\*/api.domain.com/g' /etc/nginx/sites-enabled/baserow-backend.conf
$ sed -i 's/\*YOUR_DOMAIN\*/baserow.domain.com/g' /etc/nginx/sites-enabled/baserow-frontend.conf
$ sed -i 's/\*YOUR_DOMAIN\*/media.domain.com/g' /etc/nginx/sites-enabled/baserow-media.conf
# Then restart nginx so that it processes the configuration files
$ service nginx restart
```
## Baserow Configuration
## Import relations into database
### Configuration
Baserow needs a few environment variables to be set in order to work properly. Here is
a list of the environment variables with explanations for them. This list is solely
for reference, there is no need to set these variables because they will be set
through `supervisor` later on. This list does not describe all environment variables
that can be set. For a better understanding of the available environment variables,
take a look at `/baserow/backend/src/config/settings/base.py`.
We discourage changing the content of the `base.py` file since it might be overridden
through a future update with `git pull`. It is only mentioned in this guide so that
you're able to modify your Baserow instance as easily as possible with environment
variables.
```
# Backend Domain & URL
PUBLIC_BACKEND_DOMAIN="api.domain.com"
PUBLIC_BACKEND_URL="https://api.domain.com"
# Frontend Domain & URL
PUBLIC_WEB_FRONTEND_DOMAIN="baserow.domain.com"
PUBLIC_WEB_FRONTEND_URL="https://baserow.domain.com"
# Private Backend URL & Database Password & Database Host
PRIVATE_BACKEND_URL="http://localhost"
DATABASE_PASSWORD="yourpassword"
DATABASE_HOST="localhost"
# Django Settings Module & Python Path
DJANGO_SETTINGS_MODULE='baserow.config.settings.base'
PYTHONPATH=/baserow:/baserow/plugins/saas/backend/src
# Secret Key
SECRET_KEY="Something_Secret"
```
Baserow uses the secret key to generate a variety of tokens (e.g. password reset
token, ...). In order to generate a unique secret key, you can simply run the following
command.
```bash
$ cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1
```
The output will be a alphanumeric string with 80 characters. You can shorten or
lengthen the string by changing the number value in `fold -w 80` to a length you're
satisfied with.
### Import relations into database
In the "*Install & Setup PostgreSQL*" Section, we created a database called `baserow`
for the application. Since we didn't do anything with that database it is still empty,
which will result in a non-working application since Baserow expects certain tables
@ -214,7 +173,7 @@ commands:
$ source backend/env/bin/activate
$ export DJANGO_SETTINGS_MODULE='baserow.config.settings.base'
$ export DATABASE_PASSWORD="yourpassword"
$ export DATABASE_HOST="localhost"
$ export DATABASE_HOST="localhost"
# Create database schema
$ baserow migrate
@ -223,6 +182,7 @@ $ deactivate
```
## Install & Configure Supervisor
Supervisor is an application that starts and keeps track of processes and will restart
them if the process finishes. For Baserow this is used to reduce downtime and in order
to restart the application in the unlikely event of an unforseen termination. You can
@ -239,23 +199,24 @@ $ mkdir /var/log/baserow/
$ cd /baserow
$ cp docs/guides/installation/configuration-files/supervisor/* /etc/supervisor/conf.d/
```
You will need to edit the `baserow-frontend.conf` and `baserow-backend.conf` files
(located now at `/etc/supervisor/conf.d/`) in order to set the necessary environment
variables. You will need to change at least the following variables which can be found
in the `environment=` section.
in the `environment =` section.
**Frontend**
**Web frontend and backend**
- `PUBLIC_WEB_FRONTEND_URL`: The URL under which your frontend can be reached from the
internet (HTTP or HTTPS)
- `PUBLIC_BACKEND_URL`: The URL under which your backend can be reached from the
internet (HTTP or HTTPS)
- `PUBLIC_WEB_FRONTEND_DOMAIN`: The domain under which you frontend can be reached from
the internet (same as URL but without `https://`)
- `PUBLIC_BACKEND_DOMAIN`: The domain under which you backend can be reached from the
internet (same as URL but without `https://`)
**Backend**
- `SECRET_KEY`: The secret key that is used to generate tokens and other random strings
- `SECRET_KEY`: The secret key that is used to generate tokens and other random
strings. You can generate one with the following commands:
```bash
$ cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1
```
- `DATABASE_PASSWORD`: The password of the `baserow` database user
- `DATABASE_HOST`: The host computer that runs the database (usually `localhost`)

View file

@ -31,9 +31,7 @@ services:
- db
- mjml
environment:
- PUBLIC_BACKEND_DOMAIN=localhost:8001
- PUBLIC_BACKEND_URL=http://localhost:8001
- PUBLIC_WEB_FRONTEND_DOMAIN=localhost:3001
- PUBLIC_WEB_FRONTEND_URL=http://localhost:3001
web-frontend:

Some files were not shown because too many files have changed in this diff Show more