Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-11 16:01:20 +00:00)

Merge branch 'develop'

Commit 978d9462ed
307 changed files with 15461 additions and 2311 deletions
Changed-file tree (abridged):

.gitignore, .gitlab-ci.yml, CONTRIBUTING.md, README.md
backend/
    Dockerfile.demo, application_types.py, config.py, database_routers.py, conftest.py
    requirements/, setup.py
    src/baserow/
        api/
        config/settings/
        contrib/database/
            api/ (fields, rows, views)
            db/, fields/, migrations/, models.py, table/, views/
        core/
        ws/
    templates/
    tests/baserow/
        api/
        contrib/database/
            api/ (fields, rows, views)
            db/
            field/ (test_boolean_field_type.py, test_date_field_type.py,
                test_field_filters.py, test_field_handler.py, test_field_types.py,
                test_link_row_field_type.py, test_number_field_type.py,
                test_single_select_field_type.py)
            table/ (test_database_application_type.py)
            view/
        core/
        fixtures/
templates/
.gitignore (6 changes, vendored)

@@ -91,3 +91,9 @@ __pycache__
*.egg-info
docker-compose.override.yml

# python virtual envs
venv/

# intellij config files
*.iml
.gitlab-ci.yml

@@ -64,3 +64,5 @@ backend-setup:
  script:
    - pip install -e ./backend
    - python -c 'import baserow'
    - export DJANGO_SETTINGS_MODULE='baserow.config.settings.demo'
    - timeout --preserve-status 10s gunicorn --workers=1 -b 0.0.0.0:8000 -k uvicorn.workers.UvicornWorker baserow.config.asgi:application
CONTRIBUTING.md

@@ -20,6 +20,9 @@ repository on GitHub, but this is not the official one.
1. Make the changes described in the issue.
1. Ensure that your code meets the quality standards.
1. Submit your merge request!
1. Usually we enable the following Gitlab merge options:
    1. "Delete source branch when merge request is accepted."
    1. "Squash commits when merge request is accepted."
1. A maintainer will review your code and merge your code.

## Quality standards

@@ -29,9 +32,9 @@ repository on GitHub, but this is not the official one.
* In code Python docs must be in reStructured style.
* SCSS code must be compliant with BEM.
* Javascript code must be compliant with the eslint:recommended rules.
* In code documentation is required for every function or class that is not self
  evident.
* In code documentation is required for every function or class that is not self-evident.
* Documentation for every concept that can be used by a plugin.
* [changelog.md](changelog.md) should be updated with any new features.
* The pipeline must pass.

## Any contributions you make will be under the MIT Software License
README.md (11 changes)

@@ -72,6 +72,15 @@ Visit http://localhost:8000/api/groups/ in your browser and you should see a JSON
response containing "Authentication credentials were not provided.". This means that it
is working!

In order to process asynchronous tasks you also need to start a Celery worker. This is
mainly used for the real time collaboration. Open a new tab or window and execute the
following commands.

```
$ docker exec -it backend bash
$ watchmedo auto-restart --directory=./ --pattern=*.py --recursive -- celery -A baserow worker -l INFO
```

In order to start the web-frontend environment you may execute the following commands.

@@ -109,7 +118,7 @@ Created by Bram Wiepjes (Baserow) - bram@baserow.io.

Distributes under the MIT license. See `LICENSE` for more information.

Version: 1.0.1
Version: 1.1.0

The official repository can be found at https://gitlab.com/bramw/baserow.
backend/Dockerfile.demo

@@ -15,5 +15,6 @@ RUN apt-get -y install gnupg2
RUN make install-dependencies

ENTRYPOINT python src/baserow/manage.py migrate && \
    python src/baserow/manage.py sync_templates && \
    celery -A baserow worker -l INFO --detach && \
    gunicorn --workers=3 -b 0.0.0.0:8000 -k uvicorn.workers.UvicornWorker baserow.config.asgi:application
backend/requirements

@@ -14,6 +14,8 @@ requests==2.25.0
itsdangerous==1.1.0
drf-spectacular==0.13.1
Pillow==8.0.1
asgiref==3.3.1
channels==3.0.3
channels-redis==3.2.0
celery[redis]==5.0.5
advocate==1.0.0
backend/setup.py

@@ -6,7 +6,7 @@ from setuptools import find_packages, setup

PROJECT_DIR = os.path.dirname(__file__)
REQUIREMENTS_DIR = os.path.join(PROJECT_DIR, 'requirements')
VERSION = '1.0.1'
VERSION = '1.1.0'


def get_requirements(env):
backend/src/baserow/api/applications/views.py

@@ -2,7 +2,7 @@ from django.db import transaction

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAuthenticated, AllowAny

from drf_spectacular.utils import extend_schema
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes

@@ -80,6 +80,12 @@ class AllApplicationsView(APIView):
class ApplicationsView(APIView):
    permission_classes = (IsAuthenticated,)

    def get_permissions(self):
        if self.request.method == 'GET':
            return [AllowAny()]

        return super().get_permissions()

    @extend_schema(
        parameters=[
            OpenApiParameter(

@@ -94,9 +100,10 @@ class ApplicationsView(APIView):
        operation_id='list_applications',
        description=(
            'Lists all the applications of the group related to the provided '
            '`group_id` parameter if the authorized user is in that group. The '
            'properties that belong to the application can differ per type. An '
            'application always belongs to a single group.'
            '`group_id` parameter if the authorized user is in that group. If the '
            'group is related to a template, then this endpoint will be publicly '
            'accessible. The properties that belong to the application can differ per '
            'type. An application always belongs to a single group.'
        ),
        responses={
            200: PolymorphicMappingSerializer(

@@ -120,7 +127,7 @@ class ApplicationsView(APIView):
        """

        group = CoreHandler().get_group(group_id)
        group.has_user(request.user, raise_error=True)
        group.has_user(request.user, raise_error=True, allow_if_template=True)

        applications = Application.objects.select_related(
            'content_type', 'group'
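
For illustration, this makes the list endpoint reachable without credentials when the group backs a template. A minimal sketch, assuming a local instance and a made-up group id (the exact route comes from the applications urls, which are not part of this excerpt):

```python
import requests

# Anonymously list the applications of group 123, assumed to belong to a
# template. A non-template group would still require authentication.
response = requests.get('http://localhost:8000/api/applications/group/123/')
print(response.status_code)  # 200 for a template group
```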
backend/src/baserow/api/decorators.py

@@ -148,7 +148,7 @@ def allowed_includes(*allowed):
    """
    A view method decorator that checks which allowed includes are in the GET
    parameters of the request. The allowed arguments are going to be added to the
    view method kwargs and if they are in the includes GET parameter the value will
    view method kwargs and if they are in the `include` GET parameter the value will
    be True.

    Imagine this request:

@@ -174,7 +174,7 @@ def allowed_includes(*allowed):
    def validate_decorator(func):
        def func_wrapper(*args, **kwargs):
            request = get_request(args)
            raw_include = request.GET.get('includes', None)
            raw_include = request.GET.get('include', None)
            includes = raw_include.split(',') if raw_include else []

            for include in allowed:
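
A sketch of the decorator after the rename, mirroring how the views later in this diff consume it:

```python
from rest_framework.views import APIView

from baserow.api.decorators import allowed_includes


class ExampleView(APIView):
    @allowed_includes('filters', 'sortings')
    def get(self, request, table_id, filters, sortings):
        # GET /api/database/views/?table_id=1&include=filters,sortings now sets
        # filters=True and sortings=True; the old `includes` parameter is ignored.
        ...
```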
backend/src/baserow/api/serializers.py

@@ -1,7 +1,11 @@
from rest_framework import serializers

from baserow.contrib.database.api.views.grid.serializers import \
    GridViewFieldOptionsField


def get_example_pagination_serializer_class(results_serializer_class):
def get_example_pagination_serializer_class(results_serializer_class,
                                            add_field_options=False):
    """
    Generates a pagination like response serializer that has the provided serializer
    class as results. It is only used for example purposes in combination with the

@@ -9,26 +13,35 @@ def get_example_pagination_serializer_class(results_serializer_class):

    :param results_serializer_class: The serializer class that needs to be added as
        results.
    :param add_field_options: When true will include the field_options field on the
        returned serializer.
    :type results_serializer_class: Serializer
    :return: The generated pagination serializer.
    :rtype: Serializer
    """

    fields = {
        'count': serializers.IntegerField(help_text='The total amount of results.'),
        'next': serializers.URLField(
            allow_blank=True,
            allow_null=True,
            help_text='URL to the next page.'
        ),
        'previous': serializers.URLField(
            allow_blank=True,
            allow_null=True,
            help_text='URL to the previous page.'
        ),
        'results': results_serializer_class(many=True)
    }

    serializer_name = 'PaginationSerializer'
    if add_field_options:
        fields['field_options'] = GridViewFieldOptionsField(required=False)
        serializer_name = serializer_name + 'WithFieldOptions'

    return type(
        'PaginationSerializer' + results_serializer_class.__name__,
        serializer_name + results_serializer_class.__name__,
        (serializers.Serializer,),
        {
            'count': serializers.IntegerField(help_text='The total amount of results.'),
            'next': serializers.URLField(
                allow_blank=True,
                allow_null=True,
                help_text='URL to the next page.'
            ),
            'previous': serializers.URLField(
                allow_blank=True,
                allow_null=True,
                help_text='URL to the previous page.'
            ),
            'results': results_serializer_class(many=True)
        }
        fields
    )
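
A short sketch of what the refactored factory produces; `RowSerializer` here stands in for any results serializer:

```python
paginated = get_example_pagination_serializer_class(RowSerializer)
# paginated.__name__ == 'PaginationSerializerRowSerializer'

paginated_with_options = get_example_pagination_serializer_class(
    RowSerializer,
    add_field_options=True
)
# paginated_with_options.__name__ ==
#     'PaginationSerializerWithFieldOptionsRowSerializer'
# and it gains an optional `field_options` field.
```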
backend/src/baserow/api/templates/__init__.py (new file, 0 lines)

backend/src/baserow/api/templates/errors.py (new file, 13 lines)

@@ -0,0 +1,13 @@
from rest_framework.status import HTTP_404_NOT_FOUND, HTTP_400_BAD_REQUEST


ERROR_TEMPLATE_DOES_NOT_EXIST = (
    'ERROR_TEMPLATE_DOES_NOT_EXIST',
    HTTP_404_NOT_FOUND,
    'The requested template does not exist.'
)
ERROR_TEMPLATE_FILE_DOES_NOT_EXIST = (
    'ERROR_TEMPLATE_FILE_DOES_NOT_EXIST',
    HTTP_400_BAD_REQUEST,
    'The requested template file does not exist anymore.'
)
backend/src/baserow/api/templates/serializers.py (new file, 32 lines)

@@ -0,0 +1,32 @@
from rest_framework import serializers

from drf_spectacular.utils import extend_schema_field
from drf_spectacular.types import OpenApiTypes

from django.conf import settings

from baserow.core.models import TemplateCategory, Template


class TemplateSerializer(serializers.ModelSerializer):
    is_default = serializers.SerializerMethodField(
        help_text='Indicates if the template must be selected by default. The '
                  'web-frontend automatically selects the first `is_default` template '
                  'that it can find.'
    )

    class Meta:
        model = Template
        fields = ('id', 'name', 'icon', 'keywords', 'group_id', 'is_default')

    @extend_schema_field(OpenApiTypes.STR)
    def get_is_default(self, instance):
        return instance.slug == settings.DEFAULT_APPLICATION_TEMPLATE


class TemplateCategoriesSerializer(serializers.ModelSerializer):
    templates = TemplateSerializer(read_only=True, many=True)

    class Meta:
        model = TemplateCategory
        fields = ('id', 'name', 'templates')
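
As a sketch, one serialized category could look roughly like this (all values invented):

```python
{
    'id': 1,
    'name': 'Project management',
    'templates': [{
        'id': 4,
        'name': 'Project tracker',
        'icon': 'tasks',
        'keywords': 'project,tasks,tracker',
        'group_id': 25,
        # True because the slug equals settings.DEFAULT_APPLICATION_TEMPLATE.
        'is_default': True
    }]
}
```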
backend/src/baserow/api/templates/urls.py (new file, 16 lines)

@@ -0,0 +1,16 @@
from django.conf.urls import url

from .views import TemplatesView, InstallTemplateView


app_name = 'baserow.api.templates'


urlpatterns = [
    url(
        r'install/(?P<group_id>[0-9]+)/(?P<template_id>[0-9]+)/$',
        InstallTemplateView.as_view(),
        name='install'
    ),
    url(r'$', TemplatesView.as_view(), name='list'),
]
backend/src/baserow/api/templates/views.py (new file, 114 lines)

@@ -0,0 +1,114 @@
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated, AllowAny

from drf_spectacular.utils import extend_schema
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes

from baserow.api.templates.serializers import TemplateCategoriesSerializer
from baserow.api.decorators import map_exceptions
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP, ERROR_GROUP_DOES_NOT_EXIST
from baserow.api.schemas import get_error_schema
from baserow.api.utils import PolymorphicMappingSerializer
from baserow.api.applications.serializers import get_application_serializer
from baserow.api.applications.views import application_type_serializers
from baserow.core.models import TemplateCategory
from baserow.core.handler import CoreHandler
from baserow.core.exceptions import (
    UserNotInGroupError, GroupDoesNotExist, TemplateDoesNotExist,
    TemplateFileDoesNotExist
)

from .errors import ERROR_TEMPLATE_DOES_NOT_EXIST, ERROR_TEMPLATE_FILE_DOES_NOT_EXIST


class TemplatesView(APIView):
    permission_classes = (AllowAny,)

    @extend_schema(
        tags=['Templates'],
        operation_id='list_templates',
        description=(
            'Lists all the template categories and the related templates that are in '
            'that category. The template\'s `group_id` can be used for previewing '
            'purposes because that group contains the applications that are in the '
            'template. All the `get` and `list` endpoints related to that group are '
            'publicly accessible.'
        ),
        responses={
            200: TemplateCategoriesSerializer(many=True)
        }
    )
    def get(self, request):
        """Responds with a list of all template categories and templates."""

        categories = TemplateCategory.objects.all().prefetch_related('templates')
        serializer = TemplateCategoriesSerializer(categories, many=True)
        return Response(serializer.data)


class InstallTemplateView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        tags=['Templates'],
        operation_id='install_template',
        parameters=[
            OpenApiParameter(
                name='group_id',
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description='The id related to the group where the template '
                            'applications must be installed into.'
            ),
            OpenApiParameter(
                name='template_id',
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description='The id related to the template that must be installed.'
            )
        ],
        description=(
            'Installs the applications of the given template into the given group if '
            'the user has access to that group. The response contains those newly '
            'created applications.'
        ),
        responses={
            200: PolymorphicMappingSerializer(
                'Applications',
                application_type_serializers,
                many=True
            ),
            400: get_error_schema([
                'ERROR_USER_NOT_IN_GROUP',
                'ERROR_TEMPLATE_FILE_DOES_NOT_EXIST'
            ]),
            404: get_error_schema([
                'ERROR_GROUP_DOES_NOT_EXIST',
                'ERROR_TEMPLATE_DOES_NOT_EXIST'
            ])
        }
    )
    @map_exceptions({
        GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
        UserNotInGroupError: ERROR_USER_NOT_IN_GROUP,
        TemplateDoesNotExist: ERROR_TEMPLATE_DOES_NOT_EXIST,
        TemplateFileDoesNotExist: ERROR_TEMPLATE_FILE_DOES_NOT_EXIST
    })
    def get(self, request, group_id, template_id):
        """Install a template into a group."""

        handler = CoreHandler()
        group = handler.get_group(group_id)
        template = handler.get_template(template_id)
        applications, id_mapping = handler.install_template(
            request.user,
            group,
            template
        )

        data = [
            get_application_serializer(application).data
            for application in applications
        ]
        return Response(data)
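
Combined with the `/api/templates/` prefix registered in the next hunks, these views could be exercised roughly as follows (host, ids and token are placeholders):

```python
import requests

BASE = 'http://localhost:8000/api/templates'

# Listing categories and templates needs no authentication.
categories = requests.get(f'{BASE}/').json()

# Installing template 4 into group 123 requires a JWT.
installed = requests.get(
    f'{BASE}/install/123/4/',
    headers={'Authorization': 'JWT <token>'}
).json()
```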
backend/src/baserow/api/urls.py

@@ -8,6 +8,7 @@ from .settings import urls as settings_urls
from .user import urls as user_urls
from .user_files import urls as user_files_urls
from .groups import urls as group_urls
from .templates import urls as templates_urls
from .applications import urls as application_urls


@@ -24,5 +25,6 @@ urlpatterns = [
    path('user/', include(user_urls, namespace='user')),
    path('user-files/', include(user_files_urls, namespace='user_files')),
    path('groups/', include(group_urls, namespace='groups')),
    path('templates/', include(templates_urls, namespace='templates')),
    path('applications/', include(application_urls, namespace='applications'))
] + application_type_registry.api_urls + plugin_registry.api_urls
backend/src/baserow/api/user_files/errors.py

@@ -18,6 +18,11 @@ ERROR_FILE_URL_COULD_NOT_BE_REACHED = (
    HTTP_400_BAD_REQUEST,
    'The provided URL could not be reached.'
)
ERROR_INVALID_FILE_URL = (
    'ERROR_INVALID_FILE_URL',
    HTTP_400_BAD_REQUEST,
    'The provided URL is not valid.'
)
ERROR_INVALID_USER_FILE_NAME_ERROR = (
    'ERROR_INVALID_USER_FILE_NAME_ERROR',
    HTTP_400_BAD_REQUEST,
backend/src/baserow/api/user_files/views.py

@@ -11,13 +11,15 @@ from rest_framework.permissions import IsAuthenticated
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.schemas import get_error_schema
from baserow.core.user_files.exceptions import (
    InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached
    InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
    InvalidFileURLError
)
from baserow.core.user_files.handler import UserFileHandler

from .serializers import UserFileSerializer, UserFileUploadViaURLRequestSerializer
from .errors import (
    ERROR_INVALID_FILE, ERROR_FILE_SIZE_TOO_LARGE, ERROR_FILE_URL_COULD_NOT_BE_REACHED
    ERROR_INVALID_FILE, ERROR_FILE_SIZE_TOO_LARGE, ERROR_FILE_URL_COULD_NOT_BE_REACHED,
    ERROR_INVALID_FILE_URL
)


@@ -70,7 +72,8 @@ class UploadViaURLView(APIView):
            400: get_error_schema([
                'ERROR_INVALID_FILE',
                'ERROR_FILE_SIZE_TOO_LARGE',
                'ERROR_FILE_URL_COULD_NOT_BE_REACHED'
                'ERROR_FILE_URL_COULD_NOT_BE_REACHED',
                'ERROR_INVALID_FILE_URL'
            ])
        }
    )

@@ -78,7 +81,8 @@ class UploadViaURLView(APIView):
    @map_exceptions({
        InvalidFileStreamError: ERROR_INVALID_FILE,
        FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE,
        FileURLCouldNotBeReached: ERROR_FILE_URL_COULD_NOT_BE_REACHED
        FileURLCouldNotBeReached: ERROR_FILE_URL_COULD_NOT_BE_REACHED,
        InvalidFileURLError: ERROR_INVALID_FILE_URL
    })
    @validate_body(UserFileUploadViaURLRequestSerializer)
    def post(self, request, data):
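
A sketch of a request that now surfaces the new error code (path and token assumed):

```python
import requests

response = requests.post(
    'http://localhost:8000/api/user-files/upload-via-url/',
    headers={'Authorization': 'JWT <token>'},
    json={'url': 'not-a-valid-url'}
)
# 400 with {'error': 'ERROR_INVALID_FILE_URL', ...} instead of a generic failure.
```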
backend/src/baserow/config/settings/base.py

@@ -182,7 +182,7 @@ SPECTACULAR_SETTINGS = {
        'name': 'MIT',
        'url': 'https://gitlab.com/bramw/baserow/-/blob/master/LICENSE'
    },
    'VERSION': '1.0.1',
    'VERSION': '1.1.0',
    'SERVE_INCLUDE_SCHEMA': False,
    'TAGS': [
        {'name': 'Settings'},

@@ -190,6 +190,7 @@ SPECTACULAR_SETTINGS = {
        {'name': 'User files'},
        {'name': 'Groups'},
        {'name': 'Group invitations'},
        {'name': 'Templates'},
        {'name': 'Applications'},
        {'name': 'Database tables'},
        {'name': 'Database table fields'},

@@ -258,3 +259,11 @@ USER_THUMBNAILS = {
    'tiny': [None, 21],
    'small': [48, 48]
}

# The directory that contains all the templates in JSON format. When for example
# the `sync_templates` management command is called, then the templates in the
# database will be synced with these files.
APPLICATION_TEMPLATES_DIR = os.path.join(BASE_DIR, '../../../templates')
# The template that must be selected when the user first opens the templates select
# modal.
DEFAULT_APPLICATION_TEMPLATE = 'project-management'
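
A deployment could point these new settings at its own template set; a sketch with invented values:

```python
# settings/custom.py -- hypothetical override module
from .base import *  # noqa: F401,F403

APPLICATION_TEMPLATES_DIR = '/baserow/custom_templates'
DEFAULT_APPLICATION_TEMPLATE = 'crm'  # must match the slug of a template JSON file
```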
backend/src/baserow/contrib/database/api/fields/views.py

@@ -2,7 +2,8 @@ from django.db import transaction

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.decorators import permission_classes as method_permission_classes

from drf_spectacular.utils import extend_schema
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes

@@ -35,6 +36,12 @@ from .serializers import (
class FieldsView(APIView):
    permission_classes = (IsAuthenticated,)

    def get_permissions(self):
        if self.request.method == 'GET':
            return [AllowAny()]

        return super().get_permissions()

    @extend_schema(
        parameters=[
            OpenApiParameter(

@@ -49,10 +56,11 @@ class FieldsView(APIView):
        operation_id='list_database_table_fields',
        description=(
            'Lists all the fields of the table related to the provided parameter if '
            'the user has access to the related database\'s group. A table consists of '
            'fields and each field can have a different type. Each type can have '
            'different properties. A field is comparable with a regular table\'s '
            'column.'
            'the user has access to the related database\'s group. If the group is '
            'related to a template, then this endpoint will be publicly accessible. A '
            'table consists of fields and each field can have a different type. Each '
            'type can have different properties. A field is comparable with a regular '
            'table\'s column.'
        ),
        responses={
            200: PolymorphicCustomFieldRegistrySerializer(

@@ -68,6 +76,7 @@ class FieldsView(APIView):
        TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
        UserNotInGroupError: ERROR_USER_NOT_IN_GROUP
    })
    @method_permission_classes([AllowAny])
    def get(self, request, table_id):
        """
        Responds with a list of serialized fields that belong to the table if the user

@@ -75,7 +84,8 @@ class FieldsView(APIView):
        """

        table = TableHandler().get_table(table_id)
        table.database.group.has_user(request.user, raise_error=True)
        table.database.group.has_user(request.user, raise_error=True,
                                      allow_if_template=True)
        fields = Field.objects.filter(table=table).select_related('content_type')

        data = [
backend/src/baserow/contrib/database/api/rows/serializers.py

@@ -7,7 +7,6 @@ from baserow.api.serializers import get_example_pagination_serializer_class
from baserow.core.utils import model_default_values, dict_to_object
from baserow.contrib.database.fields.registries import field_type_registry


logger = logging.getLogger(__name__)


@@ -133,3 +132,6 @@ def get_example_row_serializer_class(add_id=False):
example_pagination_row_serializer_class = get_example_pagination_serializer_class(
    get_example_row_serializer_class(True)
)
example_pagination_row_serializer_class_with_field_options = \
    get_example_pagination_serializer_class(
        get_example_row_serializer_class(True), add_field_options=True)
backend/src/baserow/contrib/database/api/rows/views.py

@@ -1,32 +1,28 @@
from django.db import transaction
from django.conf import settings

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated

from drf_spectacular.utils import extend_schema
from django.db import transaction
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from baserow.api.utils import validate_data
from baserow.api.decorators import map_exceptions
from baserow.api.pagination import PageNumberPagination
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.pagination import PageNumberPagination
from baserow.api.schemas import get_error_schema
from baserow.api.user_files.errors import ERROR_USER_FILE_DOES_NOT_EXIST
from baserow.core.exceptions import UserNotInGroupError
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.serializers import (
    example_pagination_row_serializer_class
)
from baserow.contrib.database.api.tokens.errors import ERROR_NO_PERMISSION_TO_TABLE
from baserow.api.utils import validate_data
from baserow.contrib.database.api.fields.errors import (
    ERROR_ORDER_BY_FIELD_NOT_POSSIBLE, ERROR_ORDER_BY_FIELD_NOT_FOUND,
    ERROR_FILTER_FIELD_NOT_FOUND
)
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.serializers import (
    example_pagination_row_serializer_class
)
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication
from baserow.contrib.database.api.tokens.errors import ERROR_NO_PERMISSION_TO_TABLE
from baserow.contrib.database.api.views.errors import (
    ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
    ERROR_VIEW_FILTER_TYPE_NOT_ALLOWED_FOR_FIELD

@@ -34,21 +30,23 @@ from baserow.contrib.database.api.views.errors import (
from baserow.contrib.database.fields.exceptions import (
    OrderByFieldNotFound, OrderByFieldNotPossible, FilterFieldNotFound
)
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.exceptions import TableDoesNotExist
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.rows.exceptions import RowDoesNotExist
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.table.exceptions import TableDoesNotExist
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.tokens.exceptions import NoPermissionToTable
from baserow.contrib.database.views.models import FILTER_TYPE_AND, FILTER_TYPE_OR
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.contrib.database.views.exceptions import (
    ViewFilterTypeNotAllowedForField, ViewFilterTypeDoesNotExist
)
from baserow.contrib.database.views.registries import view_filter_type_registry

from baserow.core.exceptions import UserNotInGroupError
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from .serializers import (
    RowSerializer, get_example_row_serializer_class, get_row_serializer_class
)
from baserow.contrib.database.fields.field_filters import FILTER_TYPE_AND, \
    FILTER_TYPE_OR


class RowsView(APIView):
backend/src/baserow/contrib/database/api/views/grid/serializers.py

@@ -17,6 +17,14 @@ grid_view_field_options_schema = {
            'example': True,
            'description': 'Whether or not the field should be hidden in the '
                           'current view.'
        },
        'order': {
            'type': 'integer',
            'example': 0,
            'description': 'The position that the field has within the view, '
                           'lowest first. If there is another field with the '
                           'same order value then the field with the lowest '
                           'id must be shown first.'
        }
    }
},

@@ -101,7 +101,7 @@ class GridViewSerializer(serializers.ModelSerializer):
class GridViewFieldOptionsSerializer(serializers.ModelSerializer):
    class Meta:
        model = GridViewFieldOptions
        fields = ('width', 'hidden')
        fields = ('width', 'hidden', 'order')


class GridViewFilterSerializer(serializers.Serializer):
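
The tie-breaking rule from the `order` description can be expressed in one line; a sketch assuming a list of option dicts that carry their field id:

```python
visible = sorted(
    (o for o in field_options if not o['hidden']),
    key=lambda o: (o['order'], o['id'])  # lowest order first, then lowest id
)
```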
backend/src/baserow/contrib/database/api/views/grid/views.py

@@ -1,6 +1,6 @@
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.pagination import LimitOffsetPagination

from drf_spectacular.utils import extend_schema

@@ -12,10 +12,11 @@ from baserow.api.pagination import PageNumberPagination
from baserow.api.schemas import get_error_schema
from baserow.core.exceptions import UserNotInGroupError
from baserow.contrib.database.api.rows.serializers import (
    get_row_serializer_class, RowSerializer
    get_row_serializer_class, RowSerializer,
    example_pagination_row_serializer_class_with_field_options
)
from baserow.contrib.database.api.rows.serializers import (
    get_example_row_serializer_class, example_pagination_row_serializer_class
    get_example_row_serializer_class
)
from baserow.contrib.database.api.views.grid.serializers import GridViewSerializer
from baserow.contrib.database.views.exceptions import (

@@ -31,6 +32,12 @@ from .serializers import GridViewFilterSerializer
class GridViewView(APIView):
    permission_classes = (IsAuthenticated,)

    def get_permissions(self):
        if self.request.method == 'GET':
            return [AllowAny()]

        return super().get_permissions()

    @extend_schema(
        parameters=[
            OpenApiParameter(

@@ -70,7 +77,14 @@ class GridViewView(APIView):
                name='size', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
                description='Can only be used in combination with the `page` parameter '
                            'and defines how many rows should be returned.'
            ),
            OpenApiParameter(
                name='search',
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                description='If provided only rows with data that matches the search '
                            'query are going to be returned.'
            ),
        ],
        tags=['Database table grid view'],
        operation_id='list_database_table_grid_view_rows',

@@ -92,7 +106,7 @@ class GridViewView(APIView):
            '`list_database_table_view_sortings` endpoints.'
        ),
        responses={
            200: example_pagination_row_serializer_class,
            200: example_pagination_row_serializer_class_with_field_options,
            400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
            404: get_error_schema(['ERROR_GRID_DOES_NOT_EXIST'])
        }

@@ -109,12 +123,15 @@ class GridViewView(APIView):
        else the page number pagination.

        Optionally the field options can also be included in the response if the
        `field_options` are provided in the includes GET parameter.
        `field_options` are provided in the include GET parameter.
        """

        search = request.GET.get('search')

        view_handler = ViewHandler()
        view = view_handler.get_view(view_id, GridView)
        view.table.database.group.has_user(request.user, raise_error=True)
        view.table.database.group.has_user(request.user, raise_error=True,
                                           allow_if_template=True)

        model = view.table.get_model()
        queryset = model.objects.all().enhance_by_fields()

@@ -122,6 +139,8 @@ class GridViewView(APIView):
        # Applies the view filters and sortings to the queryset if there are any.
        queryset = view_handler.apply_filters(view, queryset)
        queryset = view_handler.apply_sorting(view, queryset)
        if search:
            queryset = queryset.search_all_fields(search)

        if 'count' in request.GET:
            return Response({'count': queryset.count()})

@@ -144,7 +163,8 @@ class GridViewView(APIView):
        # but when added to the context the fields don't have to be fetched from
        # the database again when checking if they exist.
        context = {'fields': [o['field'] for o in model._field_objects.values()]}
        response.data.update(**GridViewSerializer(view, context=context).data)
        serialized_view = GridViewSerializer(view, context=context).data
        response.data['field_options'] = serialized_view['field_options']

        return response
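
A sketch of a request combining the new `search` parameter with the `field_options` include (ids, host and token assumed):

```python
import requests

response = requests.get(
    'http://localhost:8000/api/database/views/grid/17/',
    params={'search': 'bram', 'include': 'field_options'},
    headers={'Authorization': 'JWT <token>'}
)
data = response.json()
rows = data['results']
options = data['field_options']  # present because of the include parameter
```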
backend/src/baserow/contrib/database/api/views/serializers.py

@@ -82,8 +82,8 @@ class UpdateViewSortSerializer(serializers.ModelSerializer):
class ViewSerializer(serializers.ModelSerializer):
    type = serializers.SerializerMethodField()
    table = TableSerializer()
    filters = ViewFilterSerializer(many=True, source='viewfilter_set')
    sortings = ViewSortSerializer(many=True, source='viewsort_set')
    filters = ViewFilterSerializer(many=True, source='viewfilter_set', required=False)
    sortings = ViewSortSerializer(many=True, source='viewsort_set', required=False)

    class Meta:
        model = View

@@ -95,15 +95,23 @@ class ViewSerializer(serializers.ModelSerializer):
    }

    def __init__(self, *args, **kwargs):
        include_filters = kwargs.pop('filters') if 'filters' in kwargs else False
        include_sortings = kwargs.pop('sortings') if 'sortings' in kwargs else False
        context = kwargs.setdefault("context", {})
        context['include_filters'] = kwargs.pop('filters', False)
        context['include_sortings'] = kwargs.pop('sortings', False)
        super().__init__(*args, **kwargs)

        if not include_filters:
            self.fields.pop('filters')
    def to_representation(self, instance):
        # We remove the fields in to_representation rather than __init__ as otherwise
        # drf-spectacular will not know that filters and sortings exist as optional
        # return fields. This way the fields are still dynamic and also show up in the
        # OpenAPI specification.
        if not self.context['include_filters']:
            self.fields.pop('filters', None)

        if not include_sortings:
            self.fields.pop('sortings')
        if not self.context['include_sortings']:
            self.fields.pop('sortings', None)

        return super().to_representation(instance)

    @extend_schema_field(OpenApiTypes.STR)
    def get_type(self, instance):
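
A sketch of the serializer's behaviour after this change (the `view` instance is assumed):

```python
# Optional fields are dropped from the output by default.
ViewSerializer(view).data  # no 'filters' or 'sortings' keys

# With the flags they are included; because the removal now happens in
# to_representation, drf-spectacular still documents both as optional fields.
ViewSerializer(view, filters=True, sortings=True).data
```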
backend/src/baserow/contrib/database/api/views/views.py

@@ -2,7 +2,7 @@ from django.db import transaction

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAuthenticated, AllowAny

from drf_spectacular.utils import extend_schema
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes

@@ -46,6 +46,12 @@ from .errors import (
class ViewsView(APIView):
    permission_classes = (IsAuthenticated,)

    def get_permissions(self):
        if self.request.method == 'GET':
            return [AllowAny()]

        return super().get_permissions()

    @extend_schema(
        parameters=[
            OpenApiParameter(

@@ -54,18 +60,31 @@ class ViewsView(APIView):
                type=OpenApiTypes.INT,
                description='Returns only views of the table related to the provided '
                            'value.'
            ),
            OpenApiParameter(
                name='include',
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                description=(
                    'A comma separated list of extra attributes to include on each '
                    'view in the response. The supported attributes are `filters` and '
                    '`sortings`. For example `include=filters,sortings` will add the '
                    'attributes `filters` and `sortings` to every returned view, '
                    'containing a list of the views filters and sortings respectively.'
                )
            ),
        ],
        tags=['Database table views'],
        operation_id='list_database_table_views',
        description=(
            'Lists all views of the table related to the provided `table_id` if the '
            'user has access to the related database\'s group. A table can have '
            'multiple views. Each view can display the data in a different way. For '
            'example the `grid` view shows the data in a spreadsheet like way. That '
            'type has custom endpoints for data retrieval and manipulation. In the '
            'future other views types like a calendar or Kanban are going to be '
            'added. Each type can have different properties.'
            'user has access to the related database\'s group. If the group is '
            'related to a template, then this endpoint will be publicly accessible. A '
            'table can have multiple views. Each view can display the data in a '
            'different way. For example the `grid` view shows the data in a '
            'spreadsheet like way. That type has custom endpoints for data retrieval '
            'and manipulation. In the future other views types like a calendar or '
            'Kanban are going to be added. Each type can have different properties.'
        ),
        responses={
            200: PolymorphicCustomFieldRegistrySerializer(

@@ -89,7 +108,8 @@ class ViewsView(APIView):
        """

        table = TableHandler().get_table(table_id)
        table.database.group.has_user(request.user, raise_error=True)
        table.database.group.has_user(request.user, raise_error=True,
                                      allow_if_template=True)
        views = View.objects.filter(table=table).select_related('content_type')

        if filters:

@@ -117,7 +137,20 @@ class ViewsView(APIView):
                type=OpenApiTypes.INT,
                description='Creates a view for the table related to the provided '
                            'value.'
            ),
            OpenApiParameter(
                name='include',
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                description=(
                    'A comma separated list of extra attributes to include on each '
                    'view in the response. The supported attributes are `filters` and '
                    '`sortings`. '
                    'For example `include=filters,sortings` will add the attributes '
                    '`filters` and `sortings` to every returned view, containing '
                    'a list of the views filters and sortings respectively.'
                )
            ),
        ],
        tags=['Database table views'],
        operation_id='create_database_table_view',

@@ -176,7 +209,20 @@ class ViewView(APIView):
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description='Returns the view related to the provided value.'
            ),
            OpenApiParameter(
                name='include',
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                description=(
                    'A comma separated list of extra attributes to include on the '
                    'returned view. The supported attributes are `filters` and '
                    '`sortings`. '
                    'For example `include=filters,sortings` will add the attributes '
                    '`filters` and `sortings` to every returned view, containing '
                    'a list of the views filters and sortings respectively.'
                )
            ),
        ],
        tags=['Database table views'],
        operation_id='get_database_table_view',

@@ -219,7 +265,20 @@ class ViewView(APIView):
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description='Updates the view related to the provided value.'
            ),
            OpenApiParameter(
                name='include',
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                description=(
                    'A comma separated list of extra attributes to include on the '
                    'returned view. The supported attributes are `filters` and '
                    '`sortings`. '
                    'For example `include=filters,sortings` will add the attributes '
                    '`filters` and `sortings` to every returned view, containing '
                    'a list of the views filters and sortings respectively.'
                )
            ),
        ],
        tags=['Database table views'],
        operation_id='update_database_table_view',
backend/src/baserow/contrib/database/application_types.py

@@ -1,8 +1,13 @@
from django.core.management.color import no_style
from django.urls import path, include
from django.db import connections
from django.conf import settings

from baserow.core.registries import ApplicationType
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.views.registries import view_type_registry

from .models import Database
from .models import Database, Table
from .table.handler import TableHandler
from .api.serializers import DatabaseSerializer

@@ -12,7 +17,7 @@ class DatabaseApplicationType(ApplicationType):
    model_class = Database
    instance_serializer_class = DatabaseSerializer

    def pre_delete(self, user, database):
    def pre_delete(self, database):
        """
        When a database is deleted we must also delete the related tables via the table
        handler.

@@ -22,7 +27,7 @@ class DatabaseApplicationType(ApplicationType):
        table_handler = TableHandler()

        for table in database_tables:
            table_handler.delete_table(user, table)
            table_handler._delete_table(table)

    def get_api_urls(self):
        from .api import urls as api_urls

@@ -30,3 +35,163 @@ class DatabaseApplicationType(ApplicationType):
        return [
            path('database/', include(api_urls, namespace=self.type)),
        ]

    def export_serialized(self, database):
        """
        Exports the database application type to a serialized format that can later
        be imported via the `import_serialized`.
        """

        tables = database.table_set.all().prefetch_related(
            'field_set',
            'view_set',
            'view_set__viewfilter_set',
            'view_set__viewsort_set'
        )
        serialized_tables = []
        for table in tables:
            fields = table.field_set.all()
            serialized_fields = []
            for f in fields:
                field = f.specific
                field_type = field_type_registry.get_by_model(field)
                serialized_fields.append(field_type.export_serialized(field))

            serialized_views = []
            for v in table.view_set.all():
                view = v.specific
                view_type = view_type_registry.get_by_model(view)
                serialized_views.append(view_type.export_serialized(view))

            model = table.get_model(fields=fields)
            serialized_rows = []
            table_cache = {}
            for row in model.objects.all():
                serialized_row = {
                    'id': row.id,
                    'order': str(row.order)
                }
                for field_object in model._field_objects.values():
                    field_name = field_object['name']
                    field_type = field_object['type']
                    serialized_row[field_name] = field_type.get_export_serialized_value(
                        row,
                        field_name,
                        table_cache
                    )
                serialized_rows.append(serialized_row)

            serialized_tables.append({
                'id': table.id,
                'name': table.name,
                'order': table.order,
                'fields': serialized_fields,
                'views': serialized_views,
                'rows': serialized_rows,
            })

        serialized = super().export_serialized(database)
        serialized['tables'] = serialized_tables
        return serialized

    def import_serialized(self, group, serialized_values, id_mapping):
        """
        Imports a database application exported by the `export_serialized` method.
        """

        if 'database_tables' not in id_mapping:
            id_mapping['database_tables'] = {}

        tables = serialized_values.pop('tables')
        database = super().import_serialized(group, serialized_values, id_mapping)
        connection = connections[settings.USER_TABLE_DATABASE]

        # First, we want to create all the table instances because it could be that
        # field or view properties depend on the existence of a table.
        for table in tables:
            table_object = Table.objects.create(
                database=database,
                name=table['name'],
                order=table['order'],
            )
            id_mapping['database_tables'][table['id']] = table_object.id
            table['_object'] = table_object
            table['_field_objects'] = []

        # Because view properties might depend on fields, we first want to create all
        # the fields.
        for table in tables:
            for field in table['fields']:
                field_type = field_type_registry.get(field['type'])
                field_object = field_type.import_serialized(
                    table['_object'],
                    field,
                    id_mapping
                )

                if field_object:
                    table['_field_objects'].append(field_object)

        # Now that all the tables and fields exist, we can create the views and create
        # the table schema in the database.
        for table in tables:
            for view in table['views']:
                view_type = view_type_registry.get(view['type'])
                view_type.import_serialized(table['_object'], view, id_mapping)

            # We don't need to create all the fields individually because the schema
            # editor can handle the creation of the table schema in one go.
            with connection.schema_editor() as schema_editor:
                model = table['_object'].get_model(
                    fields=table['_field_objects'],
                    field_ids=[]
                )
                schema_editor.create_model(model)

        # Now that everything is in place we can start filling the table with the rows
        # in an efficient manner by using the bulk_create functionality.
        for table in tables:
            model = table['_object'].get_model(
                fields=table['_field_objects'],
                field_ids=[]
            )
            field_ids = [field_object.id for field_object in table['_field_objects']]
            rows_to_be_inserted = []

            for row in table['rows']:
                row_object = model(id=row['id'], order=row['order'])

                for field in table['fields']:
                    field_type = field_type_registry.get(field['type'])
                    new_field_id = id_mapping['database_fields'][field['id']]

                    # If the new field id is not present in the field_ids then we don't
                    # want to set that value on the row. This is because upon creation
                    # of the field there could be a deliberate choice not to populate
                    # that field. This is for example the case with the related field
                    # of the `link_row` field which would result in duplicates if we
                    # would populate.
                    if new_field_id in field_ids:
                        field_type.set_import_serialized_value(
                            row_object,
                            f'field_{id_mapping["database_fields"][field["id"]]}',
                            row[f'field_{field["id"]}'],
                            id_mapping
                        )

                rows_to_be_inserted.append(row_object)

            # We want to insert the rows in bulk because there could potentially be
            # hundreds of thousands of rows in there and this will result in better
            # performance.
            model.objects.bulk_create(rows_to_be_inserted)

            # When the rows are inserted we keep providing the old ids and because of
            # that the auto increment is still set at `1`. This needs to be set to the
            # maximum value because otherwise creating a new row could later fail.
            connection = connections[settings.USER_TABLE_DATABASE]
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), [model])
            with connection.cursor() as cursor:
                cursor.execute(sequence_sql[0])

        return database
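
For orientation, a sketch of the structure `export_serialized` returns; the top-level keys come from the parent `ApplicationType.export_serialized` and are assumed here, and all values are invented:

```python
{
    'id': 10,
    'name': 'Company',
    'type': 'database',
    'tables': [{
        'id': 42,
        'name': 'Customers',
        'order': 1,
        'fields': [{'id': 7, 'type': 'text', 'name': 'Name'}],
        'views': [{'id': 3, 'type': 'grid', 'name': 'Grid'}],
        'rows': [{'id': 1, 'order': '1.00000000000000000000', 'field_7': 'Ada'}]
    }]
}
```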
backend/src/baserow/contrib/database/config.py

@@ -3,9 +3,6 @@ from django.apps import AppConfig
from baserow.core.registries import plugin_registry, application_type_registry
from baserow.ws.registries import page_registry

from .views.registries import view_type_registry, view_filter_type_registry
from .fields.registries import field_type_registry, field_converter_registry


class DatabaseConfig(AppConfig):
    name = 'baserow.contrib.database'

@@ -41,13 +38,16 @@ class DatabaseConfig(AppConfig):
    def ready(self):
        self.prevent_generated_model_for_registering()

        from .views.registries import view_type_registry, view_filter_type_registry
        from .fields.registries import field_type_registry, field_converter_registry

        from .plugins import DatabasePlugin
        plugin_registry.register(DatabasePlugin())

        from .fields.field_types import (
            TextFieldType, LongTextFieldType, URLFieldType, NumberFieldType,
            BooleanFieldType, DateFieldType, LinkRowFieldType, EmailFieldType,
            FileFieldType, SingleSelectFieldType
            FileFieldType, SingleSelectFieldType, PhoneNumberFieldType
        )
        field_type_registry.register(TextFieldType())
        field_type_registry.register(LongTextFieldType())

@@ -59,6 +59,7 @@ class DatabaseConfig(AppConfig):
        field_type_registry.register(LinkRowFieldType())
        field_type_registry.register(FileFieldType())
        field_type_registry.register(SingleSelectFieldType())
        field_type_registry.register(PhoneNumberFieldType())

        from .fields.field_converters import LinkRowFieldConverter, FileFieldConverter
        field_converter_registry.register(LinkRowFieldConverter())
backend/src/baserow/contrib/database/database_routers.py

@@ -10,11 +10,18 @@ class TablesDatabaseRouter(object):

    @staticmethod
    def user_table_database_if_generated_table_database(model):
        return (
            settings.USER_TABLE_DATABASE
            if hasattr(model, '_generated_table_model') else
            None
        )
        # If the model is generated by the Table model then we want to use the
        # USER_TABLE_DATABASE because it could be that the user data does not live in
        # the default database. This is also the case when the model is automatically
        # created by a generated table model.
        if (
            hasattr(model, '_generated_table_model') or
            (
                model._meta.auto_created and
                hasattr(model._meta.auto_created, '_generated_table_model')
            )
        ):
            return settings.USER_TABLE_DATABASE

    def db_for_read(self, model, **hints):
        return self.user_table_database_if_generated_table_database(model)
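
The extra `auto_created` branch matters for implicit many-to-many through models, which Django auto-creates from the generated table models. A sketch of the settings this router reads (aliases invented; both may point at the same server):

```python
DATABASES = {
    'default': {'ENGINE': 'django.db.backends.postgresql', 'NAME': 'baserow'},
    'user_tables': {'ENGINE': 'django.db.backends.postgresql', 'NAME': 'baserow'},
}
USER_TABLE_DATABASE = 'user_tables'  # alias the router returns for generated models
```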
backend/src/baserow/contrib/database/db/schema.py

@@ -10,9 +10,9 @@ class PostgresqlLenientDatabaseSchemaEditor:
    format. If the casting still fails the value will be set to null.
    """

    sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s " \
                            "USING pg_temp.try_cast(%(column)s::text)"
    sql_drop_try_cast = "DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)"
    sql_alter_column_type = 'ALTER COLUMN %(column)s TYPE %(type)s ' \
                            'USING pg_temp.try_cast(%(column)s::text)'
    sql_drop_try_cast = 'DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)'
    sql_create_try_cast = """
        create or replace function pg_temp.try_cast(
            p_in text,

@@ -35,16 +35,20 @@ class PostgresqlLenientDatabaseSchemaEditor:
    """

    def __init__(self, *args, alter_column_prepare_old_value='',
                 alter_column_prepare_new_value=''):
                 alter_column_prepare_new_value='',
                 force_alter_column=False):
        self.alter_column_prepare_old_value = alter_column_prepare_old_value
        self.alter_column_prepare_new_value = alter_column_prepare_new_value
        self.force_alter_column = force_alter_column
        super().__init__(*args)

    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        if self.force_alter_column:
            old_type = f'{old_type}_forced'

        if old_type != new_type:
            variables = {}

            if isinstance(self.alter_column_prepare_old_value, tuple):
                alter_column_prepare_old_value, v = self.alter_column_prepare_old_value
                variables = {**variables, **v}

@@ -57,12 +61,13 @@ class PostgresqlLenientDatabaseSchemaEditor:
            else:
                alter_column_prepare_new_value = self.alter_column_prepare_new_value

            quoted_column_name = self.quote_name(new_field.column)
            self.execute(self.sql_drop_try_cast)
            self.execute(self.sql_create_try_cast % {
                "column": self.quote_name(new_field.column),
                "type": new_type,
                "alter_column_prepare_old_value": alter_column_prepare_old_value,
                "alter_column_prepare_new_value": alter_column_prepare_new_value
                'column': quoted_column_name,
                'type': new_type,
                'alter_column_prepare_old_value': alter_column_prepare_old_value,
                'alter_column_prepare_new_value': alter_column_prepare_new_value
            }, variables)

            return super()._alter_field(model, old_field, new_field, old_type, new_type,

@@ -71,7 +76,8 @@ class PostgresqlLenientDatabaseSchemaEditor:

@contextlib.contextmanager
def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
                          alter_column_prepare_new_value=None):
                          alter_column_prepare_new_value=None,
                          force_alter_column=False):
    """
    A contextual function that yields a modified version of the connection's schema
    editor. This temporary version is more lenient than the regular editor. Normally

@@ -89,6 +95,9 @@ def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
    :param alter_column_prepare_new_value: Optionally a query statement converting the
        `p_in` text value to the new type.
    :type alter_column_prepare_new_value: None or str
    :param force_alter_column: When true forces the schema editor to run an alter
        column statement using the previous two alter_column_prepare parameters.
    :type force_alter_column: bool
    :raises ValueError: When the provided connection is not supported. For now only
        `postgresql` is supported.
    """

@@ -109,7 +118,9 @@ def lenient_schema_editor(connection, alter_column_prepare_old_value=None,

    connection.SchemaEditorClass = schema_editor_class

    kwargs = {}
    kwargs = {
        'force_alter_column': force_alter_column
    }

    if alter_column_prepare_old_value:
        kwargs['alter_column_prepare_old_value'] = alter_column_prepare_old_value
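
A sketch of the new flag in use: it forces the ALTER COLUMN even when the old and new column types are identical, so the prepare statements can still rewrite non-conforming values (the field variables and the cast below are illustrative):

```python
from django.db import connection

with lenient_schema_editor(
    connection,
    alter_column_prepare_new_value="p_in::text",  # illustrative conversion
    force_alter_column=True
) as schema_editor:
    # Runs the USING pg_temp.try_cast(...) conversion despite equal types.
    schema_editor.alter_field(model, old_field, new_field)
```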
155
backend/src/baserow/contrib/database/fields/field_filters.py
Normal file
155
backend/src/baserow/contrib/database/fields/field_filters.py
Normal file
|
@ -0,0 +1,155 @@
from typing import Dict, Any, Union

from django.db.models import Q, BooleanField
from django.db.models.expressions import RawSQL

FILTER_TYPE_AND = 'AND'
FILTER_TYPE_OR = 'OR'


class AnnotatedQ:
    """
    A simple wrapper class combining the params for a Queryset.annotate call with a
    django Q object to be used in combination with FilterBuilder to dynamically
    build up filters which also require annotations.
    """

    def __init__(self, annotation: Dict[str, Any], q: Union[Q, Dict[str, Any]]):
        """
        :param annotation: A dictionary which can be unpacked into a django
            Queryset.annotate call. This will only happen when using
            FilterBuilder.apply_to_queryset.
        :param q: A Q object or kwargs which will be used to create a Q object.
        """

        self.annotation = annotation or {}
        if isinstance(q, Q):
            self.q = q
        else:
            self.q = Q(**q)

    def __invert__(self):
        return AnnotatedQ(self.annotation, ~self.q)


OptionallyAnnotatedQ = Union[Q, AnnotatedQ]


class FilterBuilder:
    """
    Combines multiple Q or AnnotatedQ filters into a single filter which will AND or
    OR the provided filters together based on the filter_type parameter. When
    applied to a queryset it will also annotate the queryset prior to filtering with
    the merged annotations from AnnotatedQ filters.
    """

    def __init__(self, filter_type: str):
        """
        :param filter_type: Either field_filters.FILTER_TYPE_AND or
            field_filters.FILTER_TYPE_OR which dictates how provided Q or AnnotatedQ
            filters will be combined together.
            For type OR they will be ORed together when applied to a filter set,
            for type AND they will be ANDed together.
        """

        if filter_type not in [FILTER_TYPE_AND, FILTER_TYPE_OR]:
            raise ValueError(f'Unknown filter type {filter_type}.')

        self._annotation = {}
        self._q_filters = Q()
        self._filter_type = filter_type

    def filter(self, q: OptionallyAnnotatedQ) -> 'FilterBuilder':
        """
        Adds a Q or AnnotatedQ filter into this builder to be joined together with
        existing filters based on the builder's `filter_type`.

        Annotations on provided AnnotatedQ's are merged together with any previously
        supplied annotations via dict unpacking and merging.

        :param q: A Q or AnnotatedQ.
        :return: The updated FilterBuilder with the provided filter applied.
        """

        if isinstance(q, AnnotatedQ):
            self._annotate(q.annotation)
            self._filter(q.q)
        else:
            self._filter(q)
        return self

    def apply_to_queryset(self, queryset):
        """
        Applies all of the Q and AnnotatedQ filters previously given to this
        FilterBuilder by first applying all annotations from AnnotatedQ's and then
        filtering with a Q filter resulting from the combination of all filters
        ANDed or ORed depending on the filter_type attribute.

        :param queryset: The queryset to annotate and filter.
        :return: The annotated and filtered queryset.
        """

        return queryset.annotate(**self._annotation).filter(self._q_filters)

    def _annotate(self, annotation_dict: Dict[str, Any]) -> 'FilterBuilder':
        self._annotation = {**self._annotation, **annotation_dict}

    def _filter(self, q_filter: Q) -> 'FilterBuilder':
        if self._filter_type == FILTER_TYPE_AND:
            self._q_filters &= q_filter
        elif self._filter_type == FILTER_TYPE_OR:
            self._q_filters |= q_filter
        else:
            raise ValueError(f'Unknown filter type {self._filter_type}.')


def contains_filter(field_name, value, model_field, _) -> OptionallyAnnotatedQ:
    value = value.strip()
    # If an empty value has been provided we do not want to filter at all.
    if value == '':
        return Q()
    # Check if the model_field accepts the value.
    try:
        model_field.get_prep_value(value)
        return Q(**{f'{field_name}__icontains': value})
    except Exception:
        pass
    return Q()


def filename_contains_filter(field_name, value, _, field) -> OptionallyAnnotatedQ:
    value = value.strip()
    # If an empty value has been provided we do not want to filter at all.
    if value == '':
        return Q()
    # Check if the model_field has a file which matches the provided filter value.
    annotation_query = _build_filename_contains_raw_query(field, value)
    return AnnotatedQ(annotation={
        f'{field_name}_matches_visible_names': annotation_query
    }, q={
        f'{field_name}_matches_visible_names': True
    })


def _build_filename_contains_raw_query(field, value):
    # It is not possible to use Django's ORM to query for if one item in a JSONB
    # list has a key which contains a specified value.
    #
    # The closest thing the Django ORM provides is:
    #   queryset.filter(your_json_field__contains=[{"key": "value"}])
    # However this is an exact match, so in the above example [{"key": "value_etc"}]
    # would not match the filter.
    #
    # Instead we have to resort to RawSQL to use various built in PostgreSQL JSON
    # Array manipulation functions to be able to 'iterate' over a JSONB list
    # performing `like` on individual keys in said list.
    num_files_with_name_like_value = f"""
        EXISTS(
            SELECT attached_files ->> 'visible_name'
            FROM JSONB_ARRAY_ELEMENTS("field_{field.id}") as attached_files
            WHERE UPPER(attached_files ->> 'visible_name') LIKE UPPER(%s)
        )
    """
    return RawSQL(num_files_with_name_like_value, params=[f"%{value}%"],
                  output_field=BooleanField())
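A brief usage sketch of the new builder; `model` below is a placeholder for any generated table model:

from django.db.models import BooleanField, Q, Value
from baserow.contrib.database.fields.field_filters import (
    AnnotatedQ, FilterBuilder, FILTER_TYPE_OR
)

builder = FilterBuilder(filter_type=FILTER_TYPE_OR)
builder.filter(Q(field_1__icontains='john'))
builder.filter(AnnotatedQ(
    annotation={'field_2_flag': Value(True, output_field=BooleanField())},
    q={'field_2_flag': True}
))
# Annotates first, then filters: model.objects.annotate(...).filter(q1 | q2)
rows = builder.apply_to_queryset(model.objects.all())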
backend/src/baserow/contrib/database/fields/field_types.py

@@ -1,43 +1,46 @@

+from collections import defaultdict
+
 from datetime import datetime, date
 from decimal import Decimal
-from pytz import timezone
 from random import randrange, randint

 from dateutil import parser
 from dateutil.parser import ParserError
-from datetime import datetime, date

-from django.db import models
-from django.db.models import Case, When
 from django.contrib.postgres.fields import JSONField
-from django.core.validators import URLValidator, EmailValidator
 from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator, EmailValidator, RegexValidator
+from django.db import models
+from django.db.models import Case, When, Q, F, Func, Value, CharField
 from django.db.models.expressions import RawSQL
 from django.db.models.functions import Coalesce
 from django.utils.timezone import make_aware

+from pytz import timezone
+
 from rest_framework import serializers

-from baserow.core.models import UserFile
-from baserow.core.user_files.exceptions import UserFileDoesNotExist
-from baserow.contrib.database.api.fields.serializers import (
-    LinkRowValueSerializer, FileFieldRequestSerializer, FileFieldResponseSerializer,
-    SelectOptionSerializer
-)
 from baserow.contrib.database.api.fields.errors import (
     ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE, ERROR_LINK_ROW_TABLE_NOT_PROVIDED,
     ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE
 )
-from .handler import FieldHandler
-from .registries import FieldType, field_type_registry
-from .models import (
-    NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, DATE_FORMAT, DATE_TIME_FORMAT,
-    TextField, LongTextField, URLField, NumberField, BooleanField, DateField,
-    LinkRowField, EmailField, FileField,
-    SingleSelectField, SelectOption
-)
+from baserow.contrib.database.api.fields.serializers import (
+    LinkRowValueSerializer, FileFieldRequestSerializer, FileFieldResponseSerializer,
+    SelectOptionSerializer
+)
+from baserow.core.models import UserFile
+from baserow.core.user_files.exceptions import UserFileDoesNotExist
 from .exceptions import (
     LinkRowTableNotInSameDatabase, LinkRowTableNotProvided,
     IncompatiblePrimaryFieldTypeError
 )
+from .field_filters import contains_filter, AnnotatedQ, filename_contains_filter
+from .fields import SingleSelectForeignKey
+from .handler import FieldHandler
+from .models import (
+    NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, TextField, LongTextField, URLField,
+    NumberField, BooleanField, DateField,
+    LinkRowField, EmailField, FileField,
+    SingleSelectField, SelectOption, PhoneNumberField
+)
+from .registries import FieldType, field_type_registry


 class TextFieldType(FieldType):
@@ -57,6 +60,9 @@ class TextFieldType(FieldType):

     def random_value(self, instance, fake, cache):
         return fake.name()

+    def contains_query(self, *args):
+        return contains_filter(*args)
+

 class LongTextFieldType(FieldType):
     type = 'long_text'

@@ -72,6 +78,9 @@ class LongTextFieldType(FieldType):

     def random_value(self, instance, fake, cache):
         return fake.text()

+    def contains_query(self, *args):
+        return contains_filter(*args)
+

 class URLFieldType(FieldType):
     type = 'url'

@@ -108,6 +117,9 @@ class URLFieldType(FieldType):

         return super().get_alter_column_prepare_new_value(connection, from_field,
                                                           to_field)

+    def contains_query(self, *args):
+        return contains_filter(*args)
+

 class NumberFieldType(FieldType):
     MAX_DIGITS = 50
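Aside: the net effect of these one-line `contains_query` hooks is a plain icontains filter; a sketch with placeholder arguments (`model_field` is any Django field instance, the final argument is unused by this filter):

from baserow.contrib.database.fields.field_filters import contains_filter

q = contains_filter('field_1', ' john ', model_field, None)
# -> Q(field_1__icontains='john') when the model field accepts the value
# -> Q() (no filtering at all) for empty input or values the field rejects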
@@ -187,25 +199,15 @@ class NumberFieldType(FieldType):

         return super().get_alter_column_prepare_new_value(connection, from_field,
                                                           to_field)

-    def after_update(self, from_field, to_field, from_model, to_model, user,
-                     connection, altered_column, before):
-        """
-        The allowing of negative values isn't stored in the database field type. If
-        the type hasn't changed, but the allowing of negative values has it means
-        that the column data hasn't been converted to positive values yet. We need
-        to do this here. All the negative values are set to 0.
-        """
-
-        if (
-            not altered_column
-            and not to_field.number_negative
-            and from_field.number_negative
-        ):
-            to_model.objects.filter(**{
-                f'field_{to_field.id}__lt': 0
-            }).update(**{
-                f'field_{to_field.id}': 0
-            })
+    def force_same_type_alter_column(self, from_field, to_field):
+        return not to_field.number_negative and from_field.number_negative
+
+    def contains_query(self, *args):
+        return contains_filter(*args)
+
+    def get_export_serialized_value(self, row, field_name, cache):
+        value = getattr(row, field_name)
+        return value if value is None else str(value)


 class BooleanFieldType(FieldType):

@@ -221,6 +223,12 @@ class BooleanFieldType(FieldType):

     def random_value(self, instance, fake, cache):
         return fake.pybool()

+    def get_export_serialized_value(self, row, field_name, cache):
+        return 'true' if getattr(row, field_name) else 'false'
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        setattr(row, field_name, value == 'true')
+

 class DateFieldType(FieldType):
     type = 'date'
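A quick round-trip sketch of the new boolean serialization hooks; `row` stands in for any generated table model instance:

field_type = BooleanFieldType()
exported = field_type.get_export_serialized_value(row, 'field_1', {})
# -> 'true' or 'false': a JSON-safe string instead of a Python bool
field_type.set_import_serialized_value(row, 'field_1', exported, {})
# -> equivalent to setattr(row, 'field_1', exported == 'true')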
@@ -299,18 +307,33 @@ class DateFieldType(FieldType):

         to_field_type = field_type_registry.get_by_model(to_field)
         if to_field_type.type != self.type and connection.vendor == 'postgresql':
-            sql_type = 'date'
-            sql_format = DATE_FORMAT[from_field.date_format]['sql']
-
-            if from_field.date_include_time:
-                sql_type = 'timestamp'
-                sql_format += ' ' + DATE_TIME_FORMAT[from_field.date_time_format]['sql']
-
+            sql_format = from_field.get_psql_format()
+            sql_type = from_field.get_psql_type()
             return f"""p_in = TO_CHAR(p_in::{sql_type}, '{sql_format}');"""

         return super().get_alter_column_prepare_old_value(connection, from_field,
                                                           to_field)

+    def contains_query(self, field_name, value, model_field, field):
+        value = value.strip()
+        # If an empty value has been provided we do not want to filter at all.
+        if value == '':
+            return Q()
+        return AnnotatedQ(
+            annotation={
+                f"formatted_date_{field_name}": Coalesce(
+                    Func(
+                        F(field_name),
+                        Value(field.get_psql_format()),
+                        function='to_char',
+                        output_field=CharField()
+                    ),
+                    Value('')
+                )
+            },
+            q={f'formatted_date_{field_name}__icontains': value}
+        )
+
     def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
         """
         If the field type has changed into a date field then we want to parse the old
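In other words, searching a date field first renders the column to text with PostgreSQL's to_char, using the field's own display format, and only then applies icontains. Sketched for a field whose format resolves to 'DD/MM/YYYY' (names illustrative):

annotated_q = DateFieldType().contains_query('field_1', '23/08', model_field, field)
# annotation: formatted_date_field_1 = COALESCE(to_char(field_1, 'DD/MM/YYYY'), '')
# q:          formatted_date_field_1__icontains = '23/08'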
@@ -321,22 +344,48 @@ class DateFieldType(FieldType):

         from_field_type = field_type_registry.get_by_model(from_field)
         if from_field_type.type != self.type and connection.vendor == 'postgresql':
-            sql_function = 'TO_DATE'
-            sql_format = DATE_FORMAT[to_field.date_format]['sql']
-
-            if to_field.date_include_time:
-                sql_function = 'TO_TIMESTAMP'
-                sql_format += ' ' + DATE_TIME_FORMAT[to_field.date_time_format]['sql']
+            sql_function = to_field.get_psql_type_convert_function()
+            sql_format = to_field.get_psql_format()
+            sql_type = to_field.get_psql_type()

             return f"""
                 begin
-                    p_in = {sql_function}(p_in::text, 'FM{sql_format}');
-                exception when others then end;
+                    IF char_length(p_in::text) < 5 THEN
+                        p_in = null;
+                    ELSEIF p_in IS NULL THEN
+                        p_in = null;
+                    ELSE
+                        p_in = GREATEST(
+                            {sql_function}(p_in::text, 'FM{sql_format}'),
+                            '0001-01-01'::{sql_type}
+                        );
+                    END IF;
+                exception when others then
+                    begin
+                        p_in = GREATEST(p_in::{sql_type}, '0001-01-01'::{sql_type});
+                    exception when others then
+                        p_in = p_default;
+                    end;
                 end;
             """

         return super().get_alter_column_prepare_old_value(connection, from_field,
                                                           to_field)

+    def get_export_serialized_value(self, row, field_name, cache):
+        value = getattr(row, field_name)
+
+        if value is None:
+            return value
+
+        return value.isoformat()
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        if not value:
+            return value
+
+        setattr(row, field_name, datetime.fromisoformat(value))
+

 class LinkRowFieldType(FieldType):
     """
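The export/import pair above leans on ISO 8601 being a lossless text encoding for dates and datetimes, e.g.:

from datetime import datetime

value = datetime(2021, 3, 15, 9, 28)
serialized = value.isoformat()                    # '2021-03-15T09:28:00'
assert datetime.fromisoformat(serialized) == value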
@@ -644,6 +693,75 @@ class LinkRowFieldType(FieldType):

         return values

+    def export_serialized(self, field):
+        serialized = super().export_serialized(field, False)
+        serialized['link_row_table_id'] = field.link_row_table_id
+        serialized['link_row_related_field_id'] = field.link_row_related_field_id
+        return serialized
+
+    def import_serialized(self, table, serialized_values, id_mapping):
+        serialized_copy = serialized_values.copy()
+        serialized_copy['link_row_table_id'] = (
+            id_mapping['database_tables'][serialized_copy['link_row_table_id']]
+        )
+        link_row_related_field_id = serialized_copy.pop('link_row_related_field_id')
+        related_field_found = (
+            'database_fields' in id_mapping and
+            link_row_related_field_id in id_mapping['database_fields']
+        )
+
+        if related_field_found:
+            # If the related field is found, it means that it has already been
+            # imported. In that case, we can directly set the `link_row_relation_id`
+            # when creating the current field.
+            serialized_copy['link_row_related_field_id'] = (
+                id_mapping['database_fields'][link_row_related_field_id]
+            )
+            related_field = LinkRowField.objects.get(
+                pk=serialized_copy['link_row_related_field_id']
+            )
+            serialized_copy['link_row_relation_id'] = related_field.link_row_relation_id
+
+        field = super().import_serialized(table, serialized_copy, id_mapping)
+
+        if related_field_found:
+            # If the related field is found, it means that when creating that field
+            # the `link_row_relation_id` was not yet set because this field, where
+            # the relation is being made to, did not yet exist. So we need to set it
+            # right now.
+            related_field.link_row_related_field_id = field.id
+            related_field.save()
+            # By returning None, the field is ignored when creating the table schema
+            # and inserting the data, which is exactly what we want because the
+            # through table has already been created and will result in an error if
+            # we do it again.
+            return None
+
+        return field
+
+    def get_export_serialized_value(self, row, field_name, cache):
+        cache_entry = f'{field_name}_relations'
+        if cache_entry not in cache:
+            # In order to prevent a lot of lookup queries in the through table, we
+            # want to fetch all the relations and add it to a temporary in memory
+            # cache containing a mapping of the old ids to the new ids. Every
+            # relation can use the cached mapped relations to find the correct id.
+            cache[cache_entry] = defaultdict(list)
+            through_model = row._meta.get_field(field_name).remote_field.through
+            through_model_fields = through_model._meta.get_fields()
+            current_field_name = through_model_fields[1].name
+            relation_field_name = through_model_fields[2].name
+            for relation in through_model.objects.all():
+                cache[cache_entry][getattr(
+                    relation,
+                    f'{current_field_name}_id'
+                )].append(getattr(relation, f'{relation_field_name}_id'))
+
+        return cache[cache_entry][row.id]
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        getattr(row, field_name).set(value)
+

 class EmailFieldType(FieldType):
     type = 'email'
@@ -684,6 +802,9 @@ class EmailFieldType(FieldType):

         return super().get_alter_column_prepare_new_value(connection, from_field,
                                                           to_field)

+    def contains_query(self, *args):
+        return contains_filter(*args)
+

 class FileFieldType(FieldType):
     type = 'file'
@@ -782,6 +903,15 @@ class FileFieldType(FieldType):

         return values

+    def contains_query(self, *args):
+        return filename_contains_filter(*args)
+
+    def get_export_serialized_value(self, row, field_name, cache):
+        raise NotImplementedError('@TODO file field type export')
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        raise NotImplementedError('@TODO file field type import')
+

 class SingleSelectFieldType(FieldType):
     type = 'single_select'
@@ -965,3 +1095,140 @@ class SingleSelectFieldType(FieldType):

         random_choice = randint(0, len(select_options) - 1)

         return select_options[random_choice]
+
+    def contains_query(self, field_name, value, model_field, field):
+        value = value.strip()
+        # If an empty value has been provided we do not want to filter at all.
+        if value == '':
+            return Q()
+
+        option_value_mappings = []
+        option_values = []
+        # We have to query for all option values here as the user table we are
+        # constructing a search query for could be in a different database from the
+        # SingleOption. In such a situation if we just tried to do a cross database
+        # join django would crash, so we must look up the values in a separate query.
+        for option in field.select_options.all():
+            option_values.append(option.value)
+            option_value_mappings.append(
+                f"(lower(%s), {int(option.id)})"
+            )
+
+        # If there are no values then there is no way this search could match this
+        # field.
+        if len(option_value_mappings) == 0:
+            return Q()
+
+        convert_rows_select_id_to_value_sql = f"""(
+            SELECT key FROM (
+                VALUES {','.join(option_value_mappings)}
+            ) AS values (key, value)
+            WHERE value = "field_{field.id}"
+        )
+        """
+
+        query = RawSQL(convert_rows_select_id_to_value_sql, params=option_values,
+                       output_field=models.CharField())
+        return AnnotatedQ(
+            annotation={
+                f"select_option_value_{field_name}": Coalesce(query, Value(''))
+            },
+            q={f'select_option_value_{field_name}__icontains': value}
+        )
+
+    def get_export_serialized_value(self, row, field_name, cache):
+        return getattr(row, field_name + '_id')
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        if not value:
+            return
+
+        setattr(
+            row,
+            field_name + '_id',
+            id_mapping['database_field_select_options'][value]
+        )
+
+
+class PhoneNumberFieldType(FieldType):
+    """
+    A simple wrapper around a TextField which ensures any entered data is a
+    simple phone number.
+
+    See `docs/decisions/001-phone-number-field-validation.md` for context as to why
+    the phone number validation was implemented using a simple regex.
+    """
+
+    type = 'phone_number'
+    model_class = PhoneNumberField
+
+    MAX_PHONE_NUMBER_LENGTH = 100
+    """
+    According to the E.164 (https://en.wikipedia.org/wiki/E.164) standard for
+    international numbers the max length of an E.164 number without formatting is 15
+    characters. However we allow users to store formatting characters, spaces and
+    expect them to be entering numbers not in the E.164 standard but instead a wide
+    range of local standards which might support longer numbers.
+    This is why we have picked a very generous 100 character length to support
+    heavily formatted local numbers.
+    """
+
+    PHONE_NUMBER_REGEX = rf'^[0-9NnXx,+._*()#=;/ -]{{1,{MAX_PHONE_NUMBER_LENGTH}}}$'
+    """
+    Allow common punctuation used in phone numbers and spaces to allow formatting,
+    but otherwise don't allow text as the phone number should work as a link on
+    mobile devices.
+    Duplicated in the frontend code at
+    web-frontend/modules/core/utils/string.js#isSimplePhoneNumber; please keep the
+    two in sync.
+    """
+
+    simple_phone_number_validator = RegexValidator(regex=PHONE_NUMBER_REGEX)
+
+    def prepare_value_for_db(self, instance, value):
+        if value == '' or value is None:
+            return ''
+        self.simple_phone_number_validator(value)
+        return value
+
+    def get_serializer_field(self, instance, **kwargs):
+        return serializers.CharField(
+            required=False,
+            allow_null=True,
+            allow_blank=True,
+            validators=[self.simple_phone_number_validator],
+            max_length=self.MAX_PHONE_NUMBER_LENGTH,
+            **kwargs
+        )
+
+    def get_model_field(self, instance, **kwargs):
+        return models.CharField(
+            default='',
+            blank=True,
+            null=True,
+            max_length=self.MAX_PHONE_NUMBER_LENGTH,
+            validators=[self.simple_phone_number_validator],
+            **kwargs
+        )
+
+    def random_value(self, instance, fake, cache):
+        return fake.phone_number()
+
+    def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
+        if connection.vendor == 'postgresql':
+            return f'''p_in = (
+            case
+                when p_in::text ~* '{self.PHONE_NUMBER_REGEX}'
+                then p_in::text
+                else ''
+                end
+            );'''
+
+        return super().get_alter_column_prepare_new_value(connection, from_field,
+                                                          to_field)
+
+    def contains_query(self, *args):
+        return contains_filter(*args)
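A small illustration of what the phone number validator accepts and rejects (regex inlined with the 100 character limit expanded):

import re

PHONE_NUMBER_REGEX = r'^[0-9NnXx,+._*()#=;/ -]{1,100}$'
assert re.match(PHONE_NUMBER_REGEX, '+1 (555) 123-4567')
assert re.match(PHONE_NUMBER_REGEX, '555.123.4567 x22')
assert not re.match(PHONE_NUMBER_REGEX, 'call me maybe')  # letters besides N/n/X/x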
backend/src/baserow/contrib/database/fields/handler.py

@@ -1,23 +1,21 @@

 import logging
 from copy import deepcopy

+from django.conf import settings
 from django.db import connections
 from django.db.utils import ProgrammingError, DataError
-from django.conf import settings

-from baserow.core.utils import extract_allowed, set_allowed_attrs
 from baserow.contrib.database.db.schema import lenient_schema_editor
 from baserow.contrib.database.views.handler import ViewHandler
-
+from baserow.core.utils import extract_allowed, set_allowed_attrs
 from .exceptions import (
     PrimaryFieldAlreadyExists, CannotDeletePrimaryField, CannotChangeFieldType,
     FieldDoesNotExist, IncompatiblePrimaryFieldTypeError
 )
-from .registries import field_type_registry, field_converter_registry
 from .models import Field, SelectOption
+from .registries import field_type_registry, field_converter_registry
 from .signals import field_created, field_updated, field_deleted


 logger = logging.getLogger(__name__)

@@ -159,7 +157,8 @@ class FieldHandler:

         # If the provided field type does not match with the current one we need to
         # migrate the field to the new type. Because the type has changed we also
         # need to remove all view filters.
-        if new_type_name and field_type.type != new_type_name:
+        baserow_field_type_changed = new_type_name and field_type.type != new_type_name
+        if baserow_field_type_changed:
             field_type = field_type_registry.get(new_type_name)

             if field.primary and not field_type.can_be_primary_field:

@@ -217,6 +216,17 @@ class FieldHandler:

                 connection
             )
         else:
+            if baserow_field_type_changed:
+                # If the baserow type has changed we always want to force run any
+                # alter column SQL as otherwise it might not run if the two baserow
+                # fields share the same underlying database column type.
+                force_alter_column = True
+            else:
+                force_alter_column = field_type.force_same_type_alter_column(
+                    old_field,
+                    field
+                )
+
             # If no field converter is found we are going to alter the field using
             # the lenient schema editor.
             with lenient_schema_editor(

@@ -225,7 +235,8 @@ class FieldHandler:

                     connection, old_field, field),
                 field_type.get_alter_column_prepare_new_value(
                     connection, old_field, field
-                )
+                ),
+                force_alter_column
             ) as schema_editor:
                 try:
                     schema_editor.alter_field(from_model, from_model_field,
backend/src/baserow/contrib/database/fields/models.py

@@ -6,7 +6,6 @@ from baserow.core.mixins import (

     OrderableMixin, PolymorphicContentTypeMixin, CreatedAndUpdatedOnMixin
 )

-
 NUMBER_TYPE_INTEGER = 'INTEGER'
 NUMBER_TYPE_DECIMAL = 'DECIMAL'
 NUMBER_TYPE_CHOICES = (

@@ -205,9 +204,45 @@ class DateField(Field):

         :rtype: str
         """

-        date_format = DATE_FORMAT[self.date_format]['format']
-        time_format = DATE_TIME_FORMAT[self.date_time_format]['format']
+        return self._get_format('format')
+
+    def get_psql_format(self):
+        """
+        Returns the sql datetime format as a string based on the field's properties.
+        This could for example be 'YYYY-MM-DD HH12:MIAM'.
+
+        :return: The sql datetime format based on the field's properties.
+        :rtype: str
+        """
+
+        return self._get_format('sql')
+
+    def get_psql_type(self):
+        """
+        Returns the postgresql column type used by this field depending on if it is
+        a date or datetime.
+
+        :return: The postgresql column type, either 'timestamp' or 'date'.
+        :rtype: str
+        """
+
+        return 'timestamp' if self.date_include_time else 'date'
+
+    def get_psql_type_convert_function(self):
+        """
+        Returns the postgresql function that can be used to coerce another
+        postgresql type to the correct type used by this field.
+
+        :return: The postgresql type conversion function, either 'TO_TIMESTAMP' or
+            'TO_DATE'.
+        :rtype: str
+        """
+
+        return 'TO_TIMESTAMP' if self.date_include_time else 'TO_DATE'
+
+    def _get_format(self, format_type):
+        date_format = DATE_FORMAT[self.date_format][format_type]
+        time_format = DATE_TIME_FORMAT[self.date_time_format][format_type]
+        if self.date_include_time:
+            return f'{date_format} {time_format}'
+        else:

@@ -237,10 +272,7 @@ class LinkRowField(Field):

         """

         if self.link_row_relation_id is None:
-            last_id = LinkRowField.objects.all().aggregate(
-                largest=models.Max('link_row_relation_id')
-            )['largest'] or 0
-            self.link_row_relation_id = last_id + 1
+            self.link_row_relation_id = self.get_new_relation_id()

         super().save(*args, **kwargs)

@@ -258,6 +290,13 @@ class LinkRowField(Field):

         return f'database_relation_{self.link_row_relation_id}'

+    @staticmethod
+    def get_new_relation_id():
+        last_id = LinkRowField.objects.all().aggregate(
+            largest=models.Max('link_row_relation_id')
+        )['largest'] or 0
+        return last_id + 1
+

 class EmailField(Field):
     pass

@@ -269,3 +308,7 @@ class FileField(Field):

 class SingleSelectField(Field):
     pass
+
+
+class PhoneNumberField(Field):
+    pass
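Assuming a DateField configured with the 'EU' date format and 12-hour time, the new helpers would behave along these lines (the exact strings come from the DATE_FORMAT and DATE_TIME_FORMAT mappings):

field = DateField(date_format='EU', date_time_format='12', date_include_time=True)
field.get_psql_format()                 # e.g. 'DD/MM/YYYY HH12:MIAM'
field.get_psql_type()                   # 'timestamp'
field.get_psql_type_convert_function()  # 'TO_TIMESTAMP'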
backend/src/baserow/contrib/database/fields/registries.py

@@ -1,14 +1,18 @@

+from django.db.models import Q
+
 from baserow.core.registry import (
     Instance, Registry, ModelInstanceMixin, ModelRegistryMixin,
     CustomFieldsInstanceMixin, CustomFieldsRegistryMixin, MapAPIExceptionsInstanceMixin,
-    APIUrlsRegistryMixin, APIUrlsInstanceMixin
+    APIUrlsRegistryMixin, APIUrlsInstanceMixin, ImportExportMixin
 )
 from .exceptions import FieldTypeAlreadyRegistered, FieldTypeDoesNotExist
+from .models import SelectOption


 class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
-                CustomFieldsInstanceMixin, ModelInstanceMixin, Instance):
+                CustomFieldsInstanceMixin, ModelInstanceMixin, ImportExportMixin,
+                Instance):
     """
     This abstract class represents a custom field type that can be added to the
     field type registry. It must be extended so customisation can be done. Each field

@@ -87,6 +91,26 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,

         return queryset

+    def contains_query(self, field_name, value, model_field, field):
+        """
+        Returns a Q or AnnotatedQ filter which performs a contains filter over the
+        provided field for this specific type of field.
+
+        :param field_name: The name of the field.
+        :type field_name: str
+        :param value: The value to check if this field contains or not.
+        :type value: str
+        :param model_field: The field's actual django field model instance.
+        :type model_field: models.Field
+        :param field: The related field's instance.
+        :type field: Field
+        :return: A Q or AnnotatedQ filter matching rows which contain the given
+            value.
+        :rtype: OptionallyAnnotatedQ
+        """
+
+        return Q()
+
     def get_serializer_field(self, instance, **kwargs):
         """
         Should return the serializer field based on the custom model instance

@@ -193,6 +217,9 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,

         """
         Can return an SQL statement to convert the `p_in` variable to a readable text
         format for the new field.
+        This SQL will not be run when converting between two fields of the same
+        baserow type which share the same underlying database column type.
+        If you require this then implement force_same_type_alter_column.

         Example: return "p_in = lower(p_in);"

@@ -214,8 +241,11 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,

         """
         Can return a SQL statement to convert the `p_in` variable from text to a
         desired format for the new field.
+        This SQL will not be run when converting between two fields of the same
+        baserow type which share the same underlying database column type.
+        If you require this then implement force_same_type_alter_column.

-        Example when a string is converted to a number, to statement could be:
+        Example: when a string is converted to a number, the statement could be
         `REGEXP_REPLACE(p_in, '[^0-9]', '', 'g')` which would remove all non numeric
         characters. The p_in variable is the old value as a string.

@@ -388,6 +418,152 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,

         return None

+    def force_same_type_alter_column(self, from_field, to_field):
+        """
+        Defines whether the sql provided by the
+        get_alter_column_prepare_{old,new}_value hooks should be forced to run when
+        converting between two fields of this field type which have the same
+        database column type.
+        You only need to implement this when you have validation and/or data
+        manipulation running as part of your alter_column_prepare SQL which must be
+        run even when from_field and to_field are the same Baserow field type and
+        sql column type. If your field has the same baserow type but will convert
+        into different sql column types then the alter sql will be run automatically
+        and you do not need to use this override.
+
+        :param from_field: The old field instance. It is not recommended to call the
+            save function as this will undo part of the changes that have been made.
+            This is just for comparing values.
+        :type from_field: Field
+        :param to_field: The updated field instance.
+        :type to_field: Field
+        :return: Whether the alter column sql should be forced to run.
+        :rtype: bool
+        """
+
+        return False
+
+    def export_serialized(self, field, include_allowed_fields=True):
+        """
+        Exports the field to a serialized dict that can be imported by the
+        `import_serialized` method. This dict is also JSON serializable.
+
+        :param field: The field instance that must be exported.
+        :type field: Field
+        :param include_allowed_fields: Indicates whether or not the allowed fields
+            should automatically be added to the serialized object.
+        :type include_allowed_fields: bool
+        :return: The exported field as a serialized dict.
+        :rtype: dict
+        """
+
+        serialized = {
+            'id': field.id,
+            'type': self.type,
+            'name': field.name,
+            'order': field.order,
+            'primary': field.primary
+        }
+
+        if include_allowed_fields:
+            for field_name in self.allowed_fields:
+                serialized[field_name] = getattr(field, field_name)
+
+        if self.can_have_select_options:
+            serialized['select_options'] = [
+                {
+                    'id': select_option.id,
+                    'value': select_option.value,
+                    'color': select_option.color,
+                    'order': select_option.order,
+                }
+                for select_option in field.select_options.all()
+            ]
+
+        return serialized
+
+    def import_serialized(self, table, serialized_values, id_mapping):
+        """
+        Imports an exported serialized field dict that was exported via the
+        `export_serialized` method.
+
+        :param table: The table where the field should be added to.
+        :type table: Table
+        :param serialized_values: The exported serialized field values that need to
+            be imported.
+        :type serialized_values: dict
+        :param id_mapping: The map of exported ids to newly created ids that must be
+            updated when a new instance has been created.
+        :type id_mapping: dict
+        :return: The newly created field instance.
+        :rtype: Field
+        """
+
+        if 'database_fields' not in id_mapping:
+            id_mapping['database_fields'] = {}
+            id_mapping['database_field_select_options'] = {}
+
+        serialized_copy = serialized_values.copy()
+        field_id = serialized_copy.pop('id')
+        serialized_copy.pop('type')
+        select_options = (
+            serialized_copy.pop('select_options')
+            if self.can_have_select_options else
+            []
+        )
+        field = self.model_class.objects.create(table=table, **serialized_copy)
+
+        id_mapping['database_fields'][field_id] = field.id
+
+        if self.can_have_select_options:
+            for select_option in select_options:
+                select_option_copy = select_option.copy()
+                select_option_id = select_option_copy.pop('id')
+                select_option_object = SelectOption.objects.create(
+                    field=field,
+                    **select_option_copy
+                )
+                id_mapping['database_field_select_options'][select_option_id] = (
+                    select_option_object.id
+                )
+
+        return field
+
+    def get_export_serialized_value(self, row, field_name, cache):
+        """
+        Exports the value of a row to a serialized value that is also JSON
+        serializable.
+
+        :param row: The row instance that the value must be exported from.
+        :type row: Object
+        :param field_name: The name of the field that must be exported.
+        :type field_name: str
+        :param cache: An in memory dictionary that is shared between all fields
+            while exporting the table. This is for example used by the link row
+            field type to prefetch all relations.
+        :type cache: dict
+        :return: The exported value.
+        :rtype: Object
+        """
+
+        return getattr(row, field_name)
+
+    def set_import_serialized_value(self, row, field_name, value, id_mapping):
+        """
+        Sets an imported and serialized value on a row instance.
+
+        :param row: The row instance where the value must be set on.
+        :type row: Object
+        :param field_name: The name of the field that must be set.
+        :type field_name: str
+        :param value: The value that must be set.
+        :type value: Object
+        :param id_mapping: The map of exported ids to newly created ids that must be
+            updated when a new instance has been created.
+        :type id_mapping: dict
+        """
+
+        setattr(row, field_name, value)
+

 class FieldTypeRegistry(APIUrlsRegistryMixin, CustomFieldsRegistryMixin,
                         ModelRegistryMixin, Registry):
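For a concrete sense of the shape, exporting a single select field would yield roughly the following dict (ids and values illustrative):

{
    'id': 1, 'type': 'single_select', 'name': 'Status', 'order': 0,
    'primary': False,
    'select_options': [
        {'id': 3, 'value': 'Done', 'color': 'green', 'order': 0},
    ],
}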
backend/src/baserow/contrib/database/migrations/0027_gridviewfieldoptions_order.py (new file)

@@ -0,0 +1,18 @@
# Generated by Django 2.2.11 on 2021-03-09 18:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('database', '0026_auto_20210125_1454'),
    ]

    operations = [
        migrations.AddField(
            model_name='gridviewfieldoptions',
            name='order',
            field=models.SmallIntegerField(default=32767),
        ),
    ]
backend/src/baserow/contrib/database/migrations/0028_fix_negative_date.py (new file)

@@ -0,0 +1,35 @@
# Generated by Django 2.2.11 on 2021-01-25 14:54

from django.db import migrations, connections
from django.conf import settings


def forward(apps, schema_editor):
    DateField = apps.get_model('database', 'DateField')

    connection = connections[settings.USER_TABLE_DATABASE]
    with connection.schema_editor() as tables_schema_editor:
        # We need to stop the transaction because we might need to lock a lot of
        # tables which could result in an out of memory exception.
        tables_schema_editor.atomic.__exit__(None, None, None)

        for field in DateField.objects.all():
            table_name = f'database_table_{field.table.id}'
            field_name = f'field_{field.id}'
            tables_schema_editor.execute(
                f"""
                UPDATE {table_name} SET {field_name} = '0001-01-01'::date
                WHERE {field_name} < '0001-01-01'::date
                """
            )


class Migration(migrations.Migration):

    dependencies = [
        ('database', '0027_gridviewfieldoptions_order'),
    ]

    operations = [
        migrations.RunPython(forward, migrations.RunPython.noop),
    ]
backend/src/baserow/contrib/database/migrations/ (new migration adding the PhoneNumberField model; filename not shown in this view)

@@ -0,0 +1,29 @@
# Generated by Django 2.2.11 on 2021-03-15 09:28

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('database', '0028_fix_negative_date'),
    ]

    operations = [
        migrations.CreateModel(
            name='PhoneNumberField',
            fields=[
                ('field_ptr', models.OneToOneField(
                    auto_created=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    parent_link=True, primary_key=True,
                    serialize=False,
                    to='database.Field'
                )),
            ],
            options={
                'abstract': False,
            },
            bases=('database.field',),
        ),
    ]
backend/src/baserow/contrib/database/models.py

@@ -4,7 +4,7 @@ from .table.models import Table

 from .views.models import View, GridView, GridViewFieldOptions, ViewFilter
 from .fields.models import (
     Field, TextField, NumberField, LongTextField, BooleanField, DateField, LinkRowField,
-    URLField, EmailField
+    URLField, EmailField, PhoneNumberField
 )
 from .tokens.models import Token, TokenPermission

@@ -13,7 +13,7 @@ __all__ = [

     'Table',
     'View', 'GridView', 'GridViewFieldOptions', 'ViewFilter',
     'Field', 'TextField', 'NumberField', 'LongTextField', 'BooleanField', 'DateField',
-    'LinkRowField', 'URLField', 'EmailField',
+    'LinkRowField', 'URLField', 'EmailField', 'PhoneNumberField',
     'Token', 'TokenPermission'
 ]
backend/src/baserow/contrib/database/table/handler.py

@@ -241,7 +241,8 @@ class TableHandler:

     def delete_table(self, user, table):
         """
-        Deletes an existing table instance.
+        Deletes an existing table instance if the user has access to the related
+        group. The table deleted signals are also fired.

         :param user: The user on whose behalf the table is deleted.
         :type user: User

@@ -256,12 +257,16 @@ class TableHandler:

         table.database.group.has_user(user, raise_error=True)
         table_id = table.id

+        # Delete the table schema from the database.
+        self._delete_table(table)
+
+        table_deleted.send(self, table_id=table_id, table=table, user=user)
+
+    def _delete_table(self, table):
+        """Deletes the table schema and instance."""
+
         connection = connections[settings.USER_TABLE_DATABASE]
         with connection.schema_editor() as schema_editor:
             model = table.get_model()
             schema_editor.delete_model(model)

         table.delete()
-
-        table_deleted.send(self, table_id=table_id, table=table, user=user)
backend/src/baserow/contrib/database/table/models.py

@@ -1,18 +1,17 @@

 import re
 from decimal import Decimal, DecimalException

 from django.db import models
 from django.db.models import Q

-from baserow.core.mixins import OrderableMixin, CreatedAndUpdatedOnMixin
 from baserow.contrib.database.fields.exceptions import (
     OrderByFieldNotFound, OrderByFieldNotPossible, FilterFieldNotFound
 )
-from baserow.contrib.database.views.registries import view_filter_type_registry
+from baserow.contrib.database.fields.field_filters import FilterBuilder, \
+    FILTER_TYPE_AND, FILTER_TYPE_OR
 from baserow.contrib.database.fields.registries import field_type_registry
-from baserow.contrib.database.views.models import FILTER_TYPE_AND, FILTER_TYPE_OR
 from baserow.contrib.database.views.exceptions import ViewFilterTypeNotAllowedForField
+from baserow.contrib.database.views.registries import view_filter_type_registry
+from baserow.core.mixins import OrderableMixin, CreatedAndUpdatedOnMixin

 deconstruct_filter_key_regex = re.compile(
     r'filter__field_([0-9]+)__([a-zA-Z0-9_]*)$'

@@ -41,9 +40,10 @@ class TableModelQuerySet(models.QuerySet):

     def search_all_fields(self, search):
         """
-        Searches very broad in all supported fields with the given search query. If
-        the primary key value matches then that result would be returned and if a
-        char/text field contains the search query then that result would be returned.
+        Performs a very broad search across all supported fields with the given
+        search query. If the primary key value matches then that result will be
+        returned, otherwise all field types other than link row and boolean fields
+        are currently searched.

         :param search: The search query.
         :type search: str

@@ -51,39 +51,22 @@ class TableModelQuerySet(models.QuerySet):

         :rtype: QuerySet
         """

-        search_queries = models.Q()
-        excluded = ('order', 'created_on', 'updated_on')
-
-        for field in self.model._meta.get_fields():
-            if field.name in excluded:
-                continue
-
-            if (
-                isinstance(field, models.CharField) or
-                isinstance(field, models.TextField)
-            ):
-                search_queries = search_queries | models.Q(**{
-                    f'{field.name}__icontains': search
-                })
-            elif (
-                isinstance(field, models.AutoField) or
-                isinstance(field, models.IntegerField)
-            ):
-                try:
-                    search_queries = search_queries | models.Q(**{
-                        f'{field.name}': int(search)
-                    })
-                except ValueError:
-                    pass
-            elif isinstance(field, models.DecimalField):
-                try:
-                    search_queries = search_queries | models.Q(**{
-                        f'{field.name}': Decimal(search)
-                    })
-                except (ValueError, DecimalException):
-                    pass
-
-        return self.filter(search_queries) if len(search_queries) > 0 else self
+        filter_builder = FilterBuilder(filter_type=FILTER_TYPE_OR).filter(
+            Q(id__contains=search)
+        )
+        for field_object in self.model._field_objects.values():
+            field_name = field_object['name']
+            model_field = self.model._meta.get_field(field_name)
+
+            sub_filter = field_object['type'].contains_query(
+                field_name,
+                search,
+                model_field,
+                field_object['field']
+            )
+            filter_builder.filter(sub_filter)
+
+        return filter_builder.apply_to_queryset(self)

     def order_by_fields_string(self, order_string):
         """

@@ -165,7 +148,7 @@ class TableModelQuerySet(models.QuerySet):

         if filter_type not in [FILTER_TYPE_AND, FILTER_TYPE_OR]:
             raise ValueError(f'Unknown filter type {filter_type}.')

-        q_filters = Q()
+        filter_builder = FilterBuilder(filter_type=filter_type)

         for key, values in filter_object.items():
             matches = deconstruct_filter_key_regex.match(key)

@@ -180,8 +163,9 @@ class TableModelQuerySet(models.QuerySet):

                     field_id, f'Field {field_id} does not exist.'
                 )

-            field_name = self.model._field_objects[field_id]['name']
-            field_type = self.model._field_objects[field_id]['type'].type
+            field_object = self.model._field_objects[field_id]
+            field_name = field_object['name']
+            field_type = field_object['type'].type
             model_field = self.model._meta.get_field(field_name)
             view_filter_type = view_filter_type_registry.get(matches[2])

@@ -195,27 +179,16 @@ class TableModelQuerySet(models.QuerySet):

                 values = [values]

             for value in values:
-                q_filter = view_filter_type.get_filter(
-                    field_name,
-                    value,
-                    model_field
-                )
-
-                view_filter_annotation = view_filter_type.get_annotation(
-                    field_name,
-                    value
-                )
-                if view_filter_annotation:
-                    self = self.annotate(**view_filter_annotation)
-
-                # Depending on filter type we are going to combine the Q either as
-                # AND or as OR.
-                if filter_type == FILTER_TYPE_AND:
-                    q_filters &= q_filter
-                elif filter_type == FILTER_TYPE_OR:
-                    q_filters |= q_filter
-
-        return self.filter(q_filters)
+                filter_builder.filter(
+                    view_filter_type.get_filter(
+                        field_name,
+                        value,
+                        model_field,
+                        field_object['field']
+                    )
+                )
+
+        return filter_builder.apply_to_queryset(self)


 class TableModelManager(models.Manager):
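Usage sketch of the rewritten search; `table` is a placeholder for any Table instance:

model = table.get_model()
results = model.objects.all().search_all_fields('john')
# OR-combines Q(id__contains='john') with every field type's contains_query,
# annotating first so AnnotatedQ-based filters (file names, dates, single
# selects) can be applied within the same queryset.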
backend/src/baserow/contrib/database/views/handler.py

@@ -1,24 +1,24 @@

-from django.db.models import Q, F
+from django.db.models import F

-from baserow.core.utils import extract_allowed, set_allowed_attrs
-from baserow.contrib.database.fields.registries import field_type_registry
-from baserow.contrib.database.fields.models import Field
 from baserow.contrib.database.fields.exceptions import FieldNotInTable
+from baserow.contrib.database.fields.models import Field
+from baserow.contrib.database.fields.registries import field_type_registry
+from baserow.core.utils import extract_allowed, set_allowed_attrs
 from .exceptions import (
     ViewDoesNotExist, UnrelatedFieldError, ViewFilterDoesNotExist,
     ViewFilterNotSupported, ViewFilterTypeNotAllowedForField, ViewSortDoesNotExist,
     ViewSortNotSupported, ViewSortFieldAlreadyExist, ViewSortFieldNotSupported
 )
-from .registries import view_type_registry, view_filter_type_registry
 from .models import (
-    View, GridViewFieldOptions, ViewFilter, ViewSort, FILTER_TYPE_AND, FILTER_TYPE_OR
+    View, GridViewFieldOptions, ViewFilter, ViewSort
 )
+from .registries import view_type_registry, view_filter_type_registry
 from .signals import (
     view_created, view_updated, view_deleted, view_filter_created, view_filter_updated,
     view_filter_deleted, view_sort_created, view_sort_updated, view_sort_deleted,
     grid_view_field_options_updated
 )
+from baserow.contrib.database.fields.field_filters import FilterBuilder


 class ViewHandler:

@@ -160,7 +160,7 @@ class ViewHandler:

         :param user: The user on whose behalf the request is made.
         :type user: User
         :param grid_view: The grid view for which the field options need to be
             updated.
-        :type grid_view: Model
+        :type grid_view: GridView
         :param field_options: A dict with the field ids as the key and a dict
             containing the values that need to be updated as value.
         :type field_options: dict

@@ -236,7 +236,7 @@ class ViewHandler:

         if view.filters_disabled:
             return queryset

-        q_filters = Q()
+        filter_builder = FilterBuilder(filter_type=view.filter_type)

         for view_filter in view.viewfilter_set.all():
             # If the to be filtered field is not present in the `_field_objects` we

@@ -245,32 +245,21 @@ class ViewHandler:

                 raise ValueError(f'The table model does not contain field '
                                  f'{view_filter.field_id}.')

-            field_name = model._field_objects[view_filter.field_id]['name']
+            field_object = model._field_objects[view_filter.field_id]
+            field_name = field_object['name']
             model_field = model._meta.get_field(field_name)
             view_filter_type = view_filter_type_registry.get(view_filter.type)
-            q_filter = view_filter_type.get_filter(
-                field_name,
-                view_filter.value,
-                model_field
-            )
-
-            view_filter_annotation = view_filter_type.get_annotation(
-                field_name,
-                view_filter.value
-            )
-            if view_filter_annotation:
-                queryset = queryset.annotate(**view_filter_annotation)
-
-            # Depending on filter type we are going to combine the Q either as AND
-            # or as OR.
-            if view.filter_type == FILTER_TYPE_AND:
-                q_filters &= q_filter
-            elif view.filter_type == FILTER_TYPE_OR:
-                q_filters |= q_filter
-
-        queryset = queryset.filter(q_filters)
-
-        return queryset
+
+            filter_builder.filter(
+                view_filter_type.get_filter(
+                    field_name,
+                    view_filter.value,
+                    model_field,
+                    field_object['field']
+                )
+            )
+
+        return filter_builder.apply_to_queryset(queryset)

     def get_filter(self, user, view_filter_id, base_queryset=None):
         """
backend/src/baserow/contrib/database/views/models.py

@@ -1,14 +1,14 @@

-from django.db import models
 from django.contrib.contenttypes.models import ContentType
+from django.db import models

+from baserow.contrib.database.fields.field_filters import (
+    FILTER_TYPE_AND, FILTER_TYPE_OR
+)
+from baserow.contrib.database.fields.models import Field
 from baserow.core.mixins import (
     OrderableMixin, PolymorphicContentTypeMixin, CreatedAndUpdatedOnMixin
 )
-from baserow.contrib.database.fields.models import Field


-FILTER_TYPE_AND = 'AND'
-FILTER_TYPE_OR = 'OR'
 FILTER_TYPES = (
     (FILTER_TYPE_AND, 'And'),
     (FILTER_TYPE_OR, 'Or')

@@ -159,6 +159,9 @@ class GridViewFieldOptions(models.Model):

     # abstraction in the web-frontend.
     width = models.PositiveIntegerField(default=200)
     hidden = models.BooleanField(default=False)
+    # The default value is the maximum value of the small integer field because a
+    # newly created field must always be last.
+    order = models.SmallIntegerField(default=32767)

     class Meta:
         ordering = ('field_id',)
backend/src/baserow/contrib/database/views/registries.py

@@ -1,16 +1,17 @@

 from baserow.core.registry import (
     Instance, Registry, ModelInstanceMixin, ModelRegistryMixin,
     CustomFieldsInstanceMixin, CustomFieldsRegistryMixin, APIUrlsRegistryMixin,
-    APIUrlsInstanceMixin
+    APIUrlsInstanceMixin, ImportExportMixin
 )
 from .exceptions import (
     ViewTypeAlreadyRegistered, ViewTypeDoesNotExist, ViewFilterTypeAlreadyRegistered,
     ViewFilterTypeDoesNotExist
 )
+from baserow.contrib.database.fields.field_filters import OptionallyAnnotatedQ


 class ViewType(APIUrlsInstanceMixin, CustomFieldsInstanceMixin, ModelInstanceMixin,
-               Instance):
+               ImportExportMixin, Instance):
     """
     This abstract class represents a custom view type that can be added to the
     view type registry. It must be extended so customisation can be done. Each view type
@ -57,6 +58,120 @@ class ViewType(APIUrlsInstanceMixin, CustomFieldsInstanceMixin, ModelInstanceMix
|
|||
sort to the view.
|
||||
"""
|
||||
|
||||
def export_serialized(self, view):
|
||||
"""
|
||||
Exports the view to a serialized dict that can be imported by the
|
||||
`import_serialized` method. This dict is also JSON serializable.
|
||||
|
||||
:param view: The view instance that must be exported.
|
||||
:type view: View
|
||||
:return: The exported view.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
serialized = {
|
||||
'id': view.id,
|
||||
'type': self.type,
|
||||
'name': view.name,
|
||||
'order': view.order
|
||||
}
|
||||
|
||||
if self.can_filter:
|
||||
serialized['filter_type'] = view.filter_type
|
||||
serialized['filters_disabled'] = view.filters_disabled
|
||||
serialized['filters'] = [
|
||||
{
|
||||
'id': view_filter.id,
|
||||
'field_id': view_filter.field_id,
|
||||
'type': view_filter.type,
|
||||
'value': view_filter_type_registry.get(
|
||||
view_filter.type
|
||||
).get_export_serialized_value(view_filter.value)
|
||||
}
|
||||
for view_filter in view.viewfilter_set.all()
|
||||
]
|
||||
|
||||
if self.can_sort:
|
||||
serialized['sortings'] = [
|
||||
{
|
||||
'id': sort.id,
|
||||
'field_id': sort.field_id,
|
||||
'order': sort.order
|
||||
}
|
||||
for sort in view.viewsort_set.all()
|
||||
]
|
||||
|
||||
return serialized
|
||||
|
||||
def import_serialized(self, table, serialized_values, id_mapping):
|
||||
"""
|
||||
Imported an exported serialized view dict that was exported via the
|
||||
`export_serialized` method. Note that all the fields must be imported first
|
||||
because we depend on the new field id to be in the mapping.
|
||||
|
||||
:param table: The table where the view should be added to.
|
||||
:type table: Table
|
||||
:param serialized_values: The exported serialized view values that need to
|
||||
be imported.
|
||||
:type serialized_values: dict
|
||||
:param id_mapping: The map of exported ids to newly created ids that must be
|
||||
updated when a new instance has been created.
|
||||
:type id_mapping: dict
|
||||
:return: The newly created view instance.
|
||||
:rtype: View
|
||||
"""
|
||||
|
||||
from .models import ViewFilter, ViewSort
|
||||
|
||||
if 'database_views' not in id_mapping:
|
||||
id_mapping['database_views'] = {}
|
||||
id_mapping['database_view_filters'] = {}
|
||||
id_mapping['database_view_sortings'] = {}
|
||||
|
||||
serialized_copy = serialized_values.copy()
|
||||
view_id = serialized_copy.pop('id')
|
||||
serialized_copy.pop('type')
|
||||
filters = serialized_copy.pop('filters') if self.can_filter else []
|
||||
sortings = serialized_copy.pop('sortings') if self.can_sort else []
|
||||
view = self.model_class.objects.create(table=table, **serialized_copy)
|
||||
id_mapping['database_views'][view_id] = view.id
|
||||
|
||||
if self.can_filter:
|
||||
for view_filter in filters:
|
||||
view_filter_type = view_filter_type_registry.get(view_filter['type'])
|
||||
view_filter_copy = view_filter.copy()
|
||||
view_filter_id = view_filter_copy.pop('id')
|
||||
view_filter_copy['field_id'] = (
|
||||
id_mapping['database_fields'][view_filter_copy['field_id']]
|
||||
)
|
||||
view_filter_copy['value'] = (
|
||||
view_filter_type.set_import_serialized_value(
|
||||
view_filter_copy['value'],
|
||||
id_mapping
|
||||
)
|
||||
)
|
||||
view_filter_object = ViewFilter.objects.create(
|
||||
view=view,
|
||||
**view_filter_copy
|
||||
)
|
||||
id_mapping['database_view_filters'][view_filter_id] = (
|
||||
view_filter_object.id
|
||||
)
|
||||
|
||||
if self.can_sort:
|
||||
for view_sort in sortings:
|
||||
view_sort_copy = view_sort.copy()
|
||||
view_sort_id = view_sort_copy.pop('id')
|
||||
view_sort_copy['field_id'] = (
|
||||
id_mapping['database_fields'][view_sort_copy['field_id']]
|
||||
)
|
||||
view_sort_object = ViewSort.objects.create(view=view, **view_sort_copy)
|
||||
id_mapping['database_view_sortings'][view_sort_id] = (
|
||||
view_sort_object.id
|
||||
)
|
||||
|
||||
return view
|
||||
|
||||
|
||||
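The export format is plain data, so a round trip only needs the view type instance and an `id_mapping` that already contains the new field ids. A minimal sketch, assuming the registry lives at `baserow.contrib.database.views.registries` and that the table's fields were imported first (the ids are hypothetical):

    from baserow.contrib.database.views.registries import view_type_registry

    grid_view_type = view_type_registry.get('grid')
    serialized = grid_view_type.export_serialized(view)

    # 'database_fields' must map the old field ids to the newly created ones,
    # otherwise the filter and sort field lookups above raise a KeyError.
    id_mapping = {'database_fields': {151: 201}}
    new_view = grid_view_type.import_serialized(new_table, serialized, id_mapping)
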
class ViewTypeRegistry(APIUrlsRegistryMixin, CustomFieldsRegistryMixin,
                       ModelRegistryMixin, Registry):

@@ -102,42 +217,54 @@ class ViewFilterType(Instance):
    can be used in combination with the field.
    """

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field) -> OptionallyAnnotatedQ:
        """
        Should return a Q object containing the requested filtering based on the
        provided arguments.
        Should return either a Q object or an AnnotatedQ containing the requested
        filtering and annotations based on the provided arguments.

        :param field_name: The name of the field that needs to be filtered.
        :type field_name: str
        :param value: The value that the field must be compared to.
        :type value: str
        :param model_field: The field extracted form the model.
        :param model_field: The field extracted from the model.
        :type model_field: models.Field
        :return: The Q object that does the filtering. This will later be added to the
            queryset in the correct way.
        :rtype: Q
        :param field: The instance of the underlying baserow field.
        :type field: Field
        :return: A Q or AnnotatedQ filter for this specific field, which will then be
            combined with other filters to generate the final total view filter.
        """

        raise NotImplementedError('Each view filter type must have its own get_filter '
                                  'method.')

    def get_annotation(self, field_name, value):
    def get_export_serialized_value(self, value) -> str:
        """
        Optional method allowing this ViewFilterType to annotate the queryset prior to
        the application of any Q filters returned by ViewFilterType.get_filter.
        This method is called before the filter value is exported. Here it can
        optionally be modified.

        Should return a dictionary which can be unpacked into an annotate call or None
        if you do not wish any annotation to be applied by your filter.

        :param field_name: The name of the field that needs to be filtered.
        :type field_name: str
        :param value: The value that the field must be compared to.
        :param value: The original value.
        :type value: str
        :return: The dict object that will be unpacked into an annotate call or None if
            no annotation needs to be done.
        :rtype: None or dict
        :return: The updated value.
        :rtype: str
        """

        return None
        return value

    def set_import_serialized_value(self, value, id_mapping) -> str:
        """
        This method is called before the filter value is imported. Here it can
        optionally be modified. If the value for example points to a field or select
        option id, it can be replaced with the correct value by doing a lookup in the
        id_mapping.

        :param value: The original exported value.
        :type value: str
        :param id_mapping: The map of exported ids to newly created ids that must be
            updated when a new instance has been created.
        :type id_mapping: dict
        :return: The new value that will be imported.
        :rtype: str
        """

        return value


class ViewFilterTypeRegistry(Registry):

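With the extra `field` argument, every filter type can now inspect the concrete Baserow field it runs against. A minimal sketch of a custom filter type using the new signature, assuming the standard `register` call on `view_filter_type_registry` (the `type` name and blank-check behaviour are made up for illustration):

    from django.db.models import Q

    from baserow.contrib.database.fields.field_types import TextFieldType
    from baserow.contrib.database.views.registries import (
        ViewFilterType, view_filter_type_registry
    )


    class NotBlankViewFilterType(ViewFilterType):
        """Hypothetical filter that keeps rows whose text value is not blank."""

        type = 'not_blank_example'
        compatible_field_types = [TextFieldType.type]

        def get_filter(self, field_name, value, model_field, field):
            # A plain Q object is still a valid OptionallyAnnotatedQ.
            return ~Q(**{field_name: ''})


    view_filter_type_registry.register(NotBlankViewFilterType())
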
@@ -1,21 +1,21 @@
from math import floor, ceil
from pytz import timezone
from decimal import Decimal
from math import floor, ceil

from dateutil import parser
from dateutil.parser import ParserError

from django.db.models import Q, IntegerField, BooleanField
from django.db.models.expressions import RawSQL
from django.db.models.fields.related import ManyToManyField, ForeignKey
from django.contrib.postgres.fields import JSONField
from django.db.models import Q, IntegerField, BooleanField
from django.db.models.fields.related import ManyToManyField, ForeignKey
from pytz import timezone

from baserow.contrib.database.fields.field_filters import filename_contains_filter, \
    OptionallyAnnotatedQ
from baserow.contrib.database.fields.field_types import (
    TextFieldType, LongTextFieldType, URLFieldType, NumberFieldType, DateFieldType,
    LinkRowFieldType, BooleanFieldType, EmailFieldType, FileFieldType,
    SingleSelectFieldType
    SingleSelectFieldType, PhoneNumberFieldType
)
from baserow.contrib.database.fields.registries import field_type_registry
from .registries import ViewFilterType


@@ -39,10 +39,11 @@ class EqualViewFilterType(ViewFilterType):
        URLFieldType.type,
        NumberFieldType.type,
        BooleanFieldType.type,
        EmailFieldType.type
        EmailFieldType.type,
        PhoneNumberFieldType.type
    ]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.

@@ -75,44 +76,8 @@ class FilenameContainsViewFilterType(ViewFilterType):
        FileFieldType.type
    ]

    def get_annotation(self, field_name, value):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.
        if value == '':
            return None

        # It is not possible to use Django's ORM to query for if one item in a JSONB
        # list has a key which contains a specified value.
        #
        # The closest thing the Django ORM provides is:
        #   queryset.filter(your_json_field__contains=[{"key":"value"}])
        # However this is an exact match, so in the above example [{"key":"value_etc"}]
        # would not match the filter.
        #
        # Instead we have to resort to RawSQL to use various built in PostgreSQL JSON
        # Array manipulation functions to be able to 'iterate' over a JSONB list
        # performing `like` on individual keys in said list.
        num_files_with_name_like_value = f"""
            EXISTS(
                SELECT attached_files ->> 'visible_name'
                FROM JSONB_ARRAY_ELEMENTS("{field_name}") as attached_files
                WHERE UPPER(attached_files ->> 'visible_name') LIKE UPPER(%s)
            )
        """
        query = RawSQL(num_files_with_name_like_value, params=[f"%{value}%"],
                       output_field=BooleanField())
        return {f"{field_name}_matches_visible_names": query}

    def get_filter(self, field_name, value, model_field):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.
        if value == '':
            return Q()

        # Check if the model_field has a file which matches the provided filter value.
        return Q(**{f'{field_name}_matches_visible_names': True})
    def get_filter(self, *args):
        return filename_contains_filter(*args)


class ContainsViewFilterType(ViewFilterType):

@@ -126,24 +91,16 @@ class ContainsViewFilterType(ViewFilterType):
        TextFieldType.type,
        LongTextFieldType.type,
        URLFieldType.type,
        EmailFieldType.type
        EmailFieldType.type,
        PhoneNumberFieldType.type,
        DateFieldType.type,
        SingleSelectFieldType.type,
        NumberFieldType.type,
    ]

    def get_filter(self, field_name, value, model_field):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.
        if value == '':
            return Q()

        # Check if the model_field accepts the value.
        try:
            model_field.get_prep_value(value)
            return Q(**{f'{field_name}__icontains': value})
        except Exception:
            pass

        return Q()
    def get_filter(self, field_name, value, model_field, field) -> OptionallyAnnotatedQ:
        field_type = field_type_registry.get_by_model(field)
        return field_type.contains_query(field_name, value, model_field, field)


class ContainsNotViewFilterType(NotViewFilterTypeMixin, ContainsViewFilterType):

@@ -160,7 +117,7 @@ class HigherThanViewFilterType(ViewFilterType):
    type = 'higher_than'
    compatible_field_types = [NumberFieldType.type]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.

@@ -191,7 +148,7 @@ class LowerThanViewFilterType(ViewFilterType):
    type = 'lower_than'
    compatible_field_types = [NumberFieldType.type]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        value = value.strip()

        # If an empty value has been provided we do not want to filter at all.

@@ -222,7 +179,7 @@ class DateEqualViewFilterType(ViewFilterType):
    type = 'date_equal'
    compatible_field_types = [DateFieldType.type]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        """
        Parses the provided value string and converts it to an aware datetime object.
        That object will be used to make a comparison with the provided field name.

@@ -267,7 +224,7 @@ class SingleSelectEqualViewFilterType(ViewFilterType):
    type = 'single_select_equal'
    compatible_field_types = [SingleSelectFieldType.type]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        value = value.strip()

        if value == '':

@@ -279,6 +236,14 @@ class SingleSelectEqualViewFilterType(ViewFilterType):
        except Exception:
            return Q()

    def set_import_serialized_value(self, value, id_mapping):
        try:
            value = int(value)
        except ValueError:
            return ''

        return str(id_mapping['database_field_select_options'].get(value, ''))


class SingleSelectNotEqualViewFilterType(NotViewFilterTypeMixin,
                                         SingleSelectEqualViewFilterType):

@@ -296,7 +261,7 @@ class BooleanViewFilterType(ViewFilterType):
    type = 'boolean'
    compatible_field_types = [BooleanFieldType.type]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        value = value.strip().lower()
        value = value in [
            'y',

@@ -335,10 +300,11 @@ class EmptyViewFilterType(ViewFilterType):
        LinkRowFieldType.type,
        EmailFieldType.type,
        FileFieldType.type,
        SingleSelectFieldType.type
        SingleSelectFieldType.type,
        PhoneNumberFieldType.type
    ]

    def get_filter(self, field_name, value, model_field):
    def get_filter(self, field_name, value, model_field, field):
        # If the model_field is a ManyToMany field we only have to check if it is None.
        if (
            isinstance(model_field, ManyToManyField) or

@@ -1,7 +1,7 @@
from django.urls import path, include

from .registries import ViewType
from .models import GridView
from .models import GridView, GridViewFieldOptions


class GridViewType(ViewType):

@@ -14,3 +14,51 @@ class GridViewType(ViewType):
        return [
            path('grid/', include(api_urls, namespace=self.type)),
        ]

    def export_serialized(self, grid):
        """
        Adds the serialized grid view options to the exported dict.
        """

        serialized = super().export_serialized(grid)

        serialized_field_options = []
        for field_option in grid.get_field_options():
            serialized_field_options.append({
                'id': field_option.id,
                'field_id': field_option.field_id,
                'width': field_option.width,
                'hidden': field_option.hidden,
                'order': field_option.order
            })

        serialized['field_options'] = serialized_field_options
        return serialized

    def import_serialized(self, table, serialized_values, id_mapping):
        """
        Imports the serialized grid view field options.
        """

        serialized_copy = serialized_values.copy()
        field_options = serialized_copy.pop('field_options')
        grid_view = super().import_serialized(table, serialized_copy, id_mapping)

        if 'database_grid_view_field_options' not in id_mapping:
            id_mapping['database_grid_view_field_options'] = {}

        for field_option in field_options:
            field_option_copy = field_option.copy()
            field_option_id = field_option_copy.pop('id')
            field_option_copy['field_id'] = (
                id_mapping['database_fields'][field_option['field_id']]
            )
            field_option_object = GridViewFieldOptions.objects.create(
                grid_view=grid_view,
                **field_option_copy
            )
            id_mapping['database_grid_view_field_options'][field_option_id] = (
                field_option_object.id
            )

        return grid_view

@@ -91,3 +91,16 @@ class GroupInvitationEmailMismatch(Exception):
    """
    Raised when the group invitation email is not the expected email address.
    """


class TemplateDoesNotExist(Exception):
    """
    Raised when the requested template does not exist in the database.
    """


class TemplateFileDoesNotExist(Exception):
    """
    Raised when the JSON template file does not exist in the
    APPLICATION_TEMPLATES_DIR directory.
    """

@@ -1,18 +1,25 @@
import os
import json
import hashlib
from pathlib import Path
from urllib.parse import urlparse, urljoin
from itsdangerous import URLSafeSerializer

from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.models import Q, Count

from baserow.core.user.utils import normalize_email_address

from .models import (
    Settings, Group, GroupUser, GroupInvitation, Application,
    GROUP_USER_PERMISSION_CHOICES, GROUP_USER_PERMISSION_ADMIN
    Settings, Group, GroupUser, GroupInvitation, Application, Template,
    TemplateCategory, GROUP_USER_PERMISSION_CHOICES, GROUP_USER_PERMISSION_ADMIN
)
from .exceptions import (
    GroupDoesNotExist, ApplicationDoesNotExist, BaseURLHostnameNotAllowed,
    GroupInvitationEmailMismatch, GroupInvitationDoesNotExist, GroupUserDoesNotExist,
    GroupUserAlreadyExists, IsNotAdminError
    GroupUserAlreadyExists, IsNotAdminError, TemplateFileDoesNotExist,
    TemplateDoesNotExist
)
from .utils import extract_allowed, set_allowed_attrs
from .registries import application_type_registry

@@ -23,6 +30,9 @@ from .signals import (
from .emails import GroupInvitationEmail


User = get_user_model()


class CoreHandler:
    def get_settings(self):
        """

@@ -159,17 +169,22 @@ class CoreHandler:
        group_id = group.id
        group_users = list(group.users.all())

        self._delete_group(group)

        group_deleted.send(self, group_id=group_id, group=group,
                           group_users=group_users, user=user)

    def _delete_group(self, group):
        """Deletes the provided group."""

        # Select all the applications so we can delete them via the handler which is
        # needed in order to call the pre_delete method for each application.
        applications = group.application_set.all().select_related('group')
        for application in applications:
            self.delete_application(user, application)
            self._delete_application(application)

        group.delete()

        group_deleted.send(self, group_id=group_id, group=group,
                           group_users=group_users, user=user)

    def order_groups(self, user, group_ids):
        """
        Changes the order of groups for a user.

@@ -617,7 +632,8 @@ class CoreHandler:

    def delete_application(self, user, application):
        """
        Deletes an existing application instance.
        Deletes an existing application instance if the user has access to the
        related group. The `application_deleted` signal is also called.

        :param user: The user on whose behalf the application is deleted.
        :type user: User

@@ -632,11 +648,251 @@ class CoreHandler:
        application.group.has_user(user, raise_error=True)

        application_id = application.id
        application = application.specific
        application_type = application_type_registry.get_by_model(application)
        application_type.pre_delete(user, application)

        application.delete()
        application = self._delete_application(application)

        application_deleted.send(self, application_id=application_id,
                                 application=application, user=user)

    def _delete_application(self, application):
        """Deletes an application and the related relations in the correct way."""

        application = application.specific
        application_type = application_type_registry.get_by_model(application)
        application_type.pre_delete(application)
        application.delete()
        return application

    def export_group_applications(self, group):
        """
        Exports the applications of a group to a list. They can later be imported via
        the `import_application_to_group` method. The result can be serialized to JSON.

        @TODO look into speed optimizations by streaming to a JSON file instead of
        generating the entire file in memory.

        :param group: The group of which the applications must be exported.
        :type group: Group
        :return: A list containing the exported applications.
        :rtype: list
        """

        exported_applications = []
        applications = group.application_set.all()
        for a in applications:
            application = a.specific
            application_type = application_type_registry.get_by_model(application)
            exported_application = application_type.export_serialized(application)
            exported_applications.append(exported_application)

        return exported_applications

    def import_application_to_group(self, group, exported_applications):
        """
        Imports multiple exported applications into the given group. It is compatible
        with an export of the `export_group_applications` method.

        @TODO look into speed optimizations by streaming from a JSON file instead of
        loading the entire file into memory.

        :param group: The group that the applications must be imported to.
        :type group: Group
        :param exported_applications: A list containing the applications generated by
            the `export_group_applications` method.
        :type exported_applications: list
        :return: The newly created applications based on the import and a dict
            containing a mapping of old ids to new ids.
        :rtype: list, dict
        """

        id_mapping = {}
        imported_applications = []
        for application in exported_applications:
            application_type = application_type_registry.get(application['type'])
            imported_application = application_type.import_serialized(
                group,
                application,
                id_mapping
            )
            imported_applications.append(imported_application)

        return imported_applications, id_mapping

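Taken together, these two methods give a complete copy mechanism for a group's applications. A minimal sketch of duplicating everything from one group into another, built only from the calls above (the group ids are hypothetical):

    from baserow.core.handler import CoreHandler
    from baserow.core.models import Group

    handler = CoreHandler()
    source = Group.objects.get(pk=1)
    target = Group.objects.get(pk=2)

    # The export is plain, JSON serializable data; the id_mapping returned by
    # the import records every old id -> new id translation that was made.
    exported = handler.export_group_applications(source)
    applications, id_mapping = handler.import_application_to_group(target, exported)
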
    def get_template(self, template_id, base_queryset=None):
        """
        Selects a template with the given id from the database.

        :param template_id: The identifier of the template that must be returned.
        :type template_id: int
        :param base_queryset: The base queryset from where to select the template
            object. This can for example be used to do a `prefetch_related`.
        :type base_queryset: Queryset
        :raises TemplateDoesNotExist: When the template with the provided id does not
            exist.
        :return: The requested template instance related to the provided id.
        :rtype: Template
        """

        if not base_queryset:
            base_queryset = Template.objects

        try:
            template = base_queryset.get(id=template_id)
        except Template.DoesNotExist:
            raise TemplateDoesNotExist(f'The template with id {template_id} does not '
                                       f'exist.')

        return template

    def sync_templates(self):
        """
        Synchronizes the JSON template files with the templates stored in the database.
        We need to have a copy in the database so that the user can live preview a
        template before installing. It will also make sure that the right categories
        exist and that old ones are deleted.

        If the template doesn't exist, a group can be created and we can import the
        export in that group. If the template already exists we check if the
        `export_hash` has changed; if so, it means the export has changed. Because we
        don't have updating capability, we delete the old group and create a new one
        where we can import the export into.
        """

        installed_templates = Template.objects.all().prefetch_related(
            'categories'
        ).select_related('group')
        installed_categories = list(TemplateCategory.objects.all())

        # Loop over the JSON template files in the directory to see which database
        # templates need to be created or updated.
        templates = list(Path(settings.APPLICATION_TEMPLATES_DIR).glob('*.json'))
        for template_file_path in templates:
            content = Path(template_file_path).read_text()
            parsed_json = json.loads(content)

            if 'baserow_template_version' not in parsed_json:
                continue

            slug = '.'.join(template_file_path.name.split('.')[:-1])
            installed_template = next(
                (t for t in installed_templates if t.slug == slug), None
            )
            hash_json = json.dumps(parsed_json['export'])
            export_hash = hashlib.sha256(hash_json.encode("utf-8")).hexdigest()
            keywords = (
                ','.join(parsed_json['keywords']) if 'keywords' in parsed_json else ''
            )

            # If the installed template and group exist, and if there is a hash
            # mismatch, we need to delete the old group and all the related
            # applications in it. This is because a new group will be created.
            if (
                installed_template and
                installed_template.group and
                installed_template.export_hash != export_hash
            ):
                self._delete_group(installed_template.group)

            # If the installed template does not yet exist or if there is an export
            # hash mismatch, which means the group has already been deleted, we can
            # create a new group and import the exported applications into that group.
            if not installed_template or installed_template.export_hash != export_hash:
                group = Group.objects.create(name=parsed_json['name'])
                self.import_application_to_group(group, parsed_json['export'])
            else:
                group = installed_template.group
                group.name = parsed_json['name']
                group.save()

            kwargs = {
                'name': parsed_json['name'],
                'icon': parsed_json['icon'],
                'export_hash': export_hash,
                'keywords': keywords,
                'group': group
            }

            if not installed_template:
                installed_template = Template.objects.create(slug=slug, **kwargs)
            else:
                # If the installed template already exists, we only need to update the
                # values to the latest version according to the JSON template.
                for key, value in kwargs.items():
                    setattr(installed_template, key, value)
                installed_template.save()

            # Loop over the categories related to the template and check which ones
            # already exist and which need to be created. Based on that we can create
            # a list of category ids that we can set for the template.
            template_category_ids = []
            for category_name in parsed_json['categories']:
                installed_category = next(
                    (c for c in installed_categories if c.name == category_name), None
                )
                if not installed_category:
                    installed_category = TemplateCategory.objects.create(
                        name=category_name
                    )
                    installed_categories.append(installed_category)
                template_category_ids.append(installed_category.id)

            installed_template.categories.set(template_category_ids)

        # Delete all the installed templates that were installed, but don't exist in
        # the template directory anymore.
        slugs = [
            '.'.join(template_file_path.name.split('.')[:-1])
            for template_file_path in templates
        ]
        for template in Template.objects.filter(~Q(slug__in=slugs)):
            self._delete_group(template.group)
            template.delete()

        # Delete all the categories that don't have any templates anymore.
        TemplateCategory.objects.annotate(
            num_templates=Count('templates')
        ).filter(num_templates=0).delete()

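`sync_templates` only inspects a handful of top-level keys: files without `baserow_template_version` are skipped, the file name minus `.json` becomes the slug, and `export` is exactly what `export_group_applications` produces. A minimal skeleton, with placeholder values, of a file that would be picked up from APPLICATION_TEMPLATES_DIR (compare the full applicant-tracker.json further below):

    {
      "baserow_template_version": 1,
      "name": "My template",
      "icon": "rocket",
      "keywords": ["example"],
      "categories": ["Example category"],
      "export": []
    }
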
    def install_template(self, user, group, template):
        """
        Installs the exported applications of a template into the given group if the
        provided user has access to that group.

        :param user: The user on whose behalf the template is installed.
        :type user: User
        :param group: The group where the template applications must be imported into.
        :type group: Group
        :param template: The template that must be installed.
        :type template: Template
        :return: The imported applications and a dict mapping old ids to new ids.
        :rtype: list, dict
        """

        group.has_user(user, raise_error=True)

        file_name = f'{template.slug}.json'
        template_path = Path(os.path.join(
            settings.APPLICATION_TEMPLATES_DIR,
            file_name
        ))

        if not template_path.exists():
            raise TemplateFileDoesNotExist(
                f'The template with file name {file_name} does not exist. You might '
                f'need to run the `sync_templates` management command.'
            )

        content = template_path.read_text()
        parsed_json = json.loads(content)
        applications, id_mapping = self.import_application_to_group(
            group,
            parsed_json['export']
        )

        # Because a user has initiated the creation of applications, we need to
        # call the `application_created` signal for each created application.
        for application in applications:
            application_type = application_type_registry.get_by_model(application)
            application_created.send(self, application=application, user=user,
                                     type_name=application_type.type)

        return applications, id_mapping

0 backend/src/baserow/core/management/commands/__init__.py Normal file
@@ -0,0 +1,43 @@
import sys
import json

from django.core.management.base import BaseCommand

from baserow.core.models import Group
from baserow.core.handler import CoreHandler


class Command(BaseCommand):
    help = (
        'Exports all the applications of a group to a JSON file that can later be '
        'imported via the `import_group_applications` management command. This export '
        'can also be used as a template.'
    )

    def add_arguments(self, parser):
        parser.add_argument(
            'group_id',
            type=int,
            help='The id of the group that must be exported.'
        )
        parser.add_argument(
            '--indent',
            action='store_true',
            help='Indicates if the JSON must be formatted and indented to improve '
                 'readability.',
        )

    def handle(self, *args, **options):
        group_id = options['group_id']
        indent = options['indent']

        try:
            group = Group.objects.get(pk=group_id)
        except Group.DoesNotExist:
            self.stdout.write(self.style.ERROR(f'The group with id {group_id} was not '
                                               f'found.'))
            sys.exit(1)

        exported_applications = CoreHandler().export_group_applications(group)
        exported_json = json.dumps(exported_applications, indent=4 if indent else None)
        self.stdout.write(exported_json)

@@ -0,0 +1,48 @@
import sys
import json
import argparse

from django.db import transaction
from django.core.management.base import BaseCommand

from baserow.core.models import Group
from baserow.core.handler import CoreHandler


class Command(BaseCommand):
    help = (
        'Imports applications in JSON format and adds them to a group. Exports '
        'generated by the `export_group_applications` management command are '
        'compatible.'
    )

    def add_arguments(self, parser):
        parser.add_argument(
            'group_id',
            type=int,
            help='The id of the group where the newly created applications must be '
                 'added to.'
        )
        parser.add_argument(
            'json',
            nargs='?',
            type=argparse.FileType('r'),
            default=sys.stdin
        )

    @transaction.atomic
    def handle(self, *args, **options):
        group_id = options['group_id']
        json_file = options['json']

        try:
            group = Group.objects.get(pk=group_id)
        except Group.DoesNotExist:
            self.stdout.write(self.style.ERROR(f'The group with id {group_id} was not '
                                               f'found.'))
            sys.exit(1)

        content = json.load(json_file)
        handler = CoreHandler()
        applications, _ = handler.import_application_to_group(group, content)
        self.stdout.write(f'{len(applications)} applications have been imported.')

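Chained together, the two commands copy a group's applications between groups or instances. A usage sketch, assuming group 1 as the source, group 2 as the destination, and that you invoke `manage.py` however your environment normally does (the ids and file name are hypothetical):

    $ python manage.py export_group_applications 1 --indent > group_1.json
    $ python manage.py import_group_applications 2 group_1.json

Because the import command falls back to stdin, the two can also be piped directly into each other.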
@@ -0,0 +1,16 @@
from django.db import transaction
from django.core.management.base import BaseCommand

from baserow.core.handler import CoreHandler


class Command(BaseCommand):
    help = (
        'Synchronizes all the templates stored in the database with the JSON files in '
        'the templates directory. This command must be run every time a template '
        'changes.'
    )

    @transaction.atomic
    def handle(self, *args, **options):
        CoreHandler().sync_templates()

@@ -0,0 +1,103 @@
# Generated by Django 2.2.11 on 2021-04-04 13:01

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0005_settings'),
    ]

    operations = [
        migrations.CreateModel(
            name='TemplateCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32)),
            ],
            options={'ordering': ('name',)},
        ),
        migrations.CreateModel(
            name='Template',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('slug', models.SlugField(
                    help_text='The template slug that is used to match the template '
                              'with the JSON file name.'
                )),
                ('icon', models.CharField(
                    help_text='The font awesome class name that can be used for '
                              'displaying purposes.',
                    max_length=32
                )),
                ('categories', models.ManyToManyField(
                    related_name='templates', to='core.TemplateCategory'
                )),
                ('group', models.ForeignKey(
                    help_text='The group containing the applications related to the '
                              'template. The read endpoints related to that group are '
                              'publicly accessible for preview purposes.',
                    on_delete=django.db.models.deletion.SET_NULL,
                    null=True,
                    to='core.Group'
                )),
                ('export_hash', models.CharField(
                    blank=True,
                    help_text='The export hash that is used to compare if the '
                              'exported group applications have changed when syncing '
                              'the templates.',
                    max_length=64
                )),
                ('keywords', models.TextField(
                    blank=True,
                    default='',
                    help_text='Keywords related to the template that can be used for '
                              'search.'
                )),
            ],
            options={'ordering': ('name',)},
        ),
    ]

@@ -2,6 +2,8 @@ from django.db import models
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType

from rest_framework.exceptions import NotAuthenticated

from baserow.core.user_files.models import UserFile

from .managers import GroupQuerySet

@@ -50,7 +52,8 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):

    objects = GroupQuerySet.as_manager()

    def has_user(self, user, permissions=None, raise_error=False):
    def has_user(self, user, permissions=None, raise_error=False,
                 allow_if_template=False):
        """
        Checks if the provided user belongs to the group.

@@ -62,6 +65,9 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):
        :param raise_error: If True an error will be raised when the user does not
            belong to the group or doesn't have the right permissions.
        :type raise_error: bool
        :param allow_if_template: If true and if the group is related to a template,
            then True is always returned and no exception will be raised.
        :type allow_if_template: bool
        :raises UserNotInGroupError: If the user does not belong to the group.
        :raises UserInvalidGroupPermissionsError: If the user does belong to the group,
            but doesn't have the right permissions.

@@ -72,6 +78,14 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):
        if permissions and not isinstance(permissions, list):
            permissions = [permissions]

        if allow_if_template and self.template_set.all().exists():
            return True
        elif not bool(user and user.is_authenticated):
            if raise_error:
                raise NotAuthenticated()
            else:
                return False

        queryset = GroupUser.objects.filter(
            user_id=user.id,
            group_id=self.id

@@ -179,3 +193,46 @@ class Application(CreatedAndUpdatedOnMixin, OrderableMixin,
    def get_last_order(cls, group):
        queryset = Application.objects.filter(group=group)
        return cls.get_highest_order_of_queryset(queryset) + 1


class TemplateCategory(models.Model):
    name = models.CharField(max_length=32)

    class Meta:
        ordering = ('name',)


class Template(models.Model):
    name = models.CharField(max_length=64)
    slug = models.SlugField(
        help_text='The template slug that is used to match the template with the JSON '
                  'file name.'
    )
    icon = models.CharField(
        max_length=32,
        help_text='The font awesome class name that can be used for displaying '
                  'purposes.'
    )
    categories = models.ManyToManyField(TemplateCategory, related_name='templates')
    group = models.ForeignKey(
        Group,
        on_delete=models.SET_NULL,
        null=True,
        help_text='The group containing the applications related to the template. The '
                  'read endpoints related to that group are publicly accessible for '
                  'preview purposes.'
    )
    export_hash = models.CharField(
        max_length=64,
        blank=True,
        help_text='The export hash that is used to compare if the exported group '
                  'applications have changed when syncing the templates.'
    )
    keywords = models.TextField(
        default='',
        blank=True,
        help_text='Keywords related to the template that can be used for search.'
    )

    class Meta:
        ordering = ('name',)

@@ -1,6 +1,6 @@
from .registry import (
    Instance, Registry, ModelInstanceMixin, ModelRegistryMixin, APIUrlsRegistryMixin,
    APIUrlsInstanceMixin
    APIUrlsInstanceMixin, ImportExportMixin
)
from .exceptions import ApplicationTypeAlreadyRegistered, ApplicationTypeDoesNotExist


@@ -107,7 +107,8 @@ class PluginRegistry(APIUrlsRegistryMixin, Registry):
        return urls


class ApplicationType(APIUrlsInstanceMixin, ModelInstanceMixin, Instance):
class ApplicationType(APIUrlsInstanceMixin, ModelInstanceMixin, ImportExportMixin,
                      Instance):
    """
    This abstract class represents a custom application that can be added to the
    application registry. It must be extended so customisation can be done. Each

@@ -150,6 +151,52 @@ class ApplicationType(APIUrlsInstanceMixin, ModelInstanceMixin, Instance):
        :type application: Application
        """

    def export_serialized(self, application):
        """
        Exports the application to a serialized dict that can be imported by the
        `import_serialized` method. The dict is JSON serializable.

        :param application: The application that must be exported.
        :type application: Application
        :return: The exported and serialized application.
        :rtype: dict
        """

        return {
            'id': application.id,
            'name': application.name,
            'order': application.order,
            'type': self.type
        }

    def import_serialized(self, group, serialized_values, id_mapping):
        """
        Imports the serialized application exported by the `export_serialized` method
        as a new application into a group.

        :param group: The group that the application must be added to.
        :type group: Group
        :param serialized_values: The exported serialized values by the
            `export_serialized` method.
        :type serialized_values: dict
        :param id_mapping: The map of exported ids to newly created ids that must be
            updated when a new instance has been created.
        :type id_mapping: dict
        :return: The newly created application.
        :rtype: Application
        """

        if 'applications' not in id_mapping:
            id_mapping['applications'] = {}

        serialized_copy = serialized_values.copy()
        application_id = serialized_copy.pop('id')
        serialized_copy.pop('type')
        application = self.model_class.objects.create(group=group, **serialized_copy)
        id_mapping['applications'][application_id] = application.id

        return application


class ApplicationTypeRegistry(APIUrlsRegistryMixin, ModelRegistryMixin, Registry):
    """

@@ -155,6 +155,43 @@ class MapAPIExceptionsInstanceMixin:
        yield


class ImportExportMixin:
    def export_serialized(self, instance):
        """
        Should return a serialized version of the provided instance. It must be
        JSON serializable and it must be possible to import it via the
        `import_serialized` method.

        :param instance: The instance that must be serialized and exported. Could be
            any object type because it depends on the type instance that uses this
            mixin.
        :type instance: Object
        :return: Serialized version of the instance.
        :rtype: dict
        """

        raise NotImplementedError('The export_serialized method must be implemented.')

    def import_serialized(self, parent, serialized_values, id_mapping):
        """
        Should import and create the correct instances in the database based on the
        serialized values exported by the `export_serialized` method. It should create
        a copy. An entry to the mapping could be made if a new instance is created.

        :param parent: Optionally a parent instance can be provided here.
        :type parent: Object
        :param serialized_values: The values that must be inserted.
        :type serialized_values: dict
        :param id_mapping: The map of exported ids to newly created ids that must be
            updated when a new instance has been created.
        :type id_mapping: dict
        :return: The newly created instance.
        :rtype: Object
        """

        raise NotImplementedError('The import_serialized method must be implemented.')


class Registry(object):
    name = None
    """The unique name that is used when raising exceptions."""

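Any registry instance can opt into the export system by implementing this pair, which is what `ApplicationType` and `ViewType` above do. A minimal sketch for a hypothetical instance type whose state is just a name (`ExampleThing` and the mapping key are made up for illustration):

    class ExampleThingType(ImportExportMixin, Instance):
        type = 'example_thing'

        def export_serialized(self, instance):
            # Only plain, JSON serializable values may end up in the dict.
            return {'id': instance.id, 'name': instance.name}

        def import_serialized(self, parent, serialized_values, id_mapping):
            serialized_copy = serialized_values.copy()
            old_id = serialized_copy.pop('id')
            # Create a copy instead of reusing the exported primary key, and
            # record the translation so later imports can resolve references.
            instance = ExampleThing.objects.create(parent=parent, **serialized_copy)
            id_mapping.setdefault('example_things', {})[old_id] = instance.id
            return instance
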
@@ -104,6 +104,13 @@ class UserHandler:

        user = User(first_name=name, email=email, username=email)
        user.set_password(password)

        if not User.objects.exists():
            # This is the first ever user created in this baserow instance and
            # therefore the administrator user, so let's give them staff rights so
            # they can set baserow wide settings.
            user.is_staff = True

        user.save()

        if group_invitation_token:

@@ -14,7 +14,14 @@ class FileSizeTooLargeError(Exception):


class FileURLCouldNotBeReached(Exception):
    """Raised when the provided URL could not be reached."""
    """
    Raised when the provided URL could not be reached or points to an internal
    service.
    """


class InvalidFileURLError(Exception):
    """Raised when the provided file URL is invalid."""


class InvalidUserFileNameError(Exception):

@@ -1,9 +1,12 @@
import pathlib
import mimetypes

from os.path import join
from io import BytesIO
from urllib.parse import urlparse

import requests
import advocate
from advocate.exceptions import UnacceptableAddressException
from requests.exceptions import RequestException

from PIL import Image, ImageOps

@@ -16,7 +19,7 @@ from baserow.core.utils import sha256_hash, stream_size, random_string, truncate

from .exceptions import (
    InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
    MaximumUniqueTriesError
    MaximumUniqueTriesError, InvalidFileURLError
)
from .models import UserFile


@@ -241,15 +244,21 @@ class UserFileHandler:
        :param storage: The storage where the file must be saved to.
        :type storage: Storage
        :raises FileURLCouldNotBeReached: If the file could not be downloaded from
            the URL.
            the URL or if it points to an internal service.
        :raises InvalidFileURLError: If the provided file url is invalid.
        :return: The newly created user file.
        :rtype: UserFile
        """

        parsed_url = urlparse(url)

        if parsed_url.scheme not in ['http', 'https']:
            raise InvalidFileURLError('Only http and https are allowed.')

        file_name = url.split('/')[-1]

        try:
            response = requests.get(url, stream=True, timeout=10)
            response = advocate.get(url, stream=True, timeout=10)

            if not response.ok:
                raise FileURLCouldNotBeReached('The response did not respond with an '

@@ -259,7 +268,7 @@ class UserFileHandler:
                settings.USER_FILE_SIZE_LIMIT + 1,
                decode_content=True
            )
        except RequestException:
        except (RequestException, UnacceptableAddressException):
            raise FileURLCouldNotBeReached('The provided URL could not be reached.')

        file = SimpleUploadedFile(file_name, content)

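The switch from `requests.get` to `advocate.get` is the actual SSRF protection here: advocate exposes the same request API as requests but validates every resolved address first, refusing private and link-local ranges. A small sketch of the behaviour this change relies on (the URL is a hypothetical internal address):

    import advocate
    from advocate.exceptions import UnacceptableAddressException

    try:
        # Same call signature as requests.get, but addresses such as
        # 127.0.0.1, 10.0.0.0/8 or 169.254.169.254 are rejected.
        response = advocate.get('http://169.254.169.254/latest/meta-data', timeout=10)
    except UnacceptableAddressException:
        print('Refused to fetch an internal address.')
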
@@ -42,6 +42,9 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
        user = self.scope['user']
        web_socket_id = self.scope['web_socket_id']

        if not user:
            return

        # If the user has already joined another page we need to discard that
        # page first before we can join a new one.
        await self.discard_current_page()

@@ -76,7 +79,7 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
            'parameters': parameters
        })

    async def discard_current_page(self):
    async def discard_current_page(self, send_confirmation=True):
        """
        If the user has subscribed to another page, they will be unsubscribed from
        the last page.

@@ -94,11 +97,12 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
        del self.scope['page']
        del self.scope['page_parameters']

        await self.send_json({
            'type': 'page_discard',
            'page': page_type,
            'parameters': page_parameters
        })
        if send_confirmation:
            await self.send_json({
                'type': 'page_discard',
                'page': page_type,
                'parameters': page_parameters
            })

    async def broadcast_to_users(self, event):
        """

@@ -139,5 +143,5 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
        await self.send_json(payload)

    async def disconnect(self, message):
        await self.discard_current_page()
        await self.discard_current_page(send_confirmation=False)
        await self.channel_layer.group_discard('users', self.channel_name)

939
backend/templates/applicant-tracker.json
Normal file
939
backend/templates/applicant-tracker.json
Normal file
|
@ -0,0 +1,939 @@
|
|||
{
|
||||
"baserow_template_version": 1,
|
||||
"name": "Applicant tracker",
|
||||
"icon": "user-plus",
|
||||
"keywords": ["applicant", "vacancy", "job", "offers", "candidates"],
|
||||
"categories": ["Local Business", "Human Resources"],
|
||||
"export": [
|
||||
{
|
||||
"id": 9,
|
||||
"name": "Applicant Tracker",
|
||||
"order": 1,
|
||||
"type": "database",
|
||||
"tables": [
|
||||
{
|
||||
"id": 27,
|
||||
"name": "Applicants",
|
||||
"order": 1,
|
||||
"fields": [
|
||||
{
|
||||
"id": 151,
|
||||
"type": "text",
|
||||
"name": "Name",
|
||||
"order": 0,
|
||||
"primary": true,
|
||||
"text_default": ""
|
||||
},
|
||||
{
|
||||
"id": 160,
|
||||
"type": "single_select",
|
||||
"name": "Stage",
|
||||
"order": 1,
|
||||
"primary": false,
|
||||
"select_options": [
|
||||
{
|
||||
"id": 48,
|
||||
"value": "Make appointment",
|
||||
"color": "light-gray",
|
||||
"order": 0
|
||||
},
|
||||
{
|
||||
"id": 49,
|
||||
"value": "Interviewing",
|
||||
"color": "light-orange",
|
||||
"order": 1
|
||||
},
|
||||
{
|
||||
"id": 50,
|
||||
"value": "Decision needed",
|
||||
"color": "light-blue",
|
||||
"order": 2
|
||||
},
|
||||
{
|
||||
"id": 51,
|
||||
"value": "Hired",
|
||||
"color": "light-green",
|
||||
"order": 3
|
||||
},
|
||||
{
|
||||
"id": 63,
|
||||
"value": "Rejected",
|
||||
"color": "light-red",
|
||||
"order": 4
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": 161,
|
||||
"type": "link_row",
|
||||
"name": "Applying for",
|
||||
"order": 2,
|
||||
"primary": false,
|
||||
"link_row_table_id": 28,
|
||||
"link_row_related_field_id": 162
|
||||
},
|
||||
{
|
||||
"id": 163,
|
||||
"type": "email",
|
||||
"name": "Email address",
|
||||
"order": 3,
|
||||
"primary": false
|
||||
},
|
||||
{
|
||||
"id": 164,
|
||||
"type": "phone_number",
|
||||
"name": "Phone number",
|
||||
"order": 4,
|
||||
"primary": false
|
||||
},
|
||||
{
|
||||
"id": 165,
|
||||
"type": "link_row",
|
||||
"name": "Phone interviewer",
|
||||
"order": 5,
|
||||
"primary": false,
|
||||
"link_row_table_id": 29,
|
||||
"link_row_related_field_id": 166
|
||||
},
|
||||
{
|
||||
"id": 167,
|
||||
"type": "single_select",
|
||||
"name": "Phone interview score",
|
||||
"order": 6,
|
||||
"primary": false,
|
||||
"select_options": [
|
||||
{
|
||||
"id": 52,
|
||||
"value": "No hire",
|
||||
"color": "light-red",
|
||||
"order": 0
|
||||
},
|
||||
{
|
||||
"id": 53,
|
||||
"value": "Worth consideration",
|
||||
"color": "light-orange",
|
||||
"order": 1
|
||||
},
|
||||
{
|
||||
"id": 54,
|
||||
"value": "Good",
|
||||
"color": "light-blue",
|
||||
"order": 2
|
||||
},
|
||||
{
|
||||
"id": 55,
|
||||
"value": "Must hire",
|
||||
"color": "light-green",
|
||||
"order": 3
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": 168,
|
||||
"type": "long_text",
|
||||
"name": "Phone interview notes",
|
||||
"order": 7,
|
||||
"primary": false
|
||||
},
|
||||
{
|
||||
"id": 169,
|
||||
"type": "date",
|
||||
"name": "Phone interview",
|
||||
"order": 8,
|
||||
"primary": false,
|
||||
"date_format": "US",
|
||||
"date_include_time": true,
|
||||
"date_time_format": "12"
|
||||
},
|
||||
{
|
||||
"id": 170,
|
||||
"type": "date",
|
||||
"name": "On-site interview",
|
||||
"order": 9,
|
||||
"primary": false,
|
||||
"date_format": "US",
|
||||
"date_include_time": true,
|
||||
"date_time_format": "12"
|
||||
},
|
||||
{
|
||||
"id": 171,
|
||||
"type": "link_row",
|
||||
"name": "On-site interview",
|
||||
"order": 10,
|
||||
"primary": false,
|
||||
"link_row_table_id": 29,
|
||||
"link_row_related_field_id": 172
|
||||
},
|
||||
{
|
||||
"id": 173,
|
||||
"type": "single_select",
|
||||
"name": "On-site interview score",
|
||||
"order": 11,
|
||||
"primary": false,
|
||||
"select_options": [
|
||||
{
|
||||
"id": 56,
|
||||
"value": "No hire",
|
||||
"color": "light-red",
|
||||
"order": 0
|
||||
},
|
||||
{
|
||||
"id": 57,
|
||||
"value": "Worth consideration",
|
||||
"color": "light-orange",
|
||||
"order": 1
|
||||
},
|
||||
{
|
||||
"id": 58,
|
||||
"value": "Good",
|
||||
"color": "light-blue",
|
||||
"order": 2
|
||||
},
|
||||
{
|
||||
"id": 59,
|
||||
"value": "Must hire",
|
||||
"color": "light-green",
|
||||
"order": 3
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": 174,
|
||||
"type": "long_text",
|
||||
"name": "On-site interview notes",
|
||||
"order": 12,
|
||||
"primary": false
|
||||
}
|
||||
],
|
||||
"views": [
|
||||
{
|
||||
"id": 36,
|
||||
"type": "grid",
|
||||
"name": "Participating candidates",
|
||||
"order": 1,
|
||||
"filter_type": "AND",
|
||||
"filters_disabled": false,
|
||||
"filters": [
|
||||
{
|
||||
"id": 10,
|
||||
"field_id": 160,
|
||||
"type": "single_select_not_equal",
|
||||
"value": "63"
|
||||
}
|
||||
],
|
||||
"sortings": [],
|
||||
"field_options": [
|
||||
{
|
||||
"id": 243,
|
||||
"field_id": 151,
|
||||
"width": 167,
|
||||
"hidden": false,
|
||||
"order": 8
|
||||
},
|
||||
{
|
||||
"id": 250,
|
||||
"field_id": 160,
|
||||
"width": 162,
|
||||
"hidden": false,
|
||||
"order": 0
|
||||
},
|
||||
{
|
||||
"id": 251,
|
||||
"field_id": 161,
|
||||
"width": 232,
|
||||
"hidden": false,
|
||||
"order": 1
|
||||
},
|
||||
{
|
||||
"id": 252,
|
||||
"field_id": 163,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 2
|
||||
},
|
||||
{
|
||||
"id": 253,
|
||||
"field_id": 164,
|
||||
"width": 161,
|
||||
"hidden": false,
|
||||
"order": 3
|
||||
},
|
||||
{
|
||||
"id": 254,
|
||||
"field_id": 165,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 5
|
||||
},
|
||||
{
|
||||
"id": 255,
|
||||
"field_id": 167,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 6
|
||||
},
|
||||
{
|
||||
"id": 256,
|
||||
"field_id": 168,
|
||||
"width": 252,
|
||||
"hidden": false,
|
||||
"order": 7
|
||||
},
|
||||
{
|
||||
"id": 257,
|
||||
"field_id": 169,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 4
|
||||
},
|
||||
{
|
||||
"id": 262,
|
||||
"field_id": 170,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 32767
|
||||
},
|
||||
{
|
||||
"id": 263,
|
||||
"field_id": 171,
|
||||
"width": 200,
|
||||
"hidden": false,
|
||||
"order": 32767
|
||||
},
|
||||
{
|
||||
"id": 264,
|
"field_id": 173,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 265,
"field_id": 174,
"width": 200,
"hidden": false,
"order": 32767
}
]
},
{
"id": 42,
"type": "grid",
"name": "All candidates",
"order": 2,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [],
"field_options": [
{
"id": 289,
"field_id": 151,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 290,
"field_id": 160,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 291,
"field_id": 161,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 292,
"field_id": 163,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 293,
"field_id": 164,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 294,
"field_id": 165,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 295,
"field_id": 167,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 296,
"field_id": 168,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 297,
"field_id": 169,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 298,
"field_id": 170,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 299,
"field_id": 171,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 300,
"field_id": 173,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 301,
"field_id": 174,
"width": 200,
"hidden": false,
"order": 32767
}
]
}
],
"rows": [
{
"id": 3,
"order": "1.00000000000000000000",
"field_151": "Davy Bartram",
"field_160": 63,
"field_161": [
6
],
"field_163": "davy.bartram@email.com",
"field_164": "+310999888777",
"field_165": [
4
],
"field_167": 52,
"field_168": "Refused to turn on his web-cam. So we decided not to move forward.",
"field_169": "2021-03-01T00:30:00+00:00",
"field_170": null,
"field_171": [],
"field_173": null,
"field_174": ""
},
{
"id": 4,
"order": "2.00000000000000000000",
"field_151": "Gerry Hanson",
"field_160": 51,
"field_161": [
5
],
"field_163": "gerry.hanson@email.com",
"field_164": "+310666444555",
"field_165": [
5
],
"field_167": 55,
"field_168": "Knew a lot about the subject at hand.",
"field_169": "2021-03-01T00:30:00+00:00",
"field_170": "2021-05-12T04:00:00+00:00",
"field_171": [
3,
5
],
"field_173": 59,
"field_174": "Could present us with a great solution for our problem."
},
{
"id": 5,
"order": "3.00000000000000000000",
"field_151": "Egill Attaway",
"field_160": 49,
"field_161": [
4
],
"field_163": "egill.attaway@email.com",
"field_164": "+310333444555",
"field_165": [
4
],
"field_167": 53,
"field_168": "Might be a little bit to junior for this position, but still a very good match.",
"field_169": "2021-04-06T10:35:00+00:00",
"field_170": null,
"field_171": [],
"field_173": null,
"field_174": ""
},
{
"id": 6,
"order": "4.00000000000000000000",
"field_151": "Maxi McAfee",
"field_160": 49,
"field_161": [
4
],
"field_163": "maxi.mcafee@email.com",
"field_164": "+310333444666",
"field_165": [
4
],
"field_167": 54,
"field_168": "Checks all the boxes, but did not had a good personal click.",
"field_169": "2021-04-06T10:00:00+00:00",
"field_170": null,
"field_171": [],
"field_173": null,
"field_174": ""
},
{
"id": 7,
"order": "5.00000000000000000000",
"field_151": "Amira Day",
"field_160": 48,
"field_161": [
6
],
"field_163": "amira.day@email.com",
"field_164": "",
"field_165": [],
"field_167": null,
"field_168": "",
"field_169": null,
"field_170": null,
"field_171": [],
"field_173": null,
"field_174": ""
},
{
"id": 8,
"order": "6.00000000000000000000",
"field_151": "Gus Lyon",
"field_160": 51,
"field_161": [
3
],
"field_163": "gus.lyon@email.com",
"field_164": "+311333444222",
"field_165": [
6
],
"field_167": 54,
"field_168": "Professional appearance and even suggested a couple of good ideas.",
"field_169": "2021-04-07T16:00:00+00:00",
"field_170": "2021-04-12T20:30:37+00:00",
"field_171": [
3,
6
],
"field_173": 58,
"field_174": "Good match on personal level and good technical skills. This is a no-brainer."
},
{
"id": 9,
"order": "7.00000000000000000000",
"field_151": "Trey Forrest",
"field_160": 50,
"field_161": [
6
],
"field_163": "trey.forrest@email.com",
"field_164": "+312333444555",
"field_165": [
7
],
"field_167": 53,
"field_168": "Was more a medoir level developer.",
"field_169": "2021-03-24T16:00:00+00:00",
"field_170": "2021-04-01T00:00:00+00:00",
"field_171": [
3,
7
],
"field_173": 57,
"field_174": "We are probably going to reject him."
},
{
"id": 10,
"order": "8.00000000000000000000",
"field_151": "Thelma Everly",
"field_160": 63,
"field_161": [
5
],
"field_163": "thelma.everly@email.com",
"field_164": "",
"field_165": [],
"field_167": null,
"field_168": "",
"field_169": null,
"field_170": null,
"field_171": [],
"field_173": null,
"field_174": ""
}
]
},
{
"id": 28,
"name": "Positions",
"order": 2,
"fields": [
{
"id": 154,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 162,
"type": "link_row",
"name": "Applying for position",
"order": 1,
"primary": false,
"link_row_table_id": 27,
"link_row_related_field_id": 161
},
{
"id": 176,
"type": "boolean",
"name": "Filled",
"order": 2,
"primary": false
},
{
"id": 177,
"type": "long_text",
"name": "Description",
"order": 3,
"primary": false
}
],
"views": [
{
"id": 37,
"type": "grid",
"name": "All positions",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [],
"field_options": [
{
"id": 246,
"field_id": 154,
"width": 242,
"hidden": false,
"order": 3
},
{
"id": 258,
"field_id": 162,
"width": 302,
"hidden": false,
"order": 2
},
{
"id": 266,
"field_id": 176,
"width": 118,
"hidden": false,
"order": 0
},
{
"id": 267,
"field_id": 177,
"width": 320,
"hidden": false,
"order": 1
}
]
},
{
"id": 43,
"type": "grid",
"name": "Open positions",
"order": 2,
"filter_type": "AND",
"filters_disabled": false,
"filters": [
{
"id": 11,
"field_id": 176,
"type": "boolean",
"value": ""
}
],
"sortings": [],
"field_options": [
{
"id": 302,
"field_id": 154,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 303,
"field_id": 162,
"width": 317,
"hidden": false,
"order": 2
},
{
"id": 304,
"field_id": 176,
"width": 100,
"hidden": false,
"order": 0
},
{
"id": 305,
"field_id": 177,
"width": 307,
"hidden": false,
"order": 1
}
]
}
],
"rows": [
{
"id": 3,
"order": "1.00000000000000000000",
"field_154": "Junior full stack software engineer",
"field_162": [
8
],
"field_176": "true",
"field_177": "Cras pulvinar feugiat arcu, vel vestibulum odio laoreet eget. Aliquam erat volutpat. Donec a ipsum justo. Proin sed blandit velit, nec iaculis sem. Duis eget magna quis enim auctor varius id eget ante."
},
{
"id": 6,
"order": "1.99999999999999999999",
"field_154": "Senior full stack software engineer",
"field_162": [
3,
7,
9
],
"field_176": "false",
"field_177": "Nunc euismod massa et enim malesuada, a molestie felis vehicula. Duis aliquet ex et metus mattis, eu semper turpis placerat. Nam cursus elit ex. Pellentesque facilisis lectus ullamcorper lacinia feugiat."
},
{
"id": 4,
"order": "2.00000000000000000000",
"field_154": "Growth Hacker",
"field_162": [
5,
6
],
"field_176": "false",
"field_177": "Phasellus commodo, justo quis hendrerit aliquet, ligula leo rhoncus nisi, nec consectetur velit justo fermentum ligula. Nullam venenatis gravida fringilla."
},
{
"id": 5,
"order": "3.00000000000000000000",
"field_154": "Customer Success manager",
"field_162": [
4,
10
],
"field_176": "true",
"field_177": "Morbi vel diam et metus egestas lobortis tristique sit amet tortor. Nullam gravida dictum massa et cursus. Vestibulum ornare elit nec quam tristique mollis. Vivamus blandit viverra nisi lacinia elementum."
}
]
},
{
"id": 29,
"name": "Interviewers",
"order": 3,
"fields": [
{
"id": 157,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 166,
"type": "link_row",
"name": "Phone applicants",
"order": 1,
"primary": false,
"link_row_table_id": 27,
"link_row_related_field_id": 165
},
{
"id": 172,
"type": "link_row",
"name": "On-site applicants",
"order": 2,
"primary": false,
"link_row_table_id": 27,
"link_row_related_field_id": 171
},
{
"id": 175,
"type": "single_select",
"name": "Role",
"order": 3,
"primary": false,
"select_options": [
{
"id": 60,
"value": "Developer",
"color": "light-green",
"order": 0
},
{
"id": 61,
"value": "Manager",
"color": "light-orange",
"order": 1
},
{
"id": 62,
"value": "CEO",
"color": "light-blue",
"order": 2
}
]
}
],
"views": [
{
"id": 38,
"type": "grid",
"name": "Grid",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [],
"field_options": [
{
"id": 249,
"field_id": 157,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 259,
"field_id": 166,
"width": 200,
"hidden": false,
"order": 1
},
{
"id": 260,
"field_id": 172,
"width": 200,
"hidden": false,
"order": 2
},
{
"id": 261,
"field_id": 175,
"width": 200,
"hidden": false,
"order": 0
}
]
}
],
"rows": [
{
"id": 3,
"order": "1.00000000000000000000",
"field_157": "Toni MacDougall",
"field_166": [],
"field_172": [
8,
4,
9
],
"field_175": 62
},
{
"id": 4,
"order": "2.00000000000000000000",
"field_157": "Amilia Durant",
"field_166": [
3,
5,
6
],
"field_172": [],
"field_175": 61
},
{
"id": 5,
"order": "3.00000000000000000000",
"field_157": "Elena Nogueira",
"field_166": [
4
],
"field_172": [
4
],
"field_175": 61
},
{
"id": 6,
"order": "4.00000000000000000000",
"field_157": "Anand Walther",
"field_166": [
8
],
"field_172": [
8
],
"field_175": 60
},
{
"id": 7,
"order": "5.00000000000000000000",
"field_157": "Simran Tos",
"field_166": [
9
],
"field_172": [
9
],
"field_175": 60
}
]
}
]
}
]
}
896 backend/templates/personal-todo-tracker.json Normal file

@@ -0,0 +1,896 @@
{
"baserow_template_version": 1,
"name": "Personal Todo Tracker",
"icon": "clipboard-list",
"keywords": [
"todo",
"tracker",
"tasks",
"personal"
],
"categories": [
"Personal"
],
"export": [
{
"id": 5,
"name": "Personal Todo Tracker",
"order": 2,
"type": "database",
"tables": [
{
"id": 13,
"name": "Todos",
"order": 1,
"fields": [
{
"id": 75,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 76,
"type": "long_text",
"name": "Notes",
"order": 1,
"primary": false
},
{
"id": 77,
"type": "boolean",
"name": "Done",
"order": 2,
"primary": false
},
{
"id": 87,
"type": "date",
"name": "Due By",
"order": 3,
"primary": false,
"date_format": "EU",
"date_include_time": true,
"date_time_format": "24"
},
{
"id": 88,
"type": "link_row",
"name": "Shopping List",
"order": 4,
"primary": false,
"link_row_table_id": 15,
"link_row_related_field_id": 89
},
{
"id": 90,
"type": "link_row",
"name": "Categories",
"order": 5,
"primary": false,
"link_row_table_id": 14,
"link_row_related_field_id": 91
},
{
"id": 96,
"type": "link_row",
"name": "People",
"order": 7,
"primary": false,
"link_row_table_id": 16,
"link_row_related_field_id": 97
}
],
"views": [
{
"id": 19,
"type": "grid",
"name": "Outstanding Todos",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [
{
"id": 7,
"field_id": 77,
"type": "boolean",
"value": "0"
}
],
"sortings": [
{
"id": 2,
"field_id": 87,
"order": "ASC"
}
],
"field_options": [
{
"id": 131,
"field_id": 75,
"width": 200,
"hidden": false,
"order": 6
},
{
"id": 129,
"field_id": 76,
"width": 400,
"hidden": false,
"order": 2
},
{
"id": 130,
"field_id": 77,
"width": 100,
"hidden": false,
"order": 1
},
{
"id": 148,
"field_id": 87,
"width": 200,
"hidden": false,
"order": 0
},
{
"id": 149,
"field_id": 88,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 153,
"field_id": 90,
"width": 200,
"hidden": false,
"order": 5
},
{
"id": 158,
"field_id": 96,
"width": 200,
"hidden": false,
"order": 4
}
]
},
{
"id": 23,
"type": "grid",
"name": "Done Todos",
"order": 2,
"filter_type": "AND",
"filters_disabled": false,
"filters": [
{
"id": 8,
"field_id": 77,
"type": "boolean",
"value": "1"
}
],
"sortings": [
{
"id": 12,
"field_id": 87,
"order": "DESC"
}
],
"field_options": [
{
"id": 141,
"field_id": 75,
"width": 200,
"hidden": false,
"order": 6
},
{
"id": 142,
"field_id": 76,
"width": 200,
"hidden": false,
"order": 2
},
{
"id": 143,
"field_id": 77,
"width": 100,
"hidden": false,
"order": 1
},
{
"id": 150,
"field_id": 87,
"width": 200,
"hidden": false,
"order": 0
},
{
"id": 151,
"field_id": 88,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 154,
"field_id": 90,
"width": 200,
"hidden": false,
"order": 4
},
{
"id": 162,
"field_id": 96,
"width": 200,
"hidden": false,
"order": 5
}
]
},
{
"id": 24,
"type": "grid",
"name": "All Todos",
"order": 3,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [
{
"id": 13,
"field_id": 87,
"order": "DESC"
}
],
"field_options": [
{
"id": 144,
"field_id": 75,
"width": 200,
"hidden": false,
"order": 6
},
{
"id": 145,
"field_id": 76,
"width": 257,
"hidden": false,
"order": 2
},
{
"id": 146,
"field_id": 77,
"width": 100,
"hidden": false,
"order": 1
},
{
"id": 415,
"field_id": 87,
"width": 200,
"hidden": false,
"order": 0
},
{
"id": 416,
"field_id": 88,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 417,
"field_id": 90,
"width": 200,
"hidden": false,
"order": 4
},
{
"id": 418,
"field_id": 96,
"width": 200,
"hidden": false,
"order": 5
}
]
}
],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
"field_75": "Go Shopping",
"field_76": "For the Onion Soup!",
"field_77": "false",
"field_87": "2021-04-16T13:00:00+00:00",
"field_88": [
1,
2,
3
],
"field_90": [
1
],
"field_96": []
},
{
"id": 2,
"order": "2.00000000000000000000",
"field_75": "Pay Rent",
"field_76": "$450",
"field_77": "false",
"field_87": "2021-04-30T00:00:00+00:00",
"field_88": [],
"field_90": [
4
],
"field_96": []
},
{
"id": 3,
"order": "3.00000000000000000000",
"field_75": "Finish Invoices",
"field_76": "",
"field_77": "true",
"field_87": "2021-04-22T13:00:42+00:00",
"field_88": [],
"field_90": [
2
],
"field_96": []
},
{
"id": 4,
"order": "4.00000000000000000000",
"field_75": "Call Mary about next week",
"field_76": "Can she make the party?",
"field_77": "false",
"field_87": "2021-04-08T10:00:00+00:00",
"field_88": [],
"field_90": [
1
],
"field_96": [
2
]
},
{
"id": 5,
"order": "5.00000000000000000000",
"field_75": "Steve's Birthday!",
"field_76": "He better like the Soup...",
"field_77": "false",
"field_87": "2021-04-17T17:39:12+00:00",
"field_88": [],
"field_90": [
1
],
"field_96": [
1,
2
]
},
{
"id": 6,
"order": "6.00000000000000000000",
"field_75": "Buy New Garden Tools",
"field_76": "",
"field_77": "false",
"field_87": "2021-05-14T17:42:58+00:00",
"field_88": [
4
],
"field_90": [
5
],
"field_96": []
},
{
"id": 7,
"order": "7.00000000000000000000",
"field_75": "Buy Mum Birthday Flowers",
"field_76": "She likes Roses.",
"field_77": "false",
"field_87": "2021-07-17T17:48:52+00:00",
"field_88": [
5
],
"field_90": [
6
],
"field_96": [
4
]
}
]
},
{
"id": 14,
"name": "Categories",
"order": 2,
"fields": [
{
"id": 78,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 79,
"type": "long_text",
"name": "Notes",
"order": 1,
"primary": false
},
{
"id": 91,
"type": "link_row",
"name": "Todos",
"order": 2,
"primary": false,
"link_row_table_id": 13,
"link_row_related_field_id": 90
},
{
"id": 93,
"type": "link_row",
"name": "Contacts",
"order": 3,
"primary": false,
"link_row_table_id": 16,
"link_row_related_field_id": 92
}
],
"views": [
{
"id": 20,
"type": "grid",
"name": "Grid",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [],
"field_options": [
{
"id": 134,
"field_id": 78,
"width": 200,
"hidden": false,
"order": 32767
},
{
"id": 132,
"field_id": 79,
"width": 400,
"hidden": false,
"order": 32767
},
{
"id": 152,
"field_id": 91,
"width": 256,
"hidden": false,
"order": 32767
},
{
"id": 160,
"field_id": 93,
"width": 200,
"hidden": false,
"order": 32767
}
]
}
],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
"field_78": "Steve's Birthday",
"field_79": null,
"field_91": [
1,
5,
4
],
"field_93": []
},
{
"id": 2,
"order": "2.00000000000000000000",
"field_78": "Work",
"field_79": null,
"field_91": [
3
],
"field_93": [
2,
1
]
},
{
"id": 3,
"order": "3.00000000000000000000",
"field_78": "Health",
"field_79": "",
"field_91": [],
"field_93": []
},
{
"id": 4,
"order": "4.00000000000000000000",
"field_78": "Financial",
"field_79": "",
"field_91": [
2
],
"field_93": []
},
{
"id": 5,
"order": "5.00000000000000000000",
"field_78": "Garden Cleanup",
"field_79": "",
"field_91": [
6
],
"field_93": []
},
{
"id": 6,
"order": "6.00000000000000000000",
"field_78": "Personal",
"field_79": "",
"field_91": [
7
],
"field_93": [
3,
4
]
}
]
},
{
"id": 15,
"name": "Shopping",
"order": 3,
"fields": [
{
"id": 81,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 82,
"type": "long_text",
"name": "Notes",
"order": 1,
"primary": false
},
{
"id": 89,
"type": "link_row",
"name": "Todos",
"order": 3,
"primary": false,
"link_row_table_id": 13,
"link_row_related_field_id": 88
},
{
"id": 286,
"type": "single_select",
"name": "Shop",
"order": 4,
"primary": false,
"select_options": [
{
"id": 42,
"value": "Grocery Store",
"color": "light-red",
"order": 0
},
{
"id": 43,
"value": "Petrol Station",
"color": "blue",
"order": 1
},
{
"id": 44,
"value": "Hardware Store",
"color": "green",
"order": 2
},
{
"id": 45,
"value": "Flower Shop",
"color": "light-gray",
"order": 3
}
]
}
],
"views": [
{
"id": 21,
"type": "grid",
"name": "Grid",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [],
"field_options": [
{
"id": 137,
"field_id": 81,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 135,
"field_id": 82,
"width": 400,
"hidden": false,
"order": 1
},
{
"id": 147,
"field_id": 89,
"width": 200,
"hidden": false,
"order": 2
},
{
"id": 414,
"field_id": 286,
"width": 200,
"hidden": false,
"order": 0
}
]
}
],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
"field_81": "Cheese",
"field_82": null,
"field_89": [
1
],
"field_286": 42
},
{
"id": 2,
"order": "2.00000000000000000000",
"field_81": "Onions",
"field_82": null,
"field_89": [
1
],
"field_286": 42
},
{
"id": 3,
"order": "3.00000000000000000000",
"field_81": "Petrol",
"field_82": "",
"field_89": [
1
],
"field_286": 43
},
{
"id": 4,
"order": "4.00000000000000000000",
"field_81": "Garden Tools",
"field_82": "",
"field_89": [
6
],
"field_286": 44
},
{
"id": 5,
"order": "5.00000000000000000000",
"field_81": "Roses",
"field_82": "",
"field_89": [
7
],
"field_286": 45
}
]
},
{
"id": 16,
"name": "Contacts",
"order": 4,
"fields": [
{
"id": 84,
"type": "text",
"name": "Name",
"order": 0,
"primary": true,
"text_default": ""
},
{
"id": 85,
"type": "long_text",
"name": "Notes",
"order": 1,
"primary": false
},
{
"id": 86,
"type": "phone_number",
"name": "Mobile Phone",
"order": 2,
"primary": false
},
{
"id": 92,
"type": "link_row",
"name": "Categories",
"order": 3,
"primary": false,
"link_row_table_id": 14,
"link_row_related_field_id": 93
},
{
"id": 94,
"type": "email",
"name": "Email",
"order": 4,
"primary": false
},
{
"id": 97,
"type": "link_row",
"name": "Todos",
"order": 5,
"primary": false,
"link_row_table_id": 13,
"link_row_related_field_id": 96
}
],
"views": [
{
"id": 22,
"type": "grid",
"name": "Grid",
"order": 1,
"filter_type": "AND",
"filters_disabled": false,
"filters": [],
"sortings": [
{
"id": 11,
"field_id": 84,
"order": "ASC"
}
],
"field_options": [
{
"id": 140,
"field_id": 84,
"width": 200,
"hidden": false,
"order": 5
},
{
"id": 138,
"field_id": 85,
"width": 400,
"hidden": false,
"order": 1
},
{
"id": 139,
"field_id": 86,
"width": 149,
"hidden": false,
"order": 2
},
{
"id": 155,
"field_id": 92,
"width": 200,
"hidden": false,
"order": 4
},
{
"id": 156,
"field_id": 94,
"width": 200,
"hidden": false,
"order": 3
},
{
"id": 159,
"field_id": 97,
"width": 310,
"hidden": false,
"order": 0
}
]
}
],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
"field_84": "Steve",
"field_85": null,
"field_86": "048580394",
"field_92": [
2
],
"field_94": "steve.yoshep@gmail.com",
"field_97": [
5
]
},
{
"id": 2,
"order": "2.00000000000000000000",
"field_84": "Mary",
"field_85": null,
"field_86": "442343243",
"field_92": [
2
],
"field_94": "mary.hannah@gmail.com",
"field_97": [
4,
5
]
},
{
"id": 3,
"order": "3.00000000000000000000",
"field_84": "Charlie",
"field_85": "Does Charlie have an email?",
"field_86": "+445632345",
"field_92": [
6
],
"field_94": "",
"field_97": []
},
{
"id": 4,
"order": "4.00000000000000000000",
"field_84": "Mum",
"field_85": "",
"field_86": "+3234045934",
"field_92": [
6
],
"field_94": "margret.taylor@blueyonder.co.uk",
"field_97": [
7
]
}
]
}
]
}
]
}
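Both template files above share the same top-level shape: a little metadata (`baserow_template_version`, `name`, `icon`, `keywords`, `categories`) followed by an `export` list of applications whose tables carry their `fields`, `views` and `rows`. As a minimal sketch (not part of this commit), such an export can be inspected with the standard library alone:

```python
# Minimal sketch, not part of this commit.
import json

with open('backend/templates/personal-todo-tracker.json') as handle:
    template = json.load(handle)

print(template['name'], template['categories'])
for application in template['export']:
    for table in application['tables']:
        field_names = [field['name'] for field in table['fields']]
        print(f"{table['name']}: {len(table['rows'])} rows, fields {field_names}")
```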
1246 backend/templates/project-management.json Normal file

File diff suppressed because it is too large.
@@ -1,6 +1,8 @@
 import pytest

-from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
+from rest_framework.status import (
+    HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND
+)

 from django.shortcuts import reverse

@@ -69,6 +71,15 @@ def test_list_applications(api_client, data_fixture):
     assert response.status_code == HTTP_404_NOT_FOUND
     assert response.json()['error'] == 'ERROR_GROUP_DOES_NOT_EXIST'

+    url = reverse('api:applications:list', kwargs={'group_id': group_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
+    data_fixture.create_template(group=group_1)
+    url = reverse('api:applications:list', kwargs={'group_id': group_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_200_OK
+
+
 @pytest.mark.django_db
 def test_create_application(api_client, data_fixture):

@@ -108,6 +119,10 @@ def test_create_application(api_client, data_fixture):
     assert response.status_code == HTTP_400_BAD_REQUEST
     assert response.json()['error'] == 'ERROR_USER_NOT_IN_GROUP'

+    url = reverse('api:applications:list', kwargs={'group_id': group_2.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
     response = api_client.post(
         reverse('api:applications:list', kwargs={'group_id': group.id}),
         {
134 backend/tests/baserow/api/templates/test_templates_views.py Normal file

@@ -0,0 +1,134 @@
import pytest
import os

from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND

from django.shortcuts import reverse
from django.conf import settings

from baserow.core.handler import CoreHandler
from baserow.core.models import Template, Application


@pytest.mark.django_db
def test_list_templates(api_client, data_fixture):
    category_1 = data_fixture.create_template_category(name='Cat 1')
    category_3 = data_fixture.create_template_category(name='Cat 3')
    category_2 = data_fixture.create_template_category(name='Cat 2')

    template_1 = data_fixture.create_template(
        name='Template 1',
        icon='document',
        category=category_1,
        keywords='test1,test2',
        slug='project-management'
    )
    template_2 = data_fixture.create_template(
        name='Template 2',
        icon='document',
        category=category_2,
    )
    template_3 = data_fixture.create_template(
        name='Template 3',
        icon='document',
        categories=[category_2, category_3]
    )

    response = api_client.get(reverse('api:templates:list'))
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json) == 3
    assert response_json[0]['id'] == category_1.id
    assert response_json[0]['name'] == 'Cat 1'
    assert len(response_json[0]['templates']) == 1
    assert response_json[0]['templates'][0]['id'] == template_1.id
    assert response_json[0]['templates'][0]['name'] == template_1.name
    assert response_json[0]['templates'][0]['icon'] == template_1.icon
    assert response_json[0]['templates'][0]['keywords'] == 'test1,test2'
    assert response_json[0]['templates'][0]['group_id'] == template_1.group_id
    assert response_json[0]['templates'][0]['is_default'] is True
    assert len(response_json[1]['templates']) == 2
    assert response_json[1]['templates'][0]['id'] == template_2.id
    assert response_json[1]['templates'][0]['is_default'] is False
    assert response_json[1]['templates'][1]['id'] == template_3.id
    assert response_json[1]['templates'][1]['is_default'] is False
    assert len(response_json[2]['templates']) == 1
    assert response_json[2]['templates'][0]['id'] == template_3.id
    assert response_json[2]['templates'][0]['is_default'] is False


@pytest.mark.django_db
def test_install_template(api_client, data_fixture):
    old_templates = settings.APPLICATION_TEMPLATES_DIR
    settings.APPLICATION_TEMPLATES_DIR = os.path.join(
        settings.BASE_DIR,
        '../../../tests/templates'
    )

    user, token = data_fixture.create_user_and_token()
    group = data_fixture.create_group(user=user)
    group_2 = data_fixture.create_group()

    handler = CoreHandler()
    handler.sync_templates()

    template_2 = data_fixture.create_template(slug='does-not-exist')
    template = Template.objects.get(slug='example-template')

    response = api_client.get(
        reverse('api:templates:install', kwargs={
            'group_id': group.id,
            'template_id': template_2.id
        }),
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_TEMPLATE_FILE_DOES_NOT_EXIST'

    response = api_client.get(
        reverse('api:templates:install', kwargs={
            'group_id': group_2.id,
            'template_id': template.id
        }),
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_USER_NOT_IN_GROUP'

    response = api_client.get(
        reverse('api:templates:install', kwargs={
            'group_id': 0,
            'template_id': template.id
        }),
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()['error'] == 'ERROR_GROUP_DOES_NOT_EXIST'

    response = api_client.get(
        reverse('api:templates:install', kwargs={
            'group_id': group.id,
            'template_id': 0
        }),
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()['error'] == 'ERROR_TEMPLATE_DOES_NOT_EXIST'

    response = api_client.get(
        reverse('api:templates:install', kwargs={
            'group_id': group.id,
            'template_id': template.id
        }),
        HTTP_AUTHORIZATION=f'JWT {token}'
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()

    assert len(response_json) == 1
    assert response_json[0]['group']['id'] == group.id
    application = Application.objects.all().order_by('id').last()
    assert response_json[0]['id'] == application.id
    assert response_json[0]['group']['id'] == application.group_id

    settings.APPLICATION_TEMPLATES_DIR = old_templates
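The two tests above pin down the public behaviour of the new templates endpoints: listing requires no authentication, while installing requires a JWT token and membership of the target group. A rough sketch of the same flow over plain HTTP follows; the URL paths are inferred from the `reverse()` names and may differ:

```python
# Sketch only; the endpoint paths are inferred from the reverse() names above.
import requests

base = 'http://localhost:8000/api'

# Listing templates needs no credentials.
for category in requests.get(f'{base}/templates/').json():
    for template in category['templates']:
        print(category['name'], '-', template['name'])

# Installing a template into a group needs a JWT token of a group member.
token = '...'  # hypothetical token obtained via the authentication endpoint
response = requests.get(
    f'{base}/templates/install/1/1/',  # hypothetical group and template ids
    headers={'Authorization': f'JWT {token}'},
)
print(response.status_code)
```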
@@ -250,7 +250,7 @@ def test_allowed_includes():

     request = Request(factory.get(
         '/some-page/',
-        data={'includes': 'test_1,test_2'},
+        data={'include': 'test_1,test_2'},
     ))

     @allowed_includes('test_1', 'test_3')

@@ -262,7 +262,7 @@ def test_allowed_includes():

     request = Request(factory.get(
         '/some-page/',
-        data={'includes': 'test_3'},
+        data={'include': 'test_3'},
     ))

     @allowed_includes('test_1', 'test_3')
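These two hunks rename the query parameter from `includes` to `include`. A loose sketch of what a decorator in the spirit of `allowed_includes` can do with that parameter; this is illustrative, not Baserow's actual implementation:

```python
# Illustrative sketch, not Baserow's actual implementation.
from functools import wraps


def allowed_includes(*allowed):
    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            raw = request.GET.get('include', '')
            requested = {part for part in raw.split(',') if part}
            # Hand the view one boolean per allowed include name.
            flags = {name: name in requested for name in allowed}
            return func(request, *args, **flags, **kwargs)
        return wrapper
    return decorator
```

With `?include=filters,sortings`, a view decorated with `@allowed_includes('filters', 'sortings')` would then receive `filters=True, sortings=True`.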
@@ -167,12 +167,21 @@ def test_upload_file_via_url(api_client, data_fixture, tmpdir):

     response = api_client.post(
         reverse('api:user_files:upload_via_url'),
-        data={'url': 'http://localhost/test2.txt'},
+        data={'url': 'https://baserow.io/test2.txt'},
         HTTP_AUTHORIZATION=f'JWT {token}'
     )
     assert response.status_code == HTTP_400_BAD_REQUEST
     assert response.json()['error'] == 'ERROR_FILE_URL_COULD_NOT_BE_REACHED'

+    # Only the http and https protocol are allowed.
+    response = api_client.post(
+        reverse('api:user_files:upload_via_url'),
+        data={'url': 'ftp://baserow.io/test2.txt'},
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    assert response.json()['error'] == 'ERROR_INVALID_FILE_URL'
+
     responses.add(
         responses.GET,
         'http://localhost/test.txt',

@@ -215,3 +224,19 @@ def test_upload_file_via_url(api_client, data_fixture, tmpdir):
     user_file = UserFile.objects.all().last()
     file_path = tmpdir.join('user_files', user_file.name)
     assert file_path.isfile()
+
+
+@pytest.mark.django_db
+def test_upload_file_via_url_within_private_network(api_client, data_fixture, tmpdir):
+    user, token = data_fixture.create_user_and_token(
+        email='test@test.nl', password='password', first_name='Test1'
+    )
+
+    # Could not be reached because it is an internal private URL.
+    response = api_client.post(
+        reverse('api:user_files:upload_via_url'),
+        data={'url': 'https://localhost/test2.txt'},
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    assert response.status_code == HTTP_400_BAD_REQUEST
+    assert response.json()['error'] == 'ERROR_FILE_URL_COULD_NOT_BE_REACHED'
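The new assertions require that only http and https URLs are accepted and that URLs resolving to a private network are refused. A simplified sketch of such a check, using only the standard library; the real validation logic is assumed, not quoted:

```python
# Simplified sketch of the validation the tests above describe; assumed logic.
import ipaddress
import socket
from urllib.parse import urlparse


def validate_file_url(url):
    parsed = urlparse(url)
    if parsed.scheme not in ('http', 'https') or not parsed.hostname:
        raise ValueError('ERROR_INVALID_FILE_URL')
    try:
        address = socket.gethostbyname(parsed.hostname)
    except socket.gaierror:
        raise ValueError('ERROR_FILE_URL_COULD_NOT_BE_REACHED')
    if ipaddress.ip_address(address).is_private:
        # Covers localhost and other internal hosts, as in the last test.
        raise ValueError('ERROR_FILE_URL_COULD_NOT_BE_REACHED')
```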
@@ -31,7 +31,7 @@ def test_create_user(client, data_fixture):
     assert 'password' not in response_json['user']
     assert response_json['user']['username'] == 'test@test.nl'
     assert response_json['user']['first_name'] == 'Test1'
-    assert response_json['user']['is_staff'] is False
+    assert response_json['user']['is_staff'] is True

     response_failed = client.post(reverse('api:user:index'), {
         'name': 'Test1',
@@ -1,6 +1,8 @@
 import pytest

-from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
+from rest_framework.status import (
+    HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND
+)

 from django.shortcuts import reverse

@@ -61,6 +63,15 @@ def test_list_fields(api_client, data_fixture):
     assert response.status_code == HTTP_404_NOT_FOUND
     assert response.json()['error'] == 'ERROR_TABLE_DOES_NOT_EXIST'

+    url = reverse('api:database:fields:list', kwargs={'table_id': table_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
+    data_fixture.create_template(group=table_1.database.group)
+    url = reverse('api:database:fields:list', kwargs={'table_id': table_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_200_OK
+
+
 @pytest.mark.django_db
 def test_create_field(api_client, data_fixture):

@@ -100,6 +111,10 @@ def test_create_field(api_client, data_fixture):
     assert response.status_code == HTTP_400_BAD_REQUEST
     assert response.json()['error'] == 'ERROR_USER_NOT_IN_GROUP'

+    url = reverse('api:database:fields:list', kwargs={'table_id': table_2.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
     response = api_client.post(
         reverse('api:database:fields:list', kwargs={'table_id': table.id}),
         {'name': 'Test 1', 'type': 'text', 'text_default': 'default!'},
@@ -10,7 +10,8 @@ from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD
 from django.shortcuts import reverse

 from baserow.contrib.database.fields.models import (
-    LongTextField, URLField, DateField, EmailField, FileField, NumberField
+    LongTextField, URLField, DateField, EmailField, FileField, NumberField,
+    PhoneNumberField
 )

@@ -842,3 +843,96 @@ def test_number_field_type(api_client, data_fixture):
         response_json['detail'][f'field_{positive_int_field_id}'][0]['code'] ==
         'max_digits'
     )
+
+
+@pytest.mark.django_db
+def test_phone_number_field_type(api_client, data_fixture):
+    user, token = data_fixture.create_user_and_token(
+        email='test@test.nl', password='password', first_name='Test1')
+    table = data_fixture.create_database_table(user=user)
+
+    response = api_client.post(
+        reverse('api:database:fields:list', kwargs={'table_id': table.id}),
+        {'name': 'phone', 'type': 'phone_number'},
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    response_json = response.json()
+    assert response.status_code == HTTP_200_OK
+    assert response_json['type'] == 'phone_number'
+    assert PhoneNumberField.objects.all().count() == 1
+    field_id = response_json['id']
+
+    response = api_client.patch(
+        reverse('api:database:fields:item', kwargs={'field_id': field_id}),
+        {'name': 'Phone'},
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    assert response.status_code == HTTP_200_OK
+
+    expected_phone_number = '+44761198672'
+
+    response = api_client.post(
+        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
+        {
+            f'field_{field_id}': expected_phone_number
+        },
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    response_json = response.json()
+    assert response.status_code == HTTP_200_OK
+    assert response_json[f'field_{field_id}'] == expected_phone_number
+
+    model = table.get_model(attribute_names=True)
+    row = model.objects.all().last()
+    assert row.phone == expected_phone_number
+
+    response = api_client.post(
+        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
+        {
+            f'field_{field_id}': ''
+        },
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    response_json = response.json()
+    assert response.status_code == HTTP_200_OK
+    assert response_json[f'field_{field_id}'] == ''
+
+    row = model.objects.all().last()
+    assert row.phone == ''
+
+    response = api_client.post(
+        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
+        {
+            f'field_{field_id}': None
+        },
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    response_json = response.json()
+    assert response.status_code == HTTP_200_OK
+    assert response_json[f'field_{field_id}'] == ''
+
+    row = model.objects.all().last()
+    assert row.phone == ''
+
+    response = api_client.post(
+        reverse('api:database:rows:list', kwargs={'table_id': table.id}),
+        {},
+        format='json',
+        HTTP_AUTHORIZATION=f'JWT {token}'
+    )
+    response_json = response.json()
+    assert response.status_code == HTTP_200_OK
+    assert response_json[f'field_{field_id}'] == ''
+
+    row = model.objects.all().last()
+    assert row.phone == ''
+
+    email = reverse('api:database:fields:item', kwargs={'field_id': field_id})
+    response = api_client.delete(email, HTTP_AUTHORIZATION=f'JWT {token}')
+    assert response.status_code == HTTP_204_NO_CONTENT
+    assert PhoneNumberField.objects.all().count() == 0
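The test exercises the new `phone_number` field type end to end: create the field, store a value, and treat empty, `None` and missing values alike as `''`. As a hedged sketch, a permissive validator in that spirit could look as follows; the exact set of allowed characters is an assumption:

```python
# Hedged sketch; the exact allowed characters are an assumption.
import re

# Digits plus a few common separators and an optional leading plus sign.
PHONE_NUMBER_REGEX = re.compile(r'^\+?[0-9()./\- ]{1,100}$')


def is_valid_phone_number(value):
    # Empty values are allowed, matching the '' and None cases in the test.
    return value == '' or bool(PHONE_NUMBER_REGEX.match(value))


assert is_valid_phone_number('+44761198672')
assert is_valid_phone_number('')
assert not is_valid_phone_number('not-a-number')
```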
@@ -21,6 +21,8 @@ def test_list_rows(api_client, data_fixture):
     field_1 = data_fixture.create_text_field(name='Name', table=table, primary=True)
     field_2 = data_fixture.create_number_field(name='Price', table=table)
     field_3 = data_fixture.create_text_field()
+    field_4 = data_fixture.create_boolean_field(
+        name='InStock', table=table)

     token = TokenHandler().create_token(user, table.database.group, 'Good')
     wrong_token = TokenHandler().create_token(user, table.database.group, 'Wrong')

@@ -169,7 +171,7 @@ def test_list_rows(api_client, data_fixture):

     url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
     response = api_client.get(
-        f'{url}?search=1',
+        f'{url}?search=4',
         format='json',
         HTTP_AUTHORIZATION=f'JWT {jwt_token}'
     )

@@ -177,7 +179,7 @@ def test_list_rows(api_client, data_fixture):
     assert response.status_code == HTTP_200_OK
     assert response_json['count'] == 1
     assert len(response_json['results']) == 1
-    assert response_json['results'][0]['id'] == row_1.id
+    assert response_json['results'][0]['id'] == row_4.id

     url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
     response = api_client.get(

@@ -262,7 +264,7 @@ def test_list_rows(api_client, data_fixture):
     assert response_json['error'] == 'ERROR_FILTER_FIELD_NOT_FOUND'

     url = reverse('api:database:rows:list', kwargs={'table_id': table.id})
-    get_params = [f'filter__field_{field_2.id}__contains=100']
+    get_params = [f'filter__field_{field_4.id}__contains=100']
     response = api_client.get(
         f'{url}?{"&".join(get_params)}',
         format='json',
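With the boolean field in place, the test searches for `4` and filters with `filter__field_<id>__contains=100`. As a usage sketch, the query string for such a request can be assembled like this; the ids and the URL path are illustrative:

```python
# Illustrative sketch of building a rows request with search and filter params.
from urllib.parse import urlencode

table_id = 10   # hypothetical table id
field_id = 42   # hypothetical field id, like field_4 above
params = urlencode({
    'search': '4',
    f'filter__field_{field_id}__contains': '100',
})
print(f'/api/database/rows/table/{table_id}/?{params}')
```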
@@ -1,6 +1,8 @@
 import pytest

-from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
+from rest_framework.status import (
+    HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND
+)

 from django.shortcuts import reverse

@@ -190,6 +192,15 @@ def test_list_rows(api_client, data_fixture):
     assert not response_json['next']
     assert len(response_json['results']) == 0

+    url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
+    data_fixture.create_template(group=grid.table.database.group)
+    url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_200_OK
+
+
 @pytest.mark.django_db
 def test_list_rows_include_field_options(api_client, data_fixture):

@@ -218,7 +229,7 @@ def test_list_rows_include_field_options(api_client, data_fixture):
     url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
     response = api_client.get(
         url,
-        {'includes': 'field_options'},
+        {'include': 'field_options'},
         **{'HTTP_AUTHORIZATION': f'JWT {token}'}
     )
     response_json = response.json()

@@ -226,8 +237,11 @@ def test_list_rows_include_field_options(api_client, data_fixture):
     assert len(response_json['field_options']) == 2
     assert response_json['field_options'][str(text_field.id)]['width'] == 200
     assert response_json['field_options'][str(text_field.id)]['hidden'] is False
+    assert response_json['field_options'][str(text_field.id)]['order'] == 32767
     assert response_json['field_options'][str(number_field.id)]['width'] == 200
     assert response_json['field_options'][str(number_field.id)]['hidden'] is False
+    assert response_json['field_options'][str(number_field.id)]['order'] == 32767
+    assert 'filters_disabled' not in response_json


 @pytest.mark.django_db

@@ -271,6 +285,10 @@ def test_list_filtered_rows(api_client, data_fixture):
     assert response.status_code == HTTP_400_BAD_REQUEST
     assert response.json()['error'] == 'ERROR_REQUEST_BODY_VALIDATION'

+    url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
     url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
     response = api_client.post(
         url,

@@ -385,16 +403,20 @@ def test_patch_grid_view(api_client, data_fixture):
     assert len(response_json['field_options']) == 2
     assert response_json['field_options'][str(text_field.id)]['width'] == 300
     assert response_json['field_options'][str(text_field.id)]['hidden'] is True
+    assert response_json['field_options'][str(text_field.id)]['order'] == 32767
     assert response_json['field_options'][str(number_field.id)]['width'] == 200
     assert response_json['field_options'][str(number_field.id)]['hidden'] is False
+    assert response_json['field_options'][str(number_field.id)]['order'] == 32767
     options = grid.get_field_options()
     assert len(options) == 2
     assert options[0].field_id == text_field.id
     assert options[0].width == 300
     assert options[0].hidden is True
+    assert options[0].order == 32767
     assert options[1].field_id == number_field.id
     assert options[1].width == 200
     assert options[1].hidden is False
+    assert options[1].order == 32767

     url = reverse('api:database:views:grid:list', kwargs={'view_id': grid.id})
     response = api_client.patch(
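Besides the `include=field_options` rename, these hunks add a per-view `order` to the field options (defaulting to `32767`) next to `width` and `hidden`. A sketch of the corresponding PATCH body for a grid view; the field ids are placeholders:

```python
# Sketch of a grid view PATCH body carrying the new per-field 'order' option.
payload = {
    'field_options': {
        '151': {'width': 300, 'hidden': True, 'order': 0},   # placeholder id
        '160': {'width': 200, 'hidden': False, 'order': 1},  # placeholder id
    }
}
```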
@@ -1,6 +1,8 @@
 import pytest

-from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
+from rest_framework.status import (
+    HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND
+)

 from django.shortcuts import reverse

@@ -67,6 +69,15 @@ def test_list_views(api_client, data_fixture):
     assert response.status_code == HTTP_404_NOT_FOUND
     assert response.json()['error'] == 'ERROR_TABLE_DOES_NOT_EXIST'

+    url = reverse('api:database:views:list', kwargs={'table_id': table_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
+    data_fixture.create_template(group=table_1.database.group)
+    url = reverse('api:database:views:list', kwargs={'table_id': table_1.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_200_OK
+
+
 @pytest.mark.django_db
 def test_list_views_including_filters(api_client, data_fixture):

@@ -98,7 +109,7 @@ def test_list_views_including_filters(api_client, data_fixture):
     assert 'filters' not in response_json[1]

     response = api_client.get(
-        '{}?includes=filters'.format(reverse(
+        '{}?include=filters'.format(reverse(
             'api:database:views:list',
             kwargs={'table_id': table_1.id}
         )),

@@ -148,7 +159,7 @@ def test_list_views_including_sortings(api_client, data_fixture):
     assert 'sortings' not in response_json[1]

     response = api_client.get(
-        '{}?includes=sortings'.format(reverse(
+        '{}?include=sortings'.format(reverse(
             'api:database:views:list',
             kwargs={'table_id': table_1.id}
         )),

@@ -205,6 +216,10 @@ def test_create_view(api_client, data_fixture):
     assert response.status_code == HTTP_400_BAD_REQUEST
     assert response.json()['error'] == 'ERROR_USER_NOT_IN_GROUP'

+    url = reverse('api:database:views:list', kwargs={'table_id': table_2.id})
+    response = api_client.get(url)
+    assert response.status_code == HTTP_401_UNAUTHORIZED
+
     response = api_client.post(
         reverse('api:database:views:list', kwargs={'table_id': table.id}),
         {

@@ -232,7 +247,7 @@ def test_create_view(api_client, data_fixture):
     assert 'sortings' not in response_json

     response = api_client.post(
-        '{}?includes=filters,sortings'.format(
+        '{}?include=filters,sortings'.format(
             reverse('api:database:views:list', kwargs={'table_id': table.id})
         ),
         {

@@ -318,7 +333,7 @@ def test_get_view(api_client, data_fixture):

     url = reverse('api:database:views:item', kwargs={'view_id': view.id})
     response = api_client.get(
-        '{}?includes=filters,sortings'.format(url),
+        '{}?include=filters,sortings'.format(url),
         format='json',
         HTTP_AUTHORIZATION=f'JWT {token}'
     )

@@ -417,7 +432,7 @@ def test_update_view(api_client, data_fixture):
     filter_1 = data_fixture.create_view_filter(view=view)
     url = reverse('api:database:views:item', kwargs={'view_id': view.id})
     response = api_client.patch(
-        '{}?includes=filters,sortings'.format(url),
+        '{}?include=filters,sortings'.format(url),
         {'filter_type': 'AND'},
         format='json',
         HTTP_AUTHORIZATION=f'JWT {token}'
@@ -1,5 +1,4 @@
-import pytest

 from django.db import connection
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 from django.db.backends.dummy.base import DatabaseWrapper as DummyDatabaseWrapper

@@ -26,6 +25,7 @@
         assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
         assert schema_editor.alter_column_prepare_old_value == ''
         assert schema_editor.alter_column_prepare_new_value == ''
+        assert not schema_editor.force_alter_column
         assert connection.SchemaEditorClass != PostgresqlDatabaseSchemaEditor

     assert connection.SchemaEditorClass == PostgresqlDatabaseSchemaEditor

@@ -33,7 +33,8 @@
     with lenient_schema_editor(
         connection,
         "p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');",
-        "p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
+        "p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');",
+        True
     ) as schema_editor:
         assert schema_editor.alter_column_prepare_old_value == (
             "p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');"

@@ -41,3 +42,4 @@
         assert schema_editor.alter_column_prepare_new_value == (
             "p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
         )
+        assert schema_editor.force_alter_column
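The test now passes a third argument to `lenient_schema_editor` and asserts that it surfaces as `force_alter_column` on the editor. An illustrative reduction of the context-manager shape being tested follows; this is not the real implementation:

```python
# Illustrative reduction of the context manager under test; not the real code.
from contextlib import contextmanager


@contextmanager
def lenient_schema_editor(connection, prepare_old_value='',
                          prepare_new_value='', force_alter_column=False):
    with connection.schema_editor() as editor:
        # Attach the lenient conversion settings that the assertions inspect.
        editor.alter_column_prepare_old_value = prepare_old_value
        editor.alter_column_prepare_new_value = prepare_new_value
        editor.force_alter_column = force_alter_column
        yield editor
```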
@@ -0,0 +1,95 @@
import pytest

from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry


@pytest.mark.django_db
def test_alter_boolean_field_column_type(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table, order=1)

    handler = FieldHandler()
    field = handler.update_field(user=user, field=field, name='Text field')

    model = table.get_model()
    mapping = {
        '1': True,
        't': True,
        'y': True,
        'yes': True,
        'on': True,
        'YES': True,

        '': False,
        'f': False,
        'n': False,
        'false': False,
        'off': False,
        'Random text': False,
    }

    for value in mapping.keys():
        model.objects.create(**{f'field_{field.id}': value})

    # Change the field type to boolean and test if the values have been converted.
|
||||
field = handler.update_field(user=user, field=field, new_type_name='boolean')
|

    model = table.get_model()
    rows = model.objects.all()

    for index, value in enumerate(mapping.values()):
        assert getattr(rows[index], f'field_{field.id}') == value


@pytest.mark.django_db
def test_get_set_export_serialized_value_boolean_field(data_fixture):
    table = data_fixture.create_database_table()
    boolean_field = data_fixture.create_boolean_field(table=table)
    boolean_field_name = f'field_{boolean_field.id}'
    boolean_field_type = field_type_registry.get_by_model(boolean_field)

    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create(**{f'field_{boolean_field.id}': True})
    row_3 = model.objects.create(**{f'field_{boolean_field.id}': False})

    row_1.refresh_from_db()
    row_2.refresh_from_db()
    row_3.refresh_from_db()

    old_row_1_value = getattr(row_1, boolean_field_name)
    old_row_2_value = getattr(row_2, boolean_field_name)
    old_row_3_value = getattr(row_3, boolean_field_name)

    boolean_field_type.set_import_serialized_value(
        row_1,
        boolean_field_name,
        boolean_field_type.get_export_serialized_value(row_1, boolean_field_name, {}),
        {}
    )
    boolean_field_type.set_import_serialized_value(
        row_2,
        boolean_field_name,
        boolean_field_type.get_export_serialized_value(row_2, boolean_field_name, {}),
        {}
    )
    boolean_field_type.set_import_serialized_value(
        row_3,
        boolean_field_name,
        boolean_field_type.get_export_serialized_value(row_3, boolean_field_name, {}),
        {}
    )

    row_1.save()
    row_2.save()
    row_3.save()

    row_1.refresh_from_db()
    row_2.refresh_from_db()
    row_3.refresh_from_db()

    assert old_row_1_value == getattr(row_1, boolean_field_name)
    assert old_row_2_value == getattr(row_2, boolean_field_name)
    assert old_row_3_value == getattr(row_3, boolean_field_name)
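
# A minimal sketch (not part of the diff) of the export/import round trip the
# test above exercises: a field type must be able to serialize a row's cell
# value and restore an identical value from that serialized form.
#
#     serialized = field_type.get_export_serialized_value(row, field_name, {})
#     field_type.set_import_serialized_value(row, field_name, serialized, {})
#     row.save()
#     row.refresh_from_db()  # the cell value is unchanged by the round trip
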
@@ -3,11 +3,12 @@ from pytz import timezone
from datetime import date

from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from django.utils.timezone import make_aware, datetime, utc

from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import DateField
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.rows.handler import RowHandler

@@ -361,3 +362,159 @@ def test_converting_date_field_value(data_fixture):
    assert getattr(rows[0], f'field_{date_field_iso_24.id}') == '2021-07-22 12:45'

    assert getattr(rows[2], f'field_{date_field_eu_12.id}') is None


@pytest.mark.django_db
def test_negative_date_field_value(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    date_field = data_fixture.create_text_field(table=table)
    datetime_field = data_fixture.create_text_field(table=table)

    model = table.get_model()
    model.objects.create(**{
        f'field_{date_field.id}': '',
        f'field_{datetime_field.id}': '',
    })
    model.objects.create(**{
        f'field_{date_field.id}': 'INVALID',
        f'field_{datetime_field.id}': 'INVALID',
    })
    model.objects.create(**{
        f'field_{date_field.id}': ' ',
        f'field_{datetime_field.id}': ' ',
    })
    model.objects.create(**{
        f'field_{date_field.id}': '0',
        f'field_{datetime_field.id}': '0',
    })
    model.objects.create(**{
        f'field_{date_field.id}': '-0',
        f'field_{datetime_field.id}': '-0',
    })
    model.objects.create(**{
        f'field_{date_field.id}': '00000',
        f'field_{datetime_field.id}': '00000',
    })
    model.objects.create(**{
        f'field_{date_field.id}': None,
        f'field_{datetime_field.id}': None,
    })
    model.objects.create(**{
        f'field_{date_field.id}': '2010-02-03',
        f'field_{datetime_field.id}': '2010-02-03 12:30',
    })
    model.objects.create(**{
        f'field_{date_field.id}': '28/01/2012',
        f'field_{datetime_field.id}': '28/01/2012 12:30',
    })

    date_field = FieldHandler().update_field(
        user, date_field, new_type_name='date'
    )
    datetime_field = FieldHandler().update_field(
        user, datetime_field, new_type_name='date', date_include_time=True
    )

    model = table.get_model()
    results = model.objects.all()

    assert getattr(results[0], f'field_{date_field.id}') is None
    assert getattr(results[0], f'field_{datetime_field.id}') is None
    assert getattr(results[1], f'field_{date_field.id}') is None
    assert getattr(results[1], f'field_{datetime_field.id}') is None
    assert getattr(results[2], f'field_{date_field.id}') is None
    assert getattr(results[2], f'field_{datetime_field.id}') is None
    assert getattr(results[3], f'field_{date_field.id}') is None
    assert getattr(results[3], f'field_{datetime_field.id}') is None
    assert getattr(results[4], f'field_{date_field.id}') is None
    assert getattr(results[4], f'field_{datetime_field.id}') is None
    assert getattr(results[5], f'field_{date_field.id}') == date(1, 1, 1)
    assert getattr(results[5], f'field_{datetime_field.id}') == (
        datetime(1, 1, 1, tzinfo=timezone('utc'))
    )
    assert getattr(results[6], f'field_{date_field.id}') is None
    assert getattr(results[6], f'field_{datetime_field.id}') is None
    assert getattr(results[7], f'field_{date_field.id}') == date(2010, 2, 3)
    assert getattr(results[7], f'field_{datetime_field.id}') == (
        datetime(2010, 2, 3, 12, 30, 0, tzinfo=timezone('utc'))
    )
    assert getattr(results[8], f'field_{date_field.id}') == date(2012, 1, 28)
    assert getattr(results[8], f'field_{datetime_field.id}') == (
        datetime(2012, 1, 28, 12, 30, 0, tzinfo=timezone('utc'))
    )

@pytest.mark.django_db
def test_import_export_date_field(data_fixture):
    date_field = data_fixture.create_date_field()
    date_field_type = field_type_registry.get_by_model(date_field)
    number_serialized = date_field_type.export_serialized(date_field)
    number_field_imported = date_field_type.import_serialized(
        date_field.table,
        number_serialized,
        {}
    )
    assert date_field.date_format == number_field_imported.date_format
    assert date_field.date_include_time == number_field_imported.date_include_time
    assert date_field.date_time_format == number_field_imported.date_time_format


@pytest.mark.django_db
def test_get_set_export_serialized_value_date_field(data_fixture):
    table = data_fixture.create_database_table()
    date_field = data_fixture.create_date_field(table=table)
    datetime_field = data_fixture.create_date_field(table=table,
                                                    date_include_time=True)

    date_field_name = f'field_{date_field.id}'
    datetime_field_name = f'field_{datetime_field.id}'
    date_field_type = field_type_registry.get_by_model(date_field)

    model = table.get_model()
    row_1 = model.objects.create()
    row_2 = model.objects.create(**{
        f'field_{date_field.id}': '2010-02-03',
        f'field_{datetime_field.id}': make_aware(datetime(2010, 2, 3, 12, 30, 0), utc),
    })

    row_1.refresh_from_db()
    row_2.refresh_from_db()

    old_row_1_date = getattr(row_1, date_field_name)
    old_row_1_datetime = getattr(row_1, datetime_field_name)
    old_row_2_date = getattr(row_2, date_field_name)
    old_row_2_datetime = getattr(row_2, datetime_field_name)

    date_field_type.set_import_serialized_value(
        row_1,
        date_field_name,
        date_field_type.get_export_serialized_value(row_1, date_field_name, {}),
        {}
    )
    date_field_type.set_import_serialized_value(
        row_1,
        datetime_field_name,
        date_field_type.get_export_serialized_value(row_1, datetime_field_name, {}),
        {}
    )
    date_field_type.set_import_serialized_value(
        row_2,
        date_field_name,
        date_field_type.get_export_serialized_value(row_2, date_field_name, {}),
        {}
    )
    date_field_type.set_import_serialized_value(
        row_2,
        datetime_field_name,
        date_field_type.get_export_serialized_value(row_2, datetime_field_name, {}),
        {}
    )

    row_1.refresh_from_db()
    row_2.refresh_from_db()

    assert old_row_1_date == getattr(row_1, date_field_name)
    assert old_row_1_datetime == getattr(row_1, datetime_field_name)
    assert old_row_2_date == getattr(row_2, date_field_name)
    assert old_row_2_datetime == getattr(row_2, datetime_field_name)

@@ -0,0 +1,153 @@
import pytest
from django.db.models import Q
from django.db.models.functions import Reverse, Upper

from baserow.contrib.database.fields.field_filters import FilterBuilder, \
    FILTER_TYPE_AND, FILTER_TYPE_OR, AnnotatedQ


@pytest.mark.django_db
def test_building_filter_with_and_type_ands_all_provided_qs_together(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1, name='name')
    bool_field = data_fixture.create_boolean_field(table=table, order=2,
                                                   name='is_active')

    model = table.get_model()
    row_1 = model.objects.create(**{f'field_{text_field.id}': 'name',
                                    f'field_{bool_field.id}': True})
    model.objects.create(**{f'field_{text_field.id}': 'name',
                            f'field_{bool_field.id}': False})

    builder = FilterBuilder(filter_type=FILTER_TYPE_AND)
    builder.filter(Q(**{f'field_{text_field.id}': 'name'}))
    builder.filter(Q(**{f'field_{bool_field.id}': True}))

    queryset = builder.apply_to_queryset(model.objects)
    assert queryset.count() == 1
    assert row_1 in queryset


@pytest.mark.django_db
def test_building_filter_with_or_type_ors_all_provided_qs_together(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1, name='name')
    another_text_field = data_fixture.create_text_field(table=table, order=2,
                                                        name='surname')

    model = table.get_model()
    row_1 = model.objects.create(**{
        f'field_{text_field.id}': 'name',
        f'field_{another_text_field.id}': 'other'})
    row_2 = model.objects.create(**{
        f'field_{text_field.id}': 'not_name',
        f'field_{another_text_field.id}': 'extra'})
    model.objects.create(**{
        f'field_{text_field.id}': 'not_name',
        f'field_{another_text_field.id}': 'not_other'})

    builder = FilterBuilder(filter_type=FILTER_TYPE_OR)
    builder.filter(Q(**{f'field_{text_field.id}': 'name'}))
    builder.filter(Q(**{f'field_{another_text_field.id}': 'extra'}))

    queryset = builder.apply_to_queryset(model.objects)
    assert queryset.count() == 2
    assert row_1 in queryset
    assert row_2 in queryset


@pytest.mark.django_db
def test_building_filter_with_annotated_qs_annotates_prior_to_filter(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1, name='name')
    another_text_field = data_fixture.create_text_field(table=table, order=2,
                                                        name='surname')

    model = table.get_model()
    row_1 = model.objects.create(**{
        f'field_{text_field.id}': 'name',
        f'field_{another_text_field.id}': 'other'})
    model.objects.create(**{
        f'field_{text_field.id}': 'eman',
        f'field_{another_text_field.id}': 'extra'})
    model.objects.create(**{
        f'field_{text_field.id}': 'not_name',
        f'field_{another_text_field.id}': 'not_other'})

    builder = FilterBuilder(filter_type=FILTER_TYPE_OR)
    builder.filter(AnnotatedQ(annotation={
        'reversed_name': Reverse(f'field_{text_field.id}')},
        q={f'field_{text_field.id}': 'name'}))
    builder.filter(Q(**{f'reversed_name': 'eman'}))

    queryset = builder.apply_to_queryset(model.objects)
    assert queryset.count() == 1
    assert row_1 in queryset


@pytest.mark.django_db
def test_building_filter_with_many_annotated_qs_merges_the_annotations(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1, name='name')
    another_text_field = data_fixture.create_text_field(table=table, order=2,
                                                        name='surname')

    model = table.get_model()
    row_1 = model.objects.create(**{
        f'field_{text_field.id}': 'name',
        f'field_{another_text_field.id}': 'other'})
    model.objects.create(**{
        f'field_{text_field.id}': 'eman',
        f'field_{another_text_field.id}': 'extra'})
    model.objects.create(**{
        f'field_{text_field.id}': 'not_name',
        f'field_{another_text_field.id}': 'not_other'})

    builder = FilterBuilder(filter_type=FILTER_TYPE_AND)
    builder.filter(AnnotatedQ(annotation={
        'reversed_name': Reverse(f'field_{text_field.id}')},
        q={f'field_{text_field.id}': 'name'}))
    builder.filter(AnnotatedQ(annotation={
        'upper_name': Upper(f'field_{text_field.id}')},
        q={f'field_{text_field.id}': 'name'}))
    builder.filter(Q(reversed_name='eman'))
    builder.filter(Q(upper_name='NAME'))

    queryset = builder.apply_to_queryset(model.objects)
    assert queryset.count() == 1
    assert row_1 in queryset


@pytest.mark.django_db
def test_can_invert_an_annotated_q(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1, name='name')
    another_text_field = data_fixture.create_text_field(table=table, order=2,
                                                        name='surname')

    model = table.get_model()
    model.objects.create(**{
        f'field_{text_field.id}': 'name',
        f'field_{another_text_field.id}': 'other'})
    row_2 = model.objects.create(**{
        f'field_{text_field.id}': 'eman',
        f'field_{another_text_field.id}': 'extra'})
    row_3 = model.objects.create(**{
        f'field_{text_field.id}': 'not_name',
        f'field_{another_text_field.id}': 'not_other'})

    builder = FilterBuilder(filter_type=FILTER_TYPE_AND)
    q_to_invert = AnnotatedQ(
        annotation={'reversed_name': Reverse(f'field_{text_field.id}')},
        q={f'reversed_name': 'eman'})
    builder.filter(~q_to_invert)

    queryset = builder.apply_to_queryset(model.objects)
    assert queryset.count() == 2
    assert row_2 in queryset
    assert row_3 in queryset
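
# A minimal usage sketch (derived from the tests above, not part of the diff):
# a FilterBuilder combines every Q it receives with AND or OR depending on its
# filter_type, and an AnnotatedQ first applies its annotations to the queryset
# so that the q kwargs can reference the annotated names.
#
#     builder = FilterBuilder(filter_type=FILTER_TYPE_AND)
#     builder.filter(AnnotatedQ(
#         annotation={'reversed_name': Reverse(f'field_{text_field.id}')},
#         q={'reversed_name': 'eman'},
#     ))
#     builder.filter(Q(**{f'field_{text_field.id}': 'name'}))
#     queryset = builder.apply_to_queryset(model.objects)
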
@@ -1,18 +1,126 @@
import pytest
import itertools
from datetime import date
from decimal import Decimal
from unittest.mock import patch

from baserow.core.exceptions import UserNotInGroupError
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import (
    Field, TextField, NumberField, BooleanField, SelectOption
)
from baserow.contrib.database.fields.field_types import TextFieldType
from baserow.contrib.database.fields.registries import field_type_registry
import pytest
from django.db import models
from faker import Faker

from baserow.contrib.database.fields.exceptions import (
    FieldTypeDoesNotExist, PrimaryFieldAlreadyExists, CannotDeletePrimaryField,
    FieldDoesNotExist, IncompatiblePrimaryFieldTypeError, CannotChangeFieldType
)
from baserow.contrib.database.fields.field_types import TextFieldType, LongTextFieldType
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import (
    Field, TextField, NumberField, BooleanField, SelectOption, LongTextField,
    NUMBER_TYPE_CHOICES
)
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.rows.handler import RowHandler
from baserow.core.exceptions import UserNotInGroupError


def dict_to_pairs(field_type_kwargs):
    pairs_dict = {}
    for name, options in field_type_kwargs.items():
        pairs_dict[name] = []
        if not isinstance(options, list):
            options = [options]
        for option in options:
            pairs_dict[name].append((name, option))
    return pairs_dict


def construct_all_possible_kwargs(field_type_kwargs):
    pairs_dict = dict_to_pairs(field_type_kwargs)
    args = [dict(pairwise_args) for pairwise_args in itertools.product(
        *pairs_dict.values())]

    return args

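
# An illustration (not in the original commit) of what construct_all_possible_kwargs
# returns: the cartesian product of every option for every kwarg, e.g.
#
#     construct_all_possible_kwargs({
#         'number_type': ['INTEGER', 'DECIMAL'],
#         'number_negative': [True, False],
#     })
#     # == [{'number_type': 'INTEGER', 'number_negative': True},
#     #     {'number_type': 'INTEGER', 'number_negative': False},
#     #     {'number_type': 'DECIMAL', 'number_negative': True},
#     #     {'number_type': 'DECIMAL', 'number_negative': False}]
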
# You must add --runslow to pytest to run this test. You can do this in IntelliJ by
# editing the run config for this test and adding --runslow to additional args.
@pytest.mark.django_db
@pytest.mark.slow
def test_can_convert_between_all_fields(data_fixture):
    """
    A nuclear option test turned off by default to help verify changes made to
    field conversions work in every possible conversion scenario. This test checks
    that it is possible to convert from every possible field to every other
    possible field, including converting to themselves. It only checks that the
    conversion does not raise any exceptions.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    table = data_fixture.create_database_table(database=database, user=user)
    link_table = data_fixture.create_database_table(database=database, user=user)
    handler = FieldHandler()
    row_handler = RowHandler()
    fake = Faker()

    model = table.get_model()
    cache = {}
    # Make a blank row to test empty field conversion also.
    model.objects.create(**{})
    second_row_with_values = model.objects.create(**{})

    # Some baserow field types have multiple different 'modes' which result in
    # different conversion behaviour or entirely different database columns being
    # created. Here the kwargs which control these modes are enumerated so we can then
    # generate every possible type of conversion.
    extra_kwargs_for_type = {
        'date': {
            'date_include_time': [True, False],
        },
        'number': {
            'number_type': [number_type for number_type, _ in NUMBER_TYPE_CHOICES],
            'number_negative': [True, False],
        },
        'link_row': {
            'link_row_table': link_table
        }
    }

    all_possible_kwargs_per_type = {}
    for field_type_name in field_type_registry.get_types():
        extra_kwargs = extra_kwargs_for_type.get(field_type_name, {})
        all_possible_kwargs = construct_all_possible_kwargs(extra_kwargs)
        all_possible_kwargs_per_type[field_type_name] = all_possible_kwargs

    i = 1
    for field_type_name, all_possible_kwargs in all_possible_kwargs_per_type.items():
        for kwargs in all_possible_kwargs:
            for inner_field_type_name in field_type_registry.get_types():
                for inner_kwargs in all_possible_kwargs_per_type[inner_field_type_name]:
                    field_type = field_type_registry.get(field_type_name)
                    field_name = f'field_{i}'
                    from_field = handler.create_field(
                        user=user, table=table, type_name=field_type_name,
                        name=field_name,
                        **kwargs
                    )
                    random_value = field_type.random_value(
                        from_field,
                        fake,
                        cache
                    )
                    if isinstance(random_value, date):
                        # Faker produces subtypes of date / datetime which baserow
                        # does not want; instead just convert to str.
                        random_value = str(random_value)
                    row_handler.update_row(user=user, table=table,
                                           row_id=second_row_with_values.id,
                                           values={
                                               f'field_{from_field.id}': random_value
                                           })
                    handler.update_field(user=user, field=from_field,
                                         new_type_name=inner_field_type_name,
                                         **inner_kwargs)
                    i = i + 1


@pytest.mark.django_db

@@ -269,6 +377,295 @@ def test_update_field_failing(data_fixture):
    assert TextField.objects.all().count() == 1


@pytest.mark.django_db
def test_update_field_when_underlying_sql_type_doesnt_change(data_fixture):
    class AlwaysLowercaseTextField(TextFieldType):
        type = 'lowercase_text'
        model_class = LongTextField

        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (lower(p_in));'''

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_text_field = data_fixture.create_text_field(table=table, order=1)

    model = table.get_model()

    field_name = f'field_{existing_text_field.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {'lowercase_text': AlwaysLowercaseTextField()}
    ):
        handler.update_field(user=user,
                             field=existing_text_field,
                             new_type_name='lowercase_text')

        row.refresh_from_db()
        assert getattr(row, field_name) == 'test'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 0
        assert LongTextField.objects.all().count() == 1


@pytest.mark.django_db
def test_field_which_changes_its_underlying_type_will_have_alter_sql_run(data_fixture):
    class ReversingTextFieldUsingBothVarCharAndTextSqlTypes(TextFieldType):

        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (reverse(p_in));'''

        def get_model_field(self, instance, **kwargs):
            kwargs['null'] = True
            kwargs['blank'] = True
            if instance.text_default == 'use_other_sql_type':
                return models.TextField(**kwargs)
            else:
                return models.CharField(**kwargs)

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_text_field = data_fixture.create_text_field(table=table, order=1)

    model = table.get_model()

    field_name = f'field_{existing_text_field.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {'text': ReversingTextFieldUsingBothVarCharAndTextSqlTypes()}
    ):
        # Update to the same baserow type, but due to this field's implementation of
        # get_model_field this will alter the underlying database column from type
        # of varchar to text, which should make our reversing alter sql run.
        handler.update_field(user=user,
                             field=existing_text_field,
                             new_type_name='text',
                             text_default='use_other_sql_type')

        row.refresh_from_db()
        assert getattr(row, field_name) == 'tseT'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 1


@pytest.mark.django_db
def test_just_changing_a_fields_name_will_not_run_alter_sql(data_fixture):
    class AlwaysReverseOnUpdateField(TextFieldType):
        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (reverse(p_in));'''

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_text_field = data_fixture.create_text_field(table=table, order=1)

    model = table.get_model()

    field_name = f'field_{existing_text_field.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {'text': AlwaysReverseOnUpdateField()}
    ):
        handler.update_field(user=user, field=existing_text_field,
                             new_type_name='text', name='new_name')

        row.refresh_from_db()
        # The field has not been reversed as just the name changed!
        assert getattr(row, field_name) == 'Test'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 1


@pytest.mark.django_db
def test_when_field_type_forces_same_type_alter_fields_alter_sql_is_run(data_fixture):
    class SameTypeAlwaysReverseOnUpdateField(TextFieldType):
        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (reverse(p_in));'''

        def force_same_type_alter_column(self, from_field, to_field):
            return True

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_text_field = data_fixture.create_text_field(table=table, order=1)

    model = table.get_model()

    field_name = f'field_{existing_text_field.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {'text': SameTypeAlwaysReverseOnUpdateField()}
    ):
        handler.update_field(user=user, field=existing_text_field,
                             new_type_name='text', name='new_name')

        row.refresh_from_db()
        # The alter sql has been run due to the force override
        assert getattr(row, field_name) == 'tseT'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 1


@pytest.mark.django_db
def test_update_field_with_type_error_on_conversion_should_null_field(data_fixture):
    class AlwaysThrowsSqlExceptionOnConversionField(TextFieldType):
        type = 'throws_field'
        model_class = LongTextField

        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (lower(p_in::numeric::text));'''

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_text_field = data_fixture.create_text_field(table=table, order=1)

    model = table.get_model()

    field_name = f'field_{existing_text_field.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {'throws_field': AlwaysThrowsSqlExceptionOnConversionField()}
    ):
        handler.update_field(user=user,
                             field=existing_text_field,
                             new_type_name='throws_field')

        row.refresh_from_db()
        assert getattr(row, field_name) is None
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 0
        assert LongTextField.objects.all().count() == 1


@pytest.mark.django_db
def test_update_field_when_underlying_sql_type_doesnt_change_with_vars(data_fixture):
    class ReversesWhenConvertsAwayTextField(LongTextFieldType):
        type = 'reserves_text'
        model_class = LongTextField

        def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
            return '''p_in = concat(reverse(p_in), %(some_variable)s);''', {
                "some_variable": "_POST_FIX"
            }

    class AlwaysLowercaseTextField(TextFieldType):
        type = 'lowercase_text'
        model_class = LongTextField

        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = concat(%(other_variable)s, lower(p_in));''', {
                "other_variable": "pre_fix_"
            }

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_field_with_old_value_prep = data_fixture.create_long_text_field(
        table=table)

    model = table.get_model()

    field_name = f'field_{existing_field_with_old_value_prep.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {
            'lowercase_text': AlwaysLowercaseTextField(),
            'long_text': ReversesWhenConvertsAwayTextField()
        }
    ):
        handler.update_field(user=user,
                             field=existing_field_with_old_value_prep,
                             new_type_name='lowercase_text')

        row.refresh_from_db()
        assert getattr(row, field_name) == 'pre_fix_tset_post_fix'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 0
        assert LongTextField.objects.all().count() == 1


@pytest.mark.django_db
def test_update_field_when_underlying_sql_type_doesnt_change_old_prep(data_fixture):
    class ReversesWhenConvertsAwayTextField(LongTextFieldType):
        type = 'reserves_text'
        model_class = LongTextField

        def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
            return '''p_in = (reverse(p_in));'''

    class AlwaysLowercaseTextField(TextFieldType):
        type = 'lowercase_text'
        model_class = LongTextField

        def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
            return '''p_in = (lower(p_in));'''

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    existing_field_with_old_value_prep = data_fixture.create_long_text_field(
        table=table)

    model = table.get_model()

    field_name = f'field_{existing_field_with_old_value_prep.id}'
    row = model.objects.create(**{
        field_name: 'Test',
    })

    handler = FieldHandler()

    with patch.dict(
        field_type_registry.registry,
        {
            'lowercase_text': AlwaysLowercaseTextField(),
            'long_text': ReversesWhenConvertsAwayTextField()
        }
    ):
        handler.update_field(user=user,
                             field=existing_field_with_old_value_prep,
                             new_type_name='lowercase_text')

        row.refresh_from_db()
        assert getattr(row, field_name) == 'tset'
        assert Field.objects.all().count() == 1
        assert TextField.objects.all().count() == 0
        assert LongTextField.objects.all().count() == 1


@pytest.mark.django_db
@patch('baserow.contrib.database.fields.signals.field_deleted.send')
def test_delete_field(send_mock, data_fixture):

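
# The registration pattern shared by the tests above, in isolation (a sketch,
# not additional API): patch.dict temporarily adds a custom field type to the
# registry so that update_field resolves it like a built-in type, and the
# original registry is restored when the with block exits.
#
#     with patch.dict(field_type_registry.registry,
#                     {'lowercase_text': AlwaysLowercaseTextField()}):
#         handler.update_field(user=user, field=field,
#                              new_type_name='lowercase_text')
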
@@ -1,160 +1,44 @@
import pytest
import json

from django.test.utils import override_settings
from faker import Faker
from decimal import Decimal

from django.core.exceptions import ValidationError

from baserow.contrib.database.fields.field_types import PhoneNumberFieldType
from baserow.core.user_files.exceptions import (
    InvalidUserFileNameError, UserFileDoesNotExist
)
from baserow.contrib.database.fields.models import (
    LongTextField, URLField, EmailField, FileField
    LongTextField, URLField, EmailField, FileField, PhoneNumberField
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.rows.handler import RowHandler


@pytest.mark.django_db
@pytest.mark.parametrize(
    "expected,field_kwargs",
    [
        (
            [
                9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
                None
            ],
            {'number_type': 'INTEGER', 'number_negative': False}
        ),
        (
            [9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
             None, None, None, None, None],
            {'number_type': 'INTEGER', 'number_negative': True}
        ),
        (
            [
                Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
                Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
                Decimal('0.0'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': False,
                'number_decimal_places': 1
            }
        ),
        (
            [
                Decimal('9223372036854775807.000'), Decimal('100.000'),
                Decimal('100.220'), Decimal('100.600'),
                Decimal('-9223372036854775808.0'), Decimal('-100.0'),
                Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': True,
                'number_decimal_places': 3
            }
        )
    ]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table, order=1)
def test_import_export_text_field(data_fixture):
    id_mapping = {}

    handler = FieldHandler()
    field = handler.update_field(user=user, field=field, name='Text field')

    model = table.get_model()
    model.objects.create(**{f'field_{field.id}': '9223372036854775807'})
    model.objects.create(**{f'field_{field.id}': '100'})
    model.objects.create(**{f'field_{field.id}': '100.22'})
    model.objects.create(**{f'field_{field.id}': '100.59999'})
    model.objects.create(**{f'field_{field.id}': '-9223372036854775808'})
    model.objects.create(**{f'field_{field.id}': '-100'})
    model.objects.create(**{f'field_{field.id}': '-100.22'})
    model.objects.create(**{f'field_{field.id}': '-100.5999'})
    model.objects.create(**{f'field_{field.id}': '100.59.99'})
    model.objects.create(**{f'field_{field.id}': '-100.59.99'})
    model.objects.create(**{f'field_{field.id}': '100TEST100.10'})
    model.objects.create(**{f'field_{field.id}': '!@#$%%^^&&^^%$$'})
    model.objects.create(**{f'field_{field.id}': '!@#$%%^^5.2&&^^%$$'})

    # Change the field type to a number and test if the values have been changed.
    field = handler.update_field(user=user, field=field, new_type_name='number',
                                 **field_kwargs)

    model = table.get_model()
    rows = model.objects.all()
    for index, row in enumerate(rows):
        assert getattr(row, f'field_{field.id}') == expected[index]


@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    number_field = data_fixture.create_number_field(table=table, order=1,
                                                    number_negative=True)
    decimal_field = data_fixture.create_number_field(table=table, order=2,
                                                     number_type='DECIMAL',
                                                     number_negative=True,
                                                     number_decimal_places=2)

    model = table.get_model()
    model.objects.create(**{
        f'field_{number_field.id}': -10,
        f'field_{decimal_field.id}': Decimal('-10.10')
    })

    handler = FieldHandler()
    number_field = handler.update_field(user=user, field=number_field,
                                        number_negative=False)
    decimal_field = handler.update_field(user=user, field=decimal_field,
                                         number_negative=False)

    model = table.get_model()
    rows = model.objects.all()
    assert getattr(rows[0], f'field_{number_field.id}') == 0
    assert getattr(rows[0], f'field_{decimal_field.id}') == 0.00


@pytest.mark.django_db
def test_alter_boolean_field_column_type(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table, order=1)

    handler = FieldHandler()
    field = handler.update_field(user=user, field=field, name='Text field')

    model = table.get_model()
    mapping = {
        '1': True,
        't': True,
        'y': True,
        'yes': True,
        'on': True,
        'YES': True,

        '': False,
        'f': False,
        'n': False,
        'false': False,
        'off': False,
        'Random text': False,
    }

    for value in mapping.keys():
        model.objects.create(**{f'field_{field.id}': value})

    # Change the field type to a boolean and test if the values have been changed.
    field = handler.update_field(user=user, field=field, new_type_name='boolean')

    model = table.get_model()
    rows = model.objects.all()

    for index, value in enumerate(mapping.values()):
        assert getattr(rows[index], f'field_{field.id}') == value
    text_field = data_fixture.create_text_field(
        name='Text name',
        text_default='Text default'
    )
    text_field_type = field_type_registry.get_by_model(text_field)
    text_serialized = text_field_type.export_serialized(text_field)
    text_field_imported = text_field_type.import_serialized(
        text_field.table,
        text_serialized,
        id_mapping
    )
    assert text_field.id != text_field_imported.id
    assert text_field.name == text_field_imported.name
    assert text_field.order == text_field_imported.order
    assert text_field.primary == text_field_imported.primary
    assert text_field.text_default == text_field_imported.text_default
    assert id_mapping['database_fields'][text_field.id] == text_field_imported.id


@pytest.mark.django_db

@@ -511,3 +395,102 @@ def test_file_field_type(data_fixture):
    assert results[0].text is None
    assert results[1].text is None
    assert results[2].text is None


@pytest.mark.django_db
@override_settings(debug=True)
def test_phone_number_field_type(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    data_fixture.create_database_table(user=user, database=table.database)

    field_handler = FieldHandler()
    row_handler = RowHandler()

    text_field = field_handler.create_field(user=user, table=table,
                                            order=1,
                                            type_name='text',
                                            name='name')
    phone_number_field = field_handler.create_field(user=user, table=table,
                                                    type_name='phone_number',
                                                    name='phonenumber')
    email_field = field_handler.create_field(user=user, table=table,
                                             type_name='email',
                                             name='email')
    number_field = data_fixture.create_number_field(table=table, order=1,
                                                    number_negative=True, name="number")

    assert len(PhoneNumberField.objects.all()) == 1
    model = table.get_model(attribute_names=True)

    with pytest.raises(ValidationError):
        row_handler.create_row(user=user, table=table, values={
            'phonenumber': 'invalid phone number'
        }, model=model)

    with pytest.raises(ValidationError):
        row_handler.create_row(user=user, table=table, values={
            'phonenumber': 'Phone: 2312321 2349432 '
        }, model=model)
    with pytest.raises(ValidationError):
        row_handler.create_row(user=user, table=table, values={
            'phonenumber': '1' * (PhoneNumberFieldType.MAX_PHONE_NUMBER_LENGTH + 1)
        }, model=model)

    max_length_phone_number = '1' * PhoneNumberFieldType.MAX_PHONE_NUMBER_LENGTH
    row_handler.create_row(user=user, table=table, values={
        'name': '+45(1424) 322314 324234',
        'phonenumber': max_length_phone_number,
        'number': 1234534532,
        'email': 'a_valid_email_to_be_blanked_after_conversion@email.com'
    }, model=model)
    row_handler.create_row(user=user, table=table, values={
        'name': 'some text which should be blanked out after conversion',
        'phonenumber': '1234567890 NnXx,+._*()#=;/ -',
        'number': 0
    }, model=model)
    row_handler.create_row(user=user, table=table, values={
        'name': max_length_phone_number,
        'phonenumber': '',
        'number': -10230450,
    }, model=model)
    row_handler.create_row(user=user, table=table, values={
        'phonenumber': None,
        'name': '1' * (PhoneNumberFieldType.MAX_PHONE_NUMBER_LENGTH + 1)
    }, model=model)
    row_handler.create_row(user=user, table=table, values={}, model=model)

    # No actual database type change occurs here as a phone number field is also a text
    # field. Instead the after_update hook is being used to clear out invalid
    # phone numbers.
    field_handler.update_field(user=user, field=text_field,
                               new_type_name='phone_number')

    field_handler.update_field(user=user, field=number_field,
                               new_type_name='phone_number')
    field_handler.update_field(user=user, field=email_field,
                               new_type_name='phone_number')

    model = table.get_model(attribute_names=True)
    rows = model.objects.all()

    assert rows[0].name == '+45(1424) 322314 324234'
    assert rows[0].phonenumber == max_length_phone_number
    assert rows[0].number == '1234534532'
    assert rows[0].email == ''

    assert rows[1].name == ''
    assert rows[1].phonenumber == '1234567890 NnXx,+._*()#=;/ -'
    assert rows[1].number == '0'

    assert rows[2].name == max_length_phone_number
    assert rows[2].phonenumber == ''
    assert rows[2].number == '-10230450'

    assert rows[3].name == ''
    assert rows[3].phonenumber == ''
    assert rows[3].number == ''

    field_handler.delete_field(user=user, field=phone_number_field)
    assert len(PhoneNumberField.objects.all()) == 3

@@ -5,6 +5,7 @@ from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD
from django.shortcuts import reverse
from django.db import connections

from baserow.core.handler import CoreHandler
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import LinkRowField
@@ -692,3 +693,69 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert len(response_json[f'field_{link_row_field.id}']) == 0


@pytest.mark.django_db
def test_import_export_link_row_field(data_fixture, user_tables_in_separate_db):
    user = data_fixture.create_user()
    imported_group = data_fixture.create_group(user=user)
    database = data_fixture.create_database_application(user=user, name='Placeholder')
    table = data_fixture.create_database_table(name='Example', database=database)
    customers_table = data_fixture.create_database_table(
        name='Customers', database=database
    )
    field_handler = FieldHandler()
    core_handler = CoreHandler()
    link_row_field = field_handler.create_field(
        user=user,
        table=table,
        type_name='link_row',
        link_row_table=customers_table
    )

    row_handler = RowHandler()
    c_row = row_handler.create_row(user=user, table=customers_table, values={})
    c_row_2 = row_handler.create_row(user=user, table=customers_table, values={})
    row = row_handler.create_row(user=user, table=table, values={
        f'field_{link_row_field.id}': [c_row.id, c_row_2.id]
    })

    exported_applications = core_handler.export_group_applications(database.group)
    imported_applications, id_mapping = core_handler.import_application_to_group(
        imported_group,
        exported_applications
    )
    imported_database = imported_applications[0]
    imported_tables = imported_database.table_set.all()
    imported_table = imported_tables[0]
    imported_customers_table = imported_tables[1]
    imported_link_row_field = imported_table.field_set.all().first().specific
    imported_link_row_relation_field = (
        imported_customers_table.field_set.all().first().specific
    )

    assert imported_table.id != table.id
    assert imported_table.name == table.name
    assert imported_customers_table.id != customers_table.id
    assert imported_customers_table.name == customers_table.name
    assert imported_link_row_field.id != link_row_field.id
    assert imported_link_row_field.name == link_row_field.name
    assert imported_link_row_field.link_row_table_id == imported_customers_table.id
    assert imported_link_row_relation_field.link_row_table_id == imported_table.id
    assert imported_link_row_field.link_row_relation_id == (
        imported_link_row_relation_field.link_row_relation_id
    )

    imported_c_row = row_handler.get_row(user=user, table=imported_customers_table,
                                         row_id=c_row.id)
    imported_c_row_2 = row_handler.get_row(user=user, table=imported_customers_table,
                                           row_id=c_row_2.id)
    imported_row = row_handler.get_row(user=user, table=imported_table, row_id=row.id)

    assert imported_row.id == row.id
    assert imported_c_row.id == c_row.id
    assert imported_c_row_2.id == c_row_2.id
    assert [
        r.id
        for r in getattr(imported_row, f'field_{imported_link_row_field.id}').all()
    ] == [imported_c_row.id, imported_c_row_2.id]
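
# The group-level round trip used above, reduced to its core calls (a sketch
# based on this test, not extra API): exporting a group's applications yields a
# serialized structure that can be imported into another group, and id_mapping
# records how the original ids map onto the newly created objects.
#
#     exported = core_handler.export_group_applications(database.group)
#     applications, id_mapping = core_handler.import_application_to_group(
#         imported_group, exported
#     )
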
@@ -0,0 +1,131 @@
import pytest

from decimal import Decimal

from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry


@pytest.mark.django_db
@pytest.mark.parametrize(
    "expected,field_kwargs",
    [
        (
            [
                9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
                None
            ],
            {'number_type': 'INTEGER', 'number_negative': False}
        ),
        (
            [9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
             None, None, None, None, None],
            {'number_type': 'INTEGER', 'number_negative': True}
        ),
        (
            [
                Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
                Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
                Decimal('0.0'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': False,
                'number_decimal_places': 1
            }
        ),
        (
            [
                Decimal('9223372036854775807.000'), Decimal('100.000'),
                Decimal('100.220'), Decimal('100.600'),
                Decimal('-9223372036854775808.0'), Decimal('-100.0'),
                Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': True,
                'number_decimal_places': 3
            }
        )
    ]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table, order=1)

    handler = FieldHandler()
    field = handler.update_field(user=user, field=field, name='Text field')

    model = table.get_model()
    model.objects.create(**{f'field_{field.id}': '9223372036854775807'})
    model.objects.create(**{f'field_{field.id}': '100'})
    model.objects.create(**{f'field_{field.id}': '100.22'})
    model.objects.create(**{f'field_{field.id}': '100.59999'})
    model.objects.create(**{f'field_{field.id}': '-9223372036854775808'})
    model.objects.create(**{f'field_{field.id}': '-100'})
    model.objects.create(**{f'field_{field.id}': '-100.22'})
    model.objects.create(**{f'field_{field.id}': '-100.5999'})
    model.objects.create(**{f'field_{field.id}': '100.59.99'})
    model.objects.create(**{f'field_{field.id}': '-100.59.99'})
    model.objects.create(**{f'field_{field.id}': '100TEST100.10'})
    model.objects.create(**{f'field_{field.id}': '!@#$%%^^&&^^%$$'})
    model.objects.create(**{f'field_{field.id}': '!@#$%%^^5.2&&^^%$$'})

    # Change the field type to a number and test if the values have been changed.
    field = handler.update_field(user=user, field=field, new_type_name='number',
                                 **field_kwargs)

    model = table.get_model()
    rows = model.objects.all()
    for index, row in enumerate(rows):
        assert getattr(row, f'field_{field.id}') == expected[index]


@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    number_field = data_fixture.create_number_field(table=table, order=1,
                                                    number_negative=True)
    decimal_field = data_fixture.create_number_field(table=table, order=2,
                                                     number_type='DECIMAL',
                                                     number_negative=True,
                                                     number_decimal_places=2)

    model = table.get_model()
    model.objects.create(**{
        f'field_{number_field.id}': -10,
        f'field_{decimal_field.id}': Decimal('-10.10')
    })

    handler = FieldHandler()
    number_field = handler.update_field(user=user, field=number_field,
                                        number_negative=False)
    decimal_field = handler.update_field(user=user, field=decimal_field,
                                         number_negative=False)

    model = table.get_model()
    rows = model.objects.all()
    assert getattr(rows[0], f'field_{number_field.id}') == 0
    assert getattr(rows[0], f'field_{decimal_field.id}') == 0.00


@pytest.mark.django_db
def test_import_export_number_field(data_fixture):
    number_field = data_fixture.create_number_field(
        name='Number field',
        number_type='DECIMAL',
        number_negative=True,
        number_decimal_places=2
    )
    number_field_type = field_type_registry.get_by_model(number_field)
    number_serialized = number_field_type.export_serialized(number_field)
    number_field_imported = number_field_type.import_serialized(
        number_field.table,
        number_serialized,
        {}
    )
    assert number_field.number_type == number_field_imported.number_type
    assert number_field.number_negative == number_field_imported.number_negative
    assert number_field.number_decimal_places == (
        number_field_imported.number_decimal_places
    )

@@ -7,9 +7,11 @@ from django.core.exceptions import ValidationError

from faker import Faker

from baserow.core.handler import CoreHandler
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption, SingleSelectField
from baserow.contrib.database.fields.field_types import SingleSelectFieldType
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.api.rows.serializers import (
@@ -647,3 +649,76 @@ def test_single_select_field_type_random_value(data_fixture):
    )
    random_choice_2 = SingleSelectFieldType().random_value(email_field, fake, cache)
    assert random_choice_2 is None


@pytest.mark.django_db
def test_import_export_single_select_field(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field_handler = FieldHandler()
    field = field_handler.create_field(
        user=user, table=table, type_name='single_select', name='Single select',
        select_options=[{'value': 'Option 1', 'color': 'blue'}]
    )
    select_option = field.select_options.all().first()
    field_type = field_type_registry.get_by_model(field)
    field_serialized = field_type.export_serialized(field)
    id_mapping = {}
    field_imported = field_type.import_serialized(
        table,
        field_serialized,
        id_mapping
    )

    assert field_imported.select_options.all().count() == 1
    imported_select_option = field_imported.select_options.all().first()
    assert imported_select_option.id != select_option.id
    assert imported_select_option.value == select_option.value
    assert imported_select_option.color == select_option.color
    assert imported_select_option.order == select_option.order


@pytest.mark.django_db
def test_get_set_export_serialized_value_single_select_field(data_fixture):
    user = data_fixture.create_user()
    group = data_fixture.create_group(user=user)
    imported_group = data_fixture.create_group(user=user)
    database = data_fixture.create_database_application(group=group)
    table = data_fixture.create_database_table(database=database)
    field = data_fixture.create_single_select_field(table=table)
    option_a = data_fixture.create_select_option(field=field, value='A', color='green')
    option_b = data_fixture.create_select_option(field=field, value='B', color='red')

    core_handler = CoreHandler()

    model = table.get_model()
    model.objects.create()
    model.objects.create(**{f'field_{field.id}_id': option_a.id})
    model.objects.create(**{f'field_{field.id}_id': option_b.id})

    exported_applications = core_handler.export_group_applications(group)
    imported_applications, id_mapping = core_handler.import_application_to_group(
        imported_group,
        exported_applications
    )
    imported_database = imported_applications[0]
    imported_table = imported_database.table_set.all()[0]
    imported_field = imported_table.field_set.all().first().specific

    assert imported_table.id != table.id
    assert imported_field.id != field.id

    imported_model = imported_table.get_model()
    all_rows = imported_model.objects.all()
    assert len(all_rows) == 3
    imported_row_1 = all_rows[0]
    imported_row_2 = all_rows[1]
    imported_row_3 = all_rows[2]

    assert getattr(imported_row_1, f'field_{imported_field.id}') is None
    assert getattr(imported_row_2, f'field_{imported_field.id}_id') != option_a.id
    assert getattr(imported_row_2, f'field_{imported_field.id}').value == 'A'
    assert getattr(imported_row_2, f'field_{imported_field.id}').color == 'green'
    assert getattr(imported_row_3, f'field_{imported_field.id}_id') != option_b.id
    assert getattr(imported_row_3, f'field_{imported_field.id}').value == 'B'
    assert getattr(imported_row_3, f'field_{imported_field.id}').color == 'red'

@@ -1,9 +1,12 @@
from datetime import datetime

import pytest
from decimal import Decimal

from unittest.mock import MagicMock

from django.db import models
from django.utils.timezone import make_aware, utc

from baserow.contrib.database.table.models import Table
from baserow.contrib.database.fields.exceptions import (
@@ -147,31 +150,57 @@ def test_enhance_by_fields_queryset(data_fixture):


@pytest.mark.django_db
def test_search_all_fields_queryset(data_fixture):
def test_search_all_fields_queryset(data_fixture, user_tables_in_separate_db):
    table = data_fixture.create_database_table(name='Cars')
    data_fixture.create_text_field(table=table, order=0, name='Name')
    data_fixture.create_text_field(table=table, order=1, name='Color')
    data_fixture.create_number_field(table=table, order=2, name='Price')
    data_fixture.create_long_text_field(table=table, order=3, name='Description')
    data_fixture.create_date_field(table=table, order=4, name='Date', date_format="EU")
    data_fixture.create_date_field(table=table, order=5, name='DateTime',
                                   date_format="US", date_include_time=True,
                                   date_time_format="24")
    data_fixture.create_file_field(table=table, order=6, name='File')
    select = data_fixture.create_single_select_field(table=table, order=7,
                                                     name='select')
    option_a = data_fixture.create_select_option(field=select, value='Option A',
                                                 color='blue')
    option_b = data_fixture.create_select_option(field=select, value='Option B',
                                                 color='red')
    data_fixture.create_phone_number_field(table=table, order=8, name='PhoneNumber')

    model = table.get_model(attribute_names=True)
    row_1 = model.objects.create(
        name='BMW',
        color='Blue',
        price=10000,
        description='This is the fastest car there is.'
        price='10000',
        description='This is the fastest car there is.',
        date='0005-05-05',
        datetime=make_aware(datetime(4006, 7, 8, 0, 0, 0), utc),
        file=[{'visible_name': 'test_file.png'}],
        select=option_a,
        phonenumber='99999'
    )
    row_2 = model.objects.create(
        name='Audi',
        color='Orange',
        price=20000,
        description='This is the most expensive car we have.'
        price='20500',
        description='This is the most expensive car we have.',
        date='2005-05-05',
        datetime=make_aware(datetime(5, 5, 5, 0, 48, 0), utc),
        file=[{'visible_name': 'other_file.png'}],
        select=option_b,
        phonenumber='++--999999'
    )
    row_3 = model.objects.create(
        name='Volkswagen',
        color='White',
        price=5000,
        description='The oldest car that we have.'
        price='5000',
        description='The oldest car that we have.',
        date='9999-05-05',
        datetime=make_aware(datetime(5, 5, 5, 9, 59, 0), utc),
        file=[],
        phonenumber=''
    )

    results = model.objects.all().search_all_fields('FASTEST')

@@ -191,14 +220,62 @@ def test_search_all_fields_queryset(data_fixture):
    assert len(results) == 1
    assert row_2 in results

-   results = model.objects.all().search_all_fields(row_1.id)
+   results = model.objects.all().search_all_fields(str(row_1.id))
    assert len(results) == 1
    assert row_1 in results

-   results = model.objects.all().search_all_fields(row_3.id)
+   results = model.objects.all().search_all_fields(str(row_3.id))
    assert len(results) == 1
    assert row_3 in results

    results = model.objects.all().search_all_fields('500')
    assert len(results) == 2
    assert row_2 in results
    assert row_3 in results

    results = model.objects.all().search_all_fields('0' + str(row_1.id))
    assert len(results) == 0

    results = model.objects.all().search_all_fields('05/05/9999')
    assert len(results) == 1
    assert row_3 in results

    results = model.objects.all().search_all_fields('07/08/4006')
    assert len(results) == 1
    assert row_1 in results

    results = model.objects.all().search_all_fields('00:')
    assert len(results) == 2
    assert row_1 in results
    assert row_2 in results

    results = model.objects.all().search_all_fields('.png')
    assert len(results) == 2
    assert row_1 in results
    assert row_2 in results

    results = model.objects.all().search_all_fields('test_file')
    assert len(results) == 1
    assert row_1 in results

    results = model.objects.all().search_all_fields('Option')
    assert len(results) == 2
    assert row_1 in results
    assert row_2 in results

    results = model.objects.all().search_all_fields('Option B')
    assert len(results) == 1
    assert row_2 in results

    results = model.objects.all().search_all_fields('999999')
    assert len(results) == 1
    assert row_2 in results

    results = model.objects.all().search_all_fields('99999')
    assert len(results) == 2
    assert row_1 in results
    assert row_2 in results

    results = model.objects.all().search_all_fields('white car')
    assert len(results) == 0
@@ -315,6 +392,9 @@ def test_filter_by_fields_object_queryset(data_fixture):
    name_field = data_fixture.create_text_field(table=table, order=0, name='Name')
    data_fixture.create_text_field(table=table, order=1, name='Color')
    price_field = data_fixture.create_number_field(table=table, order=2, name='Price')
    active_field = data_fixture.create_boolean_field(table=table,
                                                     order=2,
                                                     name='Active')
    description_field = data_fixture.create_long_text_field(
        table=table, order=3, name='Description'
    )
@@ -362,7 +442,7 @@

    with pytest.raises(ViewFilterTypeNotAllowedForField):
        model.objects.all().filter_by_fields_object(filter_object={
-           f'filter__field_{price_field.id}__contains': '10',
+           f'filter__field_{active_field.id}__contains': '10',
        }, filter_type='AND')

    # All the entries are not following the correct format and should be ignored.
@@ -0,0 +1,61 @@
import pytest

from baserow.core.registries import application_type_registry


@pytest.mark.django_db
def test_import_export_database(data_fixture, user_tables_in_separate_db):
    database = data_fixture.create_database_application()
    table = data_fixture.create_database_table(database=database)
    text_field = data_fixture.create_text_field(table=table)
    view = data_fixture.create_grid_view(table=table)
    data_fixture.create_view_filter(view=view, field=text_field, value='Test')
    data_fixture.create_view_sort(view=view, field=text_field)
    model = table.get_model()
    row = model.objects.create(**{
        f'field_{text_field.id}': 'Test'
    })

    database_type = application_type_registry.get('database')
    serialized = database_type.export_serialized(database)
    imported_group = data_fixture.create_group()
    id_mapping = {}
    imported_database = database_type.import_serialized(
        imported_group,
        serialized,
        id_mapping
    )

    assert imported_database.id != database.id
    assert imported_database.group_id == imported_group.id
    assert imported_database.name == database.name
    assert imported_database.order == database.order
    assert imported_database.table_set.all().count() == 1

    imported_table = imported_database.table_set.all().first()
    assert imported_table.id != table.id
    assert imported_table.name == table.name
    assert imported_table.order == table.order
    assert imported_table.field_set.all().count() == 1
    assert imported_table.view_set.all().count() == 1

    imported_view = imported_table.view_set.all().first()
    assert imported_view.viewfilter_set.all().count() == 1
    assert imported_view.viewsort_set.all().count() == 1

    imported_model = imported_table.get_model()
    assert imported_model.objects.all().count() == 1
    imported_row = imported_model.objects.all().first()

    # Because the rows have a unique id within the table, we expect the row id to
    # be the same.
    assert imported_row.id == row.id
    assert imported_row.order == row.order
    assert getattr(
        imported_row,
        f'field_{id_mapping["database_fields"][text_field.id]}'
    ) == (getattr(row, f'field_{text_field.id}'))

    # It must still be possible to create a new row in the imported table.
    row_2 = imported_model.objects.create()
    assert row_2.id == 2
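The id_mapping dictionary threaded through export_serialized and import_serialized above maps old primary keys to the newly created ones, keyed per object type. A minimal sketch of the shape it ends up with after an import, using only keys that actually appear in these tests; the concrete ids are illustrative:

# Illustrative shape of id_mapping after import_serialized has run.
id_mapping = {
    'applications': {1: 7},                   # old database id -> imported id
    'database_fields': {8: 21},               # old field id -> imported field id
    'database_field_select_options': {1: 2},  # old select option id -> imported id
}

# Importers use it to remap references, for example the column of old field 8:
new_column_name = f'field_{id_mapping["database_fields"][8]}'
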
@@ -4,6 +4,7 @@ from pytz import timezone

from django.utils.timezone import make_aware, datetime

from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.fields.handler import FieldHandler
@@ -257,6 +258,17 @@ def test_contains_filter_type(data_fixture):
    grid_view = data_fixture.create_grid_view(table=table)
    text_field = data_fixture.create_text_field(table=table)
    long_text_field = data_fixture.create_long_text_field(table=table)
    date_field = data_fixture.create_date_field(table=table, date_include_time=True,
                                                date_format="ISO")
    number_field = data_fixture.create_number_field(table=table,
                                                    number_type='DECIMAL',
                                                    number_negative=True,
                                                    number_decimal_places=2)
    single_select_field = data_fixture.create_single_select_field(table=table)
    option_a = data_fixture.create_select_option(field=single_select_field, value='AC',
                                                 color='blue')
    option_b = data_fixture.create_select_option(field=single_select_field, value='BC',
                                                 color='red')

    handler = ViewHandler()
    model = table.get_model()
@@ -264,15 +276,24 @@
    row = model.objects.create(**{
        f'field_{text_field.id}': 'My name is John Doe.',
        f'field_{long_text_field.id}': 'Long text that is not empty.',
        f'field_{date_field.id}': '2020-02-01 01:23',
        f'field_{number_field.id}': '98989898',
        f'field_{single_select_field.id}': option_a
    })
    model.objects.create(**{
        f'field_{text_field.id}': '',
        f'field_{long_text_field.id}': '',
        f'field_{date_field.id}': None,
        f'field_{number_field.id}': None,
        f'field_{single_select_field.id}': None,
    })
    row_3 = model.objects.create(**{
        f'field_{text_field.id}': 'This is a test field.',
        f'field_{long_text_field.id}': 'This text is a bit longer, but it also '
                                       'contains.\n A multiline approach.',
        f'field_{date_field.id}': '0001-01-01 00:12',
        f'field_{number_field.id}': '10000',
        f'field_{single_select_field.id}': option_b
    })

    filter = data_fixture.create_view_filter(
@@ -321,6 +342,72 @@ def test_contains_filter_type(data_fixture):
    assert len(ids) == 1
    assert row_3.id in ids

    filter.field = date_field
    filter.value = '2020-02-01'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 1
    assert row.id in ids

    filter.field = date_field
    filter.value = '01/02/2020'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 0

    filter.field = date_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = number_field
    filter.value = '98'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 1
    assert row.id in ids

    filter.field = number_field
    filter.value = '0' + str(row.id)
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 0

    filter.field = number_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = date_field
    filter.value = '00:12'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 1
    assert row_3.id in ids

    filter.field = single_select_field
    filter.value = 'A'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 1
    assert row.id in ids

    filter.field = single_select_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = single_select_field
    filter.value = 'C'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 2
    assert row.id in ids
    assert row_3.id in ids


@pytest.mark.django_db
def test_contains_not_filter_type(data_fixture):
@@ -329,6 +416,17 @@ def test_contains_not_filter_type(data_fixture):
    grid_view = data_fixture.create_grid_view(table=table)
    text_field = data_fixture.create_text_field(table=table)
    long_text_field = data_fixture.create_long_text_field(table=table)
    date_field = data_fixture.create_date_field(table=table, date_include_time=True,
                                                date_format="ISO")
    number_field = data_fixture.create_number_field(table=table,
                                                    number_type='DECIMAL',
                                                    number_negative=True,
                                                    number_decimal_places=2)
    single_select_field = data_fixture.create_single_select_field(table=table)
    option_a = data_fixture.create_select_option(field=single_select_field, value='AC',
                                                 color='blue')
    option_b = data_fixture.create_select_option(field=single_select_field, value='BC',
                                                 color='red')

    handler = ViewHandler()
    model = table.get_model()
@@ -336,15 +434,24 @@
    row = model.objects.create(**{
        f'field_{text_field.id}': 'My name is John Doe.',
        f'field_{long_text_field.id}': 'Long text that is not empty.',
        f'field_{date_field.id}': '2020-02-01 01:23',
        f'field_{number_field.id}': '98989898',
        f'field_{single_select_field.id}': option_a
    })
    row_2 = model.objects.create(**{
        f'field_{text_field.id}': '',
        f'field_{long_text_field.id}': '',
        f'field_{date_field.id}': None,
        f'field_{number_field.id}': None,
        f'field_{single_select_field.id}': None,
    })
    row_3 = model.objects.create(**{
        f'field_{text_field.id}': 'This is a test field.',
        f'field_{long_text_field.id}': 'This text is a bit longer, but it also '
                                       'contains.\n A multiline approach.',
        f'field_{date_field.id}': '0001-01-01 00:12',
        f'field_{number_field.id}': '10000',
        f'field_{single_select_field.id}': option_b
    })

    filter = data_fixture.create_view_filter(
@@ -396,6 +503,65 @@ def test_contains_not_filter_type(data_fixture):
    assert row.id in ids
    assert row_2.id in ids

    filter.field = date_field
    filter.value = '2020-02-01'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 2
    assert row.id not in ids

    filter.field = date_field
    filter.value = '01/02/2020'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = date_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = number_field
    filter.value = '98'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 2
    assert row.id not in ids

    filter.field = number_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = date_field
    filter.value = '00:12'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 2
    assert row_3.id not in ids

    filter.field = single_select_field
    filter.value = 'A'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 2
    assert row.id not in ids

    filter.field = single_select_field
    filter.value = ''
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 3

    filter.field = single_select_field
    filter.value = 'C'
    filter.save()
    ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
    assert len(ids) == 1
    assert row_2.id in ids


@pytest.mark.django_db
def test_single_select_equal_filter_type(data_fixture):
@@ -451,6 +617,16 @@ def test_single_select_equal_filter_type(data_fixture):
    assert len(ids) == 3


@pytest.mark.django_db
def test_single_select_equal_filter_type_export_import():
    view_filter_type = view_filter_type_registry.get('single_select_equal')
    id_mapping = {'database_field_select_options': {1: 2}}
    assert view_filter_type.get_export_serialized_value('1') == '1'
    assert view_filter_type.set_import_serialized_value('1', id_mapping) == '2'
    assert view_filter_type.set_import_serialized_value('', id_mapping) == ''
    assert view_filter_type.set_import_serialized_value('wrong', id_mapping) == ''


@pytest.mark.django_db
def test_single_select_not_equal_filter_type(data_fixture):
    user = data_fixture.create_user()
@@ -276,7 +276,7 @@ def test_field_type_changed(data_fixture):
    assert ViewSort.objects.all().count() == 1

    field_handler.update_field(user=user, field=long_text_field,
-                              new_type_name='number')
+                              new_type_name='boolean')
    assert ViewFilter.objects.all().count() == 0
    assert ViewSort.objects.all().count() == 1
@@ -14,8 +14,10 @@ def test_grid_view_get_field_options(data_fixture):
    assert len(field_options) == 2
    assert field_options[0].field_id == field_1.id
    assert field_options[0].width == 200
    assert field_options[0].order == 32767
    assert field_options[1].field_id == field_2.id
    assert field_options[1].width == 200
    assert field_options[1].order == 32767

    field_3 = data_fixture.create_text_field(table=table)
@@ -0,0 +1,67 @@
import pytest

from baserow.contrib.database.views.registries import view_type_registry


@pytest.mark.django_db
def test_import_export_grid_view(data_fixture):
    grid_view = data_fixture.create_grid_view(
        name='Test',
        order=1,
        filter_type='AND',
        filters_disabled=False
    )
    field = data_fixture.create_text_field(table=grid_view.table)
    imported_field = data_fixture.create_text_field(table=grid_view.table)
    field_option = data_fixture.create_grid_view_field_option(
        grid_view=grid_view,
        field=field
    )
    view_filter = data_fixture.create_view_filter(
        view=grid_view,
        field=field,
        value='test',
        type='equal'
    )
    view_sort = data_fixture.create_view_sort(
        view=grid_view,
        field=field,
        order='ASC'
    )

    id_mapping = {'database_fields': {field.id: imported_field.id}}

    grid_view_type = view_type_registry.get('grid')
    serialized = grid_view_type.export_serialized(grid_view)
    imported_grid_view = grid_view_type.import_serialized(
        grid_view.table,
        serialized,
        id_mapping
    )

    assert grid_view.id != imported_grid_view.id
    assert grid_view.name == imported_grid_view.name
    assert grid_view.order == imported_grid_view.order
    assert grid_view.filter_type == imported_grid_view.filter_type
    assert grid_view.filters_disabled == imported_grid_view.filters_disabled
    assert imported_grid_view.viewfilter_set.all().count() == 1
    assert imported_grid_view.viewsort_set.all().count() == 1

    imported_view_filter = imported_grid_view.viewfilter_set.all().first()
    assert view_filter.id != imported_view_filter.id
    assert imported_field.id == imported_view_filter.field_id
    assert view_filter.value == imported_view_filter.value
    assert view_filter.type == imported_view_filter.type

    imported_view_sort = imported_grid_view.viewsort_set.all().first()
    assert view_sort.id != imported_view_sort.id
    assert imported_field.id == imported_view_sort.field_id
    assert view_sort.order == imported_view_sort.order

    imported_field_options = imported_grid_view.get_field_options()
    imported_field_option = imported_field_options[0]
    assert field_option.id != imported_field_option.id
    assert imported_field.id == imported_field_option.field_id
    assert field_option.width == imported_field_option.width
    assert field_option.hidden == imported_field_option.hidden
    assert field_option.order == imported_field_option.order
@@ -1,20 +1,24 @@
import pytest
import os
from pathlib import Path
from unittest.mock import patch

from itsdangerous.exc import BadSignature

from django.db import connection
from django.conf import settings

from baserow.core.handler import CoreHandler
from baserow.core.models import (
-   Settings, Group, GroupUser, GroupInvitation, Application,
-   GROUP_USER_PERMISSION_ADMIN
+   Settings, Group, GroupUser, GroupInvitation, Application, Template,
+   TemplateCategory, GROUP_USER_PERMISSION_ADMIN
)
from baserow.core.exceptions import (
    UserNotInGroupError, ApplicationTypeDoesNotExist, GroupDoesNotExist,
    GroupUserDoesNotExist, ApplicationDoesNotExist, UserInvalidGroupPermissionsError,
    BaseURLHostnameNotAllowed, GroupInvitationEmailMismatch,
-   GroupInvitationDoesNotExist, GroupUserAlreadyExists, IsNotAdminError
+   GroupInvitationDoesNotExist, GroupUserAlreadyExists, IsNotAdminError,
+   TemplateFileDoesNotExist, TemplateDoesNotExist
)
from baserow.contrib.database.models import Database, Table
@@ -716,3 +720,171 @@ def test_delete_database_application(send_mock, data_fixture):
    assert send_mock.call_args[1]['application_id'] == database.id
    assert send_mock.call_args[1]['application'].id == database.id
    assert send_mock.call_args[1]['user'].id == user.id


@pytest.mark.django_db
def test_get_template(data_fixture):
    data_fixture.create_user()
    template_1 = data_fixture.create_template()

    handler = CoreHandler()

    with pytest.raises(TemplateDoesNotExist):
        handler.get_template(template_id=0)

    template_1_copy = handler.get_template(template_id=template_1.id)
    assert template_1_copy.id == template_1.id

    # If the error is raised we know for sure that the query has resolved.
    with pytest.raises(AttributeError):
        handler.get_template(
            template_id=template_1.id,
            base_queryset=Template.objects.prefetch_related('UNKNOWN')
        )


@pytest.mark.django_db
def test_export_import_group_application(data_fixture):
    group = data_fixture.create_group()
    imported_group = data_fixture.create_group()
    database = data_fixture.create_database_application(group=group)
    data_fixture.create_database_table(database=database)

    handler = CoreHandler()
    exported_applications = handler.export_group_applications(group)
    imported_applications, id_mapping = handler.import_application_to_group(
        imported_group,
        exported_applications
    )

    assert len(imported_applications) == 1
    imported_database = imported_applications[0]
    assert imported_database.id != database.id
    assert imported_database.name == database.name
    assert imported_database.order == database.order
    assert imported_database.table_set.all().count() == 1
    assert database.id in id_mapping['applications']
    assert id_mapping['applications'][database.id] == imported_database.id


@pytest.mark.django_db
def test_sync_all_templates():
    handler = CoreHandler()
    handler.sync_templates()

    assert (
        Template.objects.count() ==
        len(list(Path(settings.APPLICATION_TEMPLATES_DIR).glob('*.json')))
    )


@pytest.mark.django_db
def test_sync_templates(data_fixture):
    old_templates = settings.APPLICATION_TEMPLATES_DIR
    settings.APPLICATION_TEMPLATES_DIR = os.path.join(
        settings.BASE_DIR,
        '../../../tests/templates'
    )

    group_1 = data_fixture.create_group()
    group_2 = data_fixture.create_group()
    group_3 = data_fixture.create_group()

    category_1 = data_fixture.create_template_category(name='No templates')
    category_2 = data_fixture.create_template_category(name='Has template')
    template = data_fixture.create_template(
        slug='is-going-to-be-deleted',
        group=group_1,
        category=category_2
    )
    template_2 = data_fixture.create_template(
        slug='example-template',
        group=group_2,
        category=category_2,
        export_hash='IS_NOT_GOING_MATCH'
    )
    template_3 = data_fixture.create_template(
        slug='example-template-2',
        group=group_3,
        category=category_2,
        export_hash='f086c9b4b0dfea6956d0bb32af210277bb645ff3faebc5fb37a9eae85c433f2d',
    )

    handler = CoreHandler()
    handler.sync_templates()

    groups = Group.objects.all().order_by('id')
    assert len(groups) == 3
    assert groups[0].id == group_3.id
    assert groups[1].id not in [group_1.id, group_2.id]
    assert groups[2].id not in [group_1.id, group_2.id]

    assert not TemplateCategory.objects.filter(id=category_1.id).exists()
    assert not TemplateCategory.objects.filter(id=category_2.id).exists()
    categories = TemplateCategory.objects.all()
    assert len(categories) == 1
    assert categories[0].name == 'Test category 1'

    assert not Template.objects.filter(id=template.id).exists()
    assert Template.objects.filter(id=template_2.id).exists()
    assert Template.objects.filter(id=template_3.id).exists()

    refreshed_template_2 = Template.objects.get(id=template_2.id)
    assert refreshed_template_2.name == 'Example template'
    assert refreshed_template_2.icon == 'file'
    assert (
        refreshed_template_2.export_hash ==
        'f086c9b4b0dfea6956d0bb32af210277bb645ff3faebc5fb37a9eae85c433f2d'
    )
    assert refreshed_template_2.keywords == 'Example,Template,For,Search'
    assert refreshed_template_2.categories.all().first().id == categories[0].id
    assert template_2.group_id != refreshed_template_2.group_id
    assert refreshed_template_2.group.name == 'Example template'
    assert refreshed_template_2.group.application_set.count() == 1

    refreshed_template_3 = Template.objects.get(id=template_3.id)
    assert template_3.group_id == refreshed_template_3.group_id
    # We expect the group count to be zero because the export hash matches and
    # nothing was updated.
    assert refreshed_template_3.group.application_set.count() == 0

    settings.APPLICATION_TEMPLATES_DIR = old_templates


@pytest.mark.django_db
@patch('baserow.core.signals.application_created.send')
def test_install_template(send_mock, data_fixture):
    old_templates = settings.APPLICATION_TEMPLATES_DIR
    settings.APPLICATION_TEMPLATES_DIR = os.path.join(
        settings.BASE_DIR,
        '../../../tests/templates'
    )

    user = data_fixture.create_user()
    group = data_fixture.create_group(user=user)
    group_2 = data_fixture.create_group()

    handler = CoreHandler()
    handler.sync_templates()

    template_2 = data_fixture.create_template(slug='does-not-exist')

    with pytest.raises(TemplateFileDoesNotExist):
        handler.install_template(user, group, template_2)

    template = Template.objects.get(slug='example-template')

    with pytest.raises(UserNotInGroupError):
        handler.install_template(user, group_2, template)

    applications, id_mapping = handler.install_template(user, group, template)
    assert len(applications) == 1
    assert applications[0].group_id == group.id
    assert applications[0].name == 'Event marketing'

    send_mock.assert_called_once()
    assert send_mock.call_args[1]['application'].id == applications[0].id
    assert send_mock.call_args[1]['user'].id == user.id
    assert send_mock.call_args[1]['type_name'] == 'database'

    settings.APPLICATION_TEMPLATES_DIR = old_templates
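The export_hash assertions above work because sync_templates only re-installs a template whose stored hash no longer matches the JSON file on disk. A rough sketch of that idempotency check, assuming a SHA-256 digest over the exported applications; the helper below is illustrative and not Baserow's actual implementation:

import hashlib
import json
from pathlib import Path


def template_needs_update(path: Path, template) -> bool:
    """Return True when the JSON file changed since the template was synced."""
    parsed = json.loads(path.read_text())
    # Assumption: the hash covers the 'export' payload of the template file.
    export_hash = hashlib.sha256(
        json.dumps(parsed['export']).encode('utf-8')
    ).hexdigest()
    # When the hash still matches, the template's group is left untouched,
    # which is why test_sync_templates expects application_set.count() == 0
    # for template_3 above.
    return template.export_hash != export_hash
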
@@ -3,6 +3,8 @@ from pytz import timezone
from freezegun import freeze_time
from datetime import datetime

from rest_framework.exceptions import NotAuthenticated

from baserow.core.models import GroupUser, Group
from baserow.core.exceptions import (
    UserNotInGroupError, UserInvalidGroupPermissionsError
@@ -45,6 +47,8 @@ def test_group_has_user(data_fixture):
    user = data_fixture.create_user()
    user_group = data_fixture.create_user_group(permissions='ADMIN')
    user_group_2 = data_fixture.create_user_group(permissions='MEMBER')
    user_group_3 = data_fixture.create_user_group()
    data_fixture.create_template(group=user_group_3.group)

    assert user_group.group.has_user(user_group.user)
    assert not user_group.group.has_user(user)
@@ -75,6 +79,16 @@ def test_group_has_user(data_fixture):
    user_group.group.has_user(user_group.user, 'ADMIN', raise_error=True)
    user_group_2.group.has_user(user_group_2.user, 'MEMBER', raise_error=True)

    assert user_group_3.group.has_user(None) is False
    assert user_group_2.group.has_user(None, allow_if_template=True) is False
    assert user_group_3.group.has_user(None, allow_if_template=True) is True

    with pytest.raises(NotAuthenticated):
        user_group_3.group.has_user(None, raise_error=True)

    with pytest.raises(NotAuthenticated):
        user_group_2.group.has_user(None, raise_error=True, allow_if_template=True)


@pytest.mark.django_db
def test_application_content_type_init(data_fixture):
@@ -23,7 +23,6 @@ from baserow.core.user.exceptions import (
)
from baserow.core.user.handler import UserHandler


User = get_user_model()
@@ -102,6 +101,22 @@ def test_create_user(data_fixture):
        user_handler.create_user('Test1', 'test@test.nl', 'password')


@pytest.mark.django_db
def test_first_ever_created_user_is_staff(data_fixture):
    user_handler = UserHandler()

    data_fixture.update_settings(allow_new_signups=True)

    first_user = user_handler.create_user('First Ever User', 'test@test.nl',
                                          'password')
    assert first_user.first_name == 'First Ever User'
    assert first_user.is_staff

    second_user = user_handler.create_user('Second User', 'test2@test.nl', 'password')
    assert second_user.first_name == 'Second User'
    assert not second_user.is_staff


@pytest.mark.django_db
def test_create_user_with_invitation(data_fixture):
    plugin_mock = MagicMock()
@@ -13,7 +13,7 @@ from django.core.files.storage import FileSystemStorage
from baserow.core.models import UserFile
from baserow.core.user_files.exceptions import (
    InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
-   MaximumUniqueTriesError
+   MaximumUniqueTriesError, InvalidFileURLError
)
from baserow.core.user_files.handler import UserFileHandler
@@ -263,7 +263,7 @@ def test_upload_user_file_by_url(data_fixture, tmpdir):

    responses.add(
        responses.GET,
-       'http://localhost/test.txt',
+       'https://baserow.io/test.txt',
        body=b'Hello World',
        status=200,
        content_type="text/plain",
@@ -272,31 +272,30 @@

    responses.add(
        responses.GET,
-       'http://localhost/not-found.pdf',
-       body=b'Hello World',
+       'https://baserow.io/not-found.pdf',
        status=404,
        content_type="application/pdf",
        stream=True,
    )

    # Could not be reached because it responds with a 404.
    with pytest.raises(FileURLCouldNotBeReached):
        handler.upload_user_file_by_url(
            user,
-           'http://localhost/test2.txt',
+           'https://baserow.io/not-found.pdf',
            storage=storage
        )

+   # Only the http and https protocol are supported.
+   with pytest.raises(InvalidFileURLError):
+       handler.upload_user_file_by_url(
+           user,
+           'ftp://baserow.io/not-found.pdf',
+           storage=storage
+       )

    with freeze_time('2020-01-01 12:00'):
        user_file = handler.upload_user_file_by_url(
            user,
-           'http://localhost/test.txt',
+           'https://baserow.io/test.txt',
            storage=storage
        )

-   with pytest.raises(FileURLCouldNotBeReached):
-       handler.upload_user_file_by_url(
-           user,
-           'http://localhost/not-found.pdf',
-           storage=storage
-       )
@@ -318,3 +317,26 @@ def test_upload_user_file_by_url(data_fixture, tmpdir):
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()
    assert file_path.open().read() == 'Hello World'


@pytest.mark.django_db
def test_upload_user_file_by_url_within_private_network(data_fixture, tmpdir):
    user = data_fixture.create_user()

    storage = FileSystemStorage(location=str(tmpdir), base_url='http://localhost')
    handler = UserFileHandler()

    # Could not be reached because it is an internal private URL.
    with pytest.raises(FileURLCouldNotBeReached):
        handler.upload_user_file_by_url(
            user,
            'http://localhost/test.txt',
            storage=storage
        )

    with pytest.raises(FileURLCouldNotBeReached):
        handler.upload_user_file_by_url(
            user,
            'http://192.168.1.1/test.txt',
            storage=storage
        )
@@ -1,4 +1,9 @@
from __future__ import print_function
import psycopg2
import pytest
from django.db import connections
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import sys


@pytest.fixture
@@ -11,3 +16,80 @@ def data_fixture():
def api_client():
    from rest_framework.test import APIClient
    return APIClient()


def pytest_addoption(parser):
    parser.addoption(
        "--runslow", action="store_true", default=False, help="run slow tests"
    )


def pytest_configure(config):
    config.addinivalue_line("markers", "slow: mark test as slow to run")


def pytest_collection_modifyitems(config, items):
    if config.getoption("--runslow"):
        # --runslow given in cli: do not skip slow tests
        return
    skip_slow = pytest.mark.skip(reason="need --runslow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)


def run_non_transactional_raw_sql(sqls, dbinfo):
    conn = psycopg2.connect(host=dbinfo['HOST'], user=dbinfo['USER'],
                            password=dbinfo['PASSWORD'],
                            port=int(dbinfo['PORT']))
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = conn.cursor()
    for sql in sqls:
        cursor.execute(sql)

    conn.close()


# Nicest way of printing to stderr sourced from
# https://stackoverflow.com/questions/5574702/how-to-print-to-stderr-in-python
def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


@pytest.fixture()
def user_tables_in_separate_db(settings):
    """
    Creates a temporary database and sets up baserow so it is used to store user
    tables.

    Currently this has only been implemented at a function level scope as adding
    databases to settings.DATABASES causes pytest to assume they are extra replica
    dbs and spend ages setting them up as mirrors. Instead keeping this at the
    functional scope lets us keep it simple and quick.
    """

    default_db = settings.DATABASES['default']
    user_table_db_name = f'{default_db["NAME"]}_user_tables'

    # Print to stderr to match pytest-django's behaviour for logging about test
    # database setup and teardown.
    eprint(f"Dropping and recreating {user_table_db_name} for test.")

    settings.USER_TABLE_DATABASE = 'user_tables_database'
    settings.DATABASES['user_tables_database'] = dict(default_db)
    settings.DATABASES['user_tables_database']['NAME'] = user_table_db_name

    # You cannot drop databases inside transactions and django provides no easy way
    # of turning them off temporarily. Instead we need to open our own connection so
    # we can turn off transactions to perform the required setup/teardown sql. See:
    # https://pytest-django.readthedocs.io/en/latest/database.html#using-a-template
    # -database-for-tests
    run_non_transactional_raw_sql([f'DROP DATABASE IF EXISTS {user_table_db_name}; ',
                                   f'CREATE DATABASE {user_table_db_name}'],
                                  default_db)

    yield connections['user_tables_database']

    # Close django's connection to the user table db so we can drop it.
    connections['user_tables_database'].close()

    run_non_transactional_raw_sql([f'DROP DATABASE {user_table_db_name}'], default_db)
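A minimal sketch of how a test opts in: requesting user_tables_in_separate_db alongside data_fixture makes the generated tables land in the throwaway database, and the yielded connection can be used for raw checks. The to_regclass lookup and the database_table_<id> naming below are assumptions for illustration:

import pytest


@pytest.mark.django_db
def test_table_lives_in_user_table_db(data_fixture, user_tables_in_separate_db):
    table = data_fixture.create_database_table(name='Cars')

    # The schema editors in the fixtures ran against the separate database, so
    # the generated table should exist there rather than on 'default'.
    with user_tables_in_separate_db.cursor() as cursor:
        cursor.execute('SELECT to_regclass(%s)', [f'database_table_{table.id}'])
        assert cursor.fetchone()[0] is not None
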
3 backend/tests/fixtures/__init__.py vendored

@@ -9,9 +9,10 @@ from .table import TableFixtures
from .view import ViewFixtures
from .field import FieldFixtures
from .token import TokenFixtures
from .template import TemplateFixtures


class Fixtures(SettingsFixtures, UserFixtures, UserFileFixtures, GroupFixtures,
               ApplicationFixtures, TableFixtures, ViewFixtures, FieldFixtures,
-              TokenFixtures):
+              TokenFixtures, TemplateFixtures):
    fake = Faker()
58 backend/tests/fixtures/field.py vendored

@@ -1,14 +1,15 @@
-from django.db import connection
+from django.conf import settings
+from django.db import connections

from baserow.contrib.database.fields.models import (
    TextField, LongTextField, NumberField, BooleanField, DateField, LinkRowField,
-   FileField, SingleSelectField, SelectOption
+   FileField, SingleSelectField, SelectOption, URLField, EmailField, PhoneNumberField
)


class FieldFixtures:
    def create_model_field(self, table, field):
-       with connection.schema_editor() as schema_editor:
+       with connections[settings.USER_TABLE_DATABASE].schema_editor() as schema_editor:
            to_model = table.get_model(field_ids=[field.id])
            model_field = to_model._meta.get_field(field.db_column)
            schema_editor.add_field(to_model, model_field)
@@ -166,3 +167,54 @@ class FieldFixtures:
        self.create_model_field(kwargs['table'], field)

        return field

    def create_url_field(self, user=None, create_field=True, **kwargs):
        if 'table' not in kwargs:
            kwargs['table'] = self.create_database_table(user=user)

        if 'name' not in kwargs:
            kwargs['name'] = self.fake.url()

        if 'order' not in kwargs:
            kwargs['order'] = 0

        field = URLField.objects.create(**kwargs)

        if create_field:
            self.create_model_field(kwargs['table'], field)

        return field

    def create_email_field(self, user=None, create_field=True, **kwargs):
        if 'table' not in kwargs:
            kwargs['table'] = self.create_database_table(user=user)

        if 'name' not in kwargs:
            kwargs['name'] = self.fake.email()

        if 'order' not in kwargs:
            kwargs['order'] = 0

        field = EmailField.objects.create(**kwargs)

        if create_field:
            self.create_model_field(kwargs['table'], field)

        return field

    def create_phone_number_field(self, user=None, create_field=True, **kwargs):
        if 'table' not in kwargs:
            kwargs['table'] = self.create_database_table(user=user)

        if 'name' not in kwargs:
            kwargs['name'] = self.fake.phone_number()

        if 'order' not in kwargs:
            kwargs['order'] = 0

        field = PhoneNumberField.objects.create(**kwargs)

        if create_field:
            self.create_model_field(kwargs['table'], field)

        return field
|
6
backend/tests/fixtures/table.py
vendored
6
backend/tests/fixtures/table.py
vendored
|
@ -1,4 +1,5 @@
|
|||
from django.db import connection
|
||||
from django.conf import settings
|
||||
from django.db import connections
|
||||
|
||||
from baserow.contrib.database.table.models import Table
|
||||
|
||||
|
@ -17,7 +18,8 @@ class TableFixtures:
|
|||
table = Table.objects.create(**kwargs)
|
||||
|
||||
if create_table:
|
||||
with connection.schema_editor() as schema_editor:
|
||||
user_table_db = connections[settings.USER_TABLE_DATABASE]
|
||||
with user_table_db.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(table.get_model())
|
||||
|
||||
return table
|
||||
|
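These fixture changes only pick the right connection when creating schema; steering ordinary ORM reads and writes for the generated models to that database is typically done with a Django database router. A hedged sketch of what such a router can look like; the db_table prefix check is an assumption used for illustration, not Baserow's actual router:

from django.conf import settings


class UserTableRouter:
    """Route ORM traffic for generated user-table models to the separate db."""

    def _is_user_table(self, model):
        # Assumption: generated models follow a database_table_<id> naming scheme.
        return model._meta.db_table.startswith('database_table_')

    def db_for_read(self, model, **hints):
        if self._is_user_table(model):
            return getattr(settings, 'USER_TABLE_DATABASE', None)
        return None

    def db_for_write(self, model, **hints):
        return self.db_for_read(model, **hints)
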
|
47
backend/tests/fixtures/template.py
vendored
Normal file
47
backend/tests/fixtures/template.py
vendored
Normal file
|
@ -0,0 +1,47 @@
|
|||
from baserow.core.models import Template, TemplateCategory
|
||||
|
||||
|
||||
class TemplateFixtures:
|
||||
def create_template_category(self, template=None, templates=None, **kwargs):
|
||||
if 'name' not in kwargs:
|
||||
kwargs['name'] = self.fake.name()
|
||||
|
||||
category = TemplateCategory.objects.create(**kwargs)
|
||||
|
||||
if not templates:
|
||||
templates = []
|
||||
|
||||
if template:
|
||||
templates.append(template)
|
||||
|
||||
category.templates.add(*templates)
|
||||
|
||||
return category
|
||||
|
||||
def create_template(self, category=None, categories=None, **kwargs):
|
||||
if 'name' not in kwargs:
|
||||
kwargs['name'] = self.fake.name()
|
||||
|
||||
if 'slug' not in kwargs:
|
||||
kwargs['slug'] = self.fake.slug()
|
||||
|
||||
if 'icon' not in kwargs:
|
||||
kwargs['icon'] = 'document'
|
||||
|
||||
if 'group' not in kwargs:
|
||||
kwargs['group'] = self.create_group()
|
||||
|
||||
template = Template.objects.create(**kwargs)
|
||||
|
||||
if not categories:
|
||||
categories = []
|
||||
|
||||
if category:
|
||||
categories.append(category)
|
||||
|
||||
if len(categories) == 0:
|
||||
categories.append(self.create_template_category())
|
||||
|
||||
template.categories.add(*categories)
|
||||
|
||||
return template
|
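A minimal usage sketch of these helpers through the shared data_fixture; the slug and category name are illustrative:

import pytest


@pytest.mark.django_db
def test_template_fixture_usage(data_fixture):
    category = data_fixture.create_template_category(name='Marketing')
    template = data_fixture.create_template(slug='event-marketing',
                                            category=category)

    assert template.categories.filter(id=category.id).exists()
    # A group is created automatically when none is passed in.
    assert template.group_id is not None
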
220 backend/tests/templates/example-template-2.json Normal file

@@ -0,0 +1,220 @@
{
  "baserow_template_version": 1,
  "name": "Example template",
  "icon": "file",
  "keywords": ["Example", "Template", "For", "Search"],
  "categories": ["Test category 1"],
  "export": [
    {
      "id": 2,
      "name": "Event marketing",
      "order": 1,
      "type": "database",
      "tables": [
        {
          "id": 3,
          "name": "Customers",
          "order": 1,
          "fields": [
            {
              "id": 8,
              "type": "text",
              "name": "Name",
              "order": 0,
              "primary": true,
              "text_default": ""
            },
            {
              "id": 9,
              "type": "text",
              "name": "Last name",
              "order": 1,
              "primary": false,
              "text_default": ""
            },
            {
              "id": 10,
              "type": "long_text",
              "name": "Notes",
              "order": 2,
              "primary": false
            },
            {
              "id": 11,
              "type": "boolean",
              "name": "Active",
              "order": 3,
              "primary": false
            }
          ],
          "views": [
            {
              "id": 3,
              "type": "grid",
              "name": "Grid",
              "order": 1,
              "filter_type": "AND",
              "filters_disabled": false,
              "filters": [],
              "sortings": [],
              "field_options": [
                {
                  "id": 11,
                  "field_id": 8,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 12,
                  "field_id": 9,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 8,
                  "field_id": 10,
                  "width": 400,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 9,
                  "field_id": 11,
                  "width": 100,
                  "hidden": false,
                  "order": 32767
                }
              ]
            }
          ],
          "rows": [
            {
              "id": 1,
              "order": "1.00000000000000000000",
              "field_8": "Elon",
              "field_9": "Musk",
              "field_10": null,
              "field_11": "true"
            },
            {
              "id": 2,
              "order": "2.00000000000000000000",
              "field_8": "Bill",
              "field_9": "Gates",
              "field_10": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce dignissim, urna eget rutrum sollicitudin, sapien diam interdum nisi, quis malesuada nibh eros a est.",
              "field_11": "false"
            },
            {
              "id": 3,
              "order": "3.00000000000000000000",
              "field_8": "Mark",
              "field_9": "Zuckerburg",
              "field_10": null,
              "field_11": "true"
            },
            {
              "id": 4,
              "order": "4.00000000000000000000",
              "field_8": "Jeffrey",
              "field_9": "Bezos",
              "field_10": null,
              "field_11": "true"
            }
          ]
        },
        {
          "id": 4,
          "name": "Projects",
          "order": 2,
          "fields": [
            {
              "id": 12,
              "type": "text",
              "name": "Name",
              "order": 0,
              "primary": true,
              "text_default": ""
            },
            {
              "id": 13,
              "type": "date",
              "name": "Started",
              "order": 1,
              "primary": false,
              "date_format": "EU",
              "date_include_time": false,
              "date_time_format": "24"
            },
            {
              "id": 14,
              "type": "boolean",
              "name": "Active",
              "order": 2,
              "primary": false
            }
          ],
          "views": [
            {
              "id": 4,
              "type": "grid",
              "name": "Grid",
              "order": 1,
              "filter_type": "AND",
              "filters_disabled": false,
              "filters": [],
              "sortings": [],
              "field_options": [
                {
                  "id": 13,
                  "field_id": 12,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 14,
                  "field_id": 13,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 10,
                  "field_id": 14,
                  "width": 100,
                  "hidden": false,
                  "order": 32767
                }
              ]
            }
          ],
          "rows": [
            {
              "id": 1,
              "order": "1.00000000000000000000",
              "field_12": "Tesla",
              "field_13": "2020-06-01",
              "field_14": "true"
            },
            {
              "id": 2,
              "order": "2.00000000000000000000",
              "field_12": "SpaceX",
              "field_13": null,
              "field_14": "false"
            },
            {
              "id": 3,
              "order": "3.00000000000000000000",
              "field_12": "Amazon",
              "field_13": "2018-01-01",
              "field_14": "false"
            }
          ]
        }
      ]
    }
  ]
}
220 backend/tests/templates/example-template-3.json Normal file

@@ -0,0 +1,220 @@
{
  "baserow_template_version": 1,
  "name": "Example template",
  "icon": "file",
  "keywords": ["Example", "Template", "For", "Search"],
  "categories": ["Test category 1"],
  "export": [
    {
      "id": 2,
      "name": "Event marketing",
      "order": 1,
      "type": "database",
      "tables": [
        {
          "id": 3,
          "name": "Customers",
          "order": 1,
          "fields": [
            {
              "id": 8,
              "type": "text",
              "name": "Name",
              "order": 0,
              "primary": true,
              "text_default": ""
            },
            {
              "id": 9,
              "type": "text",
              "name": "Last name",
              "order": 1,
              "primary": false,
              "text_default": ""
            },
            {
              "id": 10,
              "type": "long_text",
              "name": "Notes",
              "order": 2,
              "primary": false
            },
            {
              "id": 11,
              "type": "boolean",
              "name": "Active",
              "order": 3,
              "primary": false
            }
          ],
          "views": [
            {
              "id": 3,
              "type": "grid",
              "name": "Grid",
              "order": 1,
              "filter_type": "AND",
              "filters_disabled": false,
              "filters": [],
              "sortings": [],
              "field_options": [
                {
                  "id": 11,
                  "field_id": 8,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 12,
                  "field_id": 9,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 8,
                  "field_id": 10,
                  "width": 400,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 9,
                  "field_id": 11,
                  "width": 100,
                  "hidden": false,
                  "order": 32767
                }
              ]
            }
          ],
          "rows": [
            {
              "id": 1,
              "order": "1.00000000000000000000",
              "field_8": "Elon",
              "field_9": "Musk",
              "field_10": null,
              "field_11": "true"
            },
            {
              "id": 2,
              "order": "2.00000000000000000000",
              "field_8": "Bill",
              "field_9": "Gates",
              "field_10": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce dignissim, urna eget rutrum sollicitudin, sapien diam interdum nisi, quis malesuada nibh eros a est.",
              "field_11": "false"
            },
            {
              "id": 3,
              "order": "3.00000000000000000000",
              "field_8": "Mark",
              "field_9": "Zuckerburg",
              "field_10": null,
              "field_11": "true"
            },
            {
              "id": 4,
              "order": "4.00000000000000000000",
              "field_8": "Jeffrey",
              "field_9": "Bezos",
              "field_10": null,
              "field_11": "true"
            }
          ]
        },
        {
          "id": 4,
          "name": "Projects",
          "order": 2,
          "fields": [
            {
              "id": 12,
              "type": "text",
              "name": "Name",
              "order": 0,
              "primary": true,
              "text_default": ""
            },
            {
              "id": 13,
              "type": "date",
              "name": "Started",
              "order": 1,
              "primary": false,
              "date_format": "EU",
              "date_include_time": false,
              "date_time_format": "24"
            },
            {
              "id": 14,
              "type": "boolean",
              "name": "Active",
              "order": 2,
              "primary": false
            }
          ],
          "views": [
            {
              "id": 4,
              "type": "grid",
              "name": "Grid",
              "order": 1,
              "filter_type": "AND",
              "filters_disabled": false,
              "filters": [],
              "sortings": [],
              "field_options": [
                {
                  "id": 13,
                  "field_id": 12,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 14,
                  "field_id": 13,
                  "width": 200,
                  "hidden": false,
                  "order": 32767
                },
                {
                  "id": 10,
                  "field_id": 14,
                  "width": 100,
                  "hidden": false,
                  "order": 32767
                }
              ]
            }
          ],
          "rows": [
            {
              "id": 1,
              "order": "1.00000000000000000000",
              "field_12": "Tesla",
              "field_13": "2020-06-01",
              "field_14": "true"
            },
            {
              "id": 2,
              "order": "2.00000000000000000000",
              "field_12": "SpaceX",
              "field_13": null,
              "field_14": "false"
            },
            {
              "id": 3,
              "order": "3.00000000000000000000",
              "field_12": "Amazon",
              "field_13": "2018-01-01",
              "field_14": "false"
            }
          ]
        }
      ]
    }
  ]
}