Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-17 18:32:35 +00:00)

Reformat entire codebase using black. See the file .git-blame-ignore-revs for how to remove this commit from your blame view.

commit 9711d46b29 (parent d63a944b50)
219 changed files with 13206 additions and 12747 deletions
Changed files (partial tree):

backend/
    setup.py
    src/baserow/
        __init__.py
        api/
            __init__.py
            applications/
            authentication.py
            config.py
            decorators.py
            errors.py
            exceptions.py
            extensions.py
            groups/
            openapi.py
            pagination.py
            schemas.py
            serializers.py
            settings/
            templates/
            urls.py
            user/
            user_files/
            utils.py
        config/
        contrib/database/
            __init__.py
            api/
            application_types.py
            config.py
            database_routers.py
            db/
            fields/
            management/commands/
            models.py
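Every hunk below follows the same mechanical pattern that black applies by default: single-quoted strings become double-quoted, calls and literals that stay split across lines gain a trailing comma, and long expressions are re-wrapped to black's default 88-character line length; no runtime behavior changes. As the commit message notes, the commit is meant to be hidden from git blame via .git-blame-ignore-revs, for example with git config blame.ignoreRevsFile .git-blame-ignore-revs. The snippet below is only an illustrative sketch, not part of the commit: it assumes the black package is installed and uses its Python API to reproduce the kind of rewrite shown in the setup.py hunk that follows.

# Illustrative sketch: apply black's default mode to a fragment written in the
# old single-quote style. Assumes the "black" package is installed; in older
# releases the Mode class is exposed as black.FileMode.
import black

old_style = """\
install_requires = get_requirements('base')

setup(
    name='baserow',
    url='https://baserow.io',
    author='Bram Wiepjes (Baserow)',
    description='Baserow: open source no-code database backend.',
    install_requires=install_requires
)
"""

# format_str parses the code and reprints it with black's defaults: double
# quotes everywhere, a trailing comma added to the multi-line setup(...) call,
# and an 88-character line length.
print(black.format_str(old_style, mode=black.Mode()))

Running the black command line over the whole source tree produces the same rewrites file by file, which is why this diff touches 219 files without changing behavior.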
backend/setup.py
@@ -5,38 +5,34 @@ from setuptools import find_packages, setup


 PROJECT_DIR = os.path.dirname(__file__)
-REQUIREMENTS_DIR = os.path.join(PROJECT_DIR, 'requirements')
-VERSION = '1.1.0'
+REQUIREMENTS_DIR = os.path.join(PROJECT_DIR, "requirements")
+VERSION = "1.1.0"


 def get_requirements(env):
-    with open(os.path.join(REQUIREMENTS_DIR, f'{env}.txt')) as fp:
-        return [
-            x.strip()
-            for x in fp.read().split("\n")
-            if not x.startswith("#")
-        ]
+    with open(os.path.join(REQUIREMENTS_DIR, f"{env}.txt")) as fp:
+        return [x.strip() for x in fp.read().split("\n") if not x.startswith("#")]


-install_requires = get_requirements('base')
+install_requires = get_requirements("base")


 setup(
-    name='baserow',
+    name="baserow",
     version=VERSION,
-    url='https://baserow.io',
-    scripts=['baserow'],
-    author='Bram Wiepjes (Baserow)',
-    author_email='bram@baserow.io',
-    license='MIT',
-    description='Baserow: open source no-code database backend.',
-    long_description='Baserow is an open source no-code database tool and Airtable '
-                     'alternative. Easily create a relational database without any '
-                     'technical expertise. Build a table and define custom fields '
-                     'like text, number, file and many more.',
-    platforms=['linux'],
-    package_dir={'': 'src'},
-    packages=find_packages('src'),
+    url="https://baserow.io",
+    scripts=["baserow"],
+    author="Bram Wiepjes (Baserow)",
+    author_email="bram@baserow.io",
+    license="MIT",
+    description="Baserow: open source no-code database backend.",
+    long_description="Baserow is an open source no-code database tool and Airtable "
+    "alternative. Easily create a relational database without any "
+    "technical expertise. Build a table and define custom fields "
+    "like text, number, file and many more.",
+    platforms=["linux"],
+    package_dir={"": "src"},
+    packages=find_packages("src"),
     include_package_data=True,
-    install_requires=install_requires
+    install_requires=install_requires,
 )
backend/src/baserow/__init__.py
@@ -1,4 +1,4 @@
 from baserow.config.celery import app as celery_app


-__all__ = ['celery_app']
+__all__ = ["celery_app"]
backend/src/baserow/api/__init__.py
@@ -1,7 +1,7 @@
 from .extensions import (  # noqa: F401
     PolymorphicMappingSerializerExtension,
-    PolymorphicCustomFieldRegistrySerializerExtension
+    PolymorphicCustomFieldRegistrySerializerExtension,
 )


-default_app_config = 'baserow.api.config.ApiConfig'
+default_app_config = "baserow.api.config.ApiConfig"
backend/src/baserow/api/applications/errors.py
@@ -2,7 +2,7 @@ from rest_framework.status import HTTP_404_NOT_FOUND


 ERROR_APPLICATION_DOES_NOT_EXIST = (
-    'ERROR_APPLICATION_DOES_NOT_EXIST',
+    "ERROR_APPLICATION_DOES_NOT_EXIST",
     HTTP_404_NOT_FOUND,
-    'The requested application does not exist.'
+    "The requested application does not exist.",
 )
backend/src/baserow/api/applications/serializers.py
@@ -12,42 +12,40 @@ from baserow.core.models import Application

 class ApplicationSerializer(serializers.ModelSerializer):
     type = serializers.SerializerMethodField()
-    group = GroupSerializer(help_text='The group that the application belongs to.')
+    group = GroupSerializer(help_text="The group that the application belongs to.")

     class Meta:
         model = Application
-        fields = ('id', 'name', 'order', 'type', 'group')
-        extra_kwargs = {
-            'id': {
-                'read_only': True
-            }
-        }
+        fields = ("id", "name", "order", "type", "group")
+        extra_kwargs = {"id": {"read_only": True}}

     @extend_schema_field(OpenApiTypes.STR)
     def get_type(self, instance):
         # It could be that the application related to the instance is already in the
         # context else we can call the specific_class property to find it.
-        application = self.context.get('application')
+        application = self.context.get("application")
         if not application:
             application = application_type_registry.get_by_model(
-                instance.specific_class)
+                instance.specific_class
+            )

         return application.type


 class ApplicationCreateSerializer(serializers.ModelSerializer):
     type = serializers.ChoiceField(
-        choices=lazy(application_type_registry.get_types, list)())
+        choices=lazy(application_type_registry.get_types, list)()
+    )

     class Meta:
         model = Application
-        fields = ('name', 'type')
+        fields = ("name", "type")


 class ApplicationUpdateSerializer(serializers.ModelSerializer):
     class Meta:
         model = Application
-        fields = ('name',)
+        fields = ("name",)


 def get_application_serializer(instance, **kwargs):

@@ -67,4 +65,4 @@ def get_application_serializer(instance, **kwargs):
     if not serializer_class:
         serializer_class = ApplicationSerializer

-    return serializer_class(instance, context={'application': application}, **kwargs)
+    return serializer_class(instance, context={"application": application}, **kwargs)
backend/src/baserow/api/applications/urls.py
@@ -3,11 +3,11 @@ from django.conf.urls import url
 from .views import ApplicationsView, AllApplicationsView, ApplicationView


-app_name = 'baserow.api.group'
+app_name = "baserow.api.group"


 urlpatterns = [
-    url(r'group/(?P<group_id>[0-9]+)/$', ApplicationsView.as_view(), name='list'),
-    url(r'(?P<application_id>[0-9]+)/$', ApplicationView.as_view(), name='item'),
-    url(r'$', AllApplicationsView.as_view(), name='list'),
+    url(r"group/(?P<group_id>[0-9]+)/$", ApplicationsView.as_view(), name="list"),
+    url(r"(?P<application_id>[0-9]+)/$", ApplicationView.as_view(), name="item"),
+    url(r"$", AllApplicationsView.as_view(), name="list"),
 ]
@ -15,13 +15,17 @@ from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
|
|||
from baserow.core.models import Application
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup, GroupDoesNotExist, ApplicationDoesNotExist
|
||||
UserNotInGroup,
|
||||
GroupDoesNotExist,
|
||||
ApplicationDoesNotExist,
|
||||
)
|
||||
from baserow.core.registries import application_type_registry
|
||||
|
||||
from .serializers import (
|
||||
ApplicationSerializer, ApplicationCreateSerializer, ApplicationUpdateSerializer,
|
||||
get_application_serializer
|
||||
ApplicationSerializer,
|
||||
ApplicationCreateSerializer,
|
||||
ApplicationUpdateSerializer,
|
||||
get_application_serializer,
|
||||
)
|
||||
|
||||
|
||||
|
@ -37,26 +41,22 @@ class AllApplicationsView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Applications'],
|
||||
operation_id='list_all_applications',
|
||||
tags=["Applications"],
|
||||
operation_id="list_all_applications",
|
||||
description=(
|
||||
'Lists all the applications that the authorized user has access to. The '
|
||||
'properties that belong to the application can differ per type. An '
|
||||
'application always belongs to a single group. All the applications of the '
|
||||
'groups that the user has access to are going to be listed here.'
|
||||
"Lists all the applications that the authorized user has access to. The "
|
||||
"properties that belong to the application can differ per type. An "
|
||||
"application always belongs to a single group. All the applications of the "
|
||||
"groups that the user has access to are going to be listed here."
|
||||
),
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers,
|
||||
many=True
|
||||
"Applications", application_type_serializers, many=True
|
||||
),
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP'])
|
||||
}
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions({UserNotInGroup: ERROR_USER_NOT_IN_GROUP})
|
||||
def get(self, request):
|
||||
"""
|
||||
Responds with a list of serialized applications that belong to the user. If a
|
||||
|
@ -65,14 +65,11 @@ class AllApplicationsView(APIView):
|
|||
"""
|
||||
|
||||
applications = Application.objects.select_related(
|
||||
'content_type', 'group'
|
||||
).filter(
|
||||
group__users__in=[request.user]
|
||||
)
|
||||
"content_type", "group"
|
||||
).filter(group__users__in=[request.user])
|
||||
|
||||
data = [
|
||||
get_application_serializer(application).data
|
||||
for application in applications
|
||||
get_application_serializer(application).data for application in applications
|
||||
]
|
||||
return Response(data)
|
||||
|
||||
|
@ -81,7 +78,7 @@ class ApplicationsView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
def get_permissions(self):
|
||||
if self.request.method == 'GET':
|
||||
if self.request.method == "GET":
|
||||
return [AllowAny()]
|
||||
|
||||
return super().get_permissions()
|
||||
|
@ -89,36 +86,36 @@ class ApplicationsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns only applications that are in the group related '
|
||||
'to the provided value.'
|
||||
description="Returns only applications that are in the group related "
|
||||
"to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Applications'],
|
||||
operation_id='list_applications',
|
||||
tags=["Applications"],
|
||||
operation_id="list_applications",
|
||||
description=(
|
||||
'Lists all the applications of the group related to the provided '
|
||||
'`group_id` parameter if the authorized user is in that group. If the'
|
||||
'group is related to a template, then this endpoint will be publicly '
|
||||
'accessible. The properties that belong to the application can differ per '
|
||||
'type. An application always belongs to a single group.'
|
||||
"Lists all the applications of the group related to the provided "
|
||||
"`group_id` parameter if the authorized user is in that group. If the"
|
||||
"group is related to a template, then this endpoint will be publicly "
|
||||
"accessible. The properties that belong to the application can differ per "
|
||||
"type. An application always belongs to a single group."
|
||||
),
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers,
|
||||
many=True
|
||||
"Applications", application_type_serializers, many=True
|
||||
),
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
def get(self, request, group_id):
|
||||
"""
|
||||
Responds with a list of serialized applications that belong to the user. If a
|
||||
|
@ -130,59 +127,57 @@ class ApplicationsView(APIView):
|
|||
group.has_user(request.user, raise_error=True, allow_if_template=True)
|
||||
|
||||
applications = Application.objects.select_related(
|
||||
'content_type', 'group'
|
||||
"content_type", "group"
|
||||
).filter(group=group)
|
||||
|
||||
data = [
|
||||
get_application_serializer(application).data
|
||||
for application in applications
|
||||
get_application_serializer(application).data for application in applications
|
||||
]
|
||||
return Response(data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Creates an application for the group related to the '
|
||||
'provided value.'
|
||||
description="Creates an application for the group related to the "
|
||||
"provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Applications'],
|
||||
operation_id='create_application',
|
||||
tags=["Applications"],
|
||||
operation_id="create_application",
|
||||
description=(
|
||||
'Creates a new application based on the provided type. The newly created '
|
||||
'application is going to be added to the group related to the provided '
|
||||
'`group_id` parameter. If the authorized user does not belong to the group '
|
||||
'an error will be returned.'
|
||||
"Creates a new application based on the provided type. The newly created "
|
||||
"application is going to be added to the group related to the provided "
|
||||
"`group_id` parameter. If the authorized user does not belong to the group "
|
||||
"an error will be returned."
|
||||
),
|
||||
request=ApplicationCreateSerializer,
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers
|
||||
"Applications", application_type_serializers
|
||||
),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema([
|
||||
'ERROR_GROUP_DOES_NOT_EXIST'
|
||||
])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(ApplicationCreateSerializer)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def post(self, request, data, group_id):
|
||||
"""Creates a new application for a user."""
|
||||
|
||||
group = CoreHandler().get_group(group_id)
|
||||
application = CoreHandler().create_application(
|
||||
request.user, group, data['type'], name=data['name']
|
||||
request.user, group, data["type"], name=data["name"]
|
||||
)
|
||||
|
||||
return Response(get_application_serializer(application).data)
|
||||
|
@ -194,35 +189,36 @@ class ApplicationView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='application_id',
|
||||
name="application_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns the application related to the provided value.'
|
||||
description="Returns the application related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Applications'],
|
||||
operation_id='get_application',
|
||||
tags=["Applications"],
|
||||
operation_id="get_application",
|
||||
description=(
|
||||
'Returns the requested application if the authorized user is in the '
|
||||
'application\'s group. The properties that belong to the application can '
|
||||
'differ per type.'
|
||||
"Returns the requested application if the authorized user is in the "
|
||||
"application's group. The properties that belong to the application can "
|
||||
"differ per type."
|
||||
),
|
||||
request=ApplicationCreateSerializer,
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers
|
||||
"Applications", application_type_serializers
|
||||
),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_APPLICATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions({
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def get(self, request, application_id):
|
||||
"""Selects a single application and responds with a serialized version."""
|
||||
|
||||
|
@ -233,84 +229,85 @@ class ApplicationView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='application_id',
|
||||
name="application_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the application related to the provided value.'
|
||||
description="Updates the application related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Applications'],
|
||||
operation_id='update_application',
|
||||
tags=["Applications"],
|
||||
operation_id="update_application",
|
||||
description=(
|
||||
'Updates the existing application related to the provided '
|
||||
'`application_id` param if the authorized user is in the application\'s '
|
||||
'group. It is not possible to change the type, but properties like the '
|
||||
'name can be changed.'
|
||||
"Updates the existing application related to the provided "
|
||||
"`application_id` param if the authorized user is in the application's "
|
||||
"group. It is not possible to change the type, but properties like the "
|
||||
"name can be changed."
|
||||
),
|
||||
request=ApplicationUpdateSerializer,
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers
|
||||
"Applications", application_type_serializers
|
||||
),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_APPLICATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(ApplicationUpdateSerializer)
|
||||
@map_exceptions({
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def patch(self, request, data, application_id):
|
||||
"""Updates the application if the user belongs to the group."""
|
||||
|
||||
application = CoreHandler().get_application(
|
||||
application_id,
|
||||
base_queryset=Application.objects.select_for_update()
|
||||
application_id, base_queryset=Application.objects.select_for_update()
|
||||
)
|
||||
application = CoreHandler().update_application(
|
||||
request.user, application, name=data['name']
|
||||
request.user, application, name=data["name"]
|
||||
)
|
||||
return Response(get_application_serializer(application).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='application_id',
|
||||
name="application_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the application related to the provided value.'
|
||||
description="Deletes the application related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Applications'],
|
||||
operation_id='delete_application',
|
||||
tags=["Applications"],
|
||||
operation_id="delete_application",
|
||||
description=(
|
||||
'Deletes an application if the authorized user is in the application\'s '
|
||||
'group. All the related children are also going to be deleted. For example '
|
||||
'in case of a database application all the underlying tables, fields, '
|
||||
'views and rows are going to be deleted.'
|
||||
"Deletes an application if the authorized user is in the application's "
|
||||
"group. All the related children are also going to be deleted. For example "
|
||||
"in case of a database application all the underlying tables, fields, "
|
||||
"views and rows are going to be deleted."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_APPLICATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def delete(self, request, application_id):
|
||||
"""Deletes an existing application if the user belongs to the group."""
|
||||
|
||||
application = CoreHandler().get_application(
|
||||
application_id,
|
||||
base_queryset=Application.objects.select_for_update()
|
||||
application_id, base_queryset=Application.objects.select_for_update()
|
||||
)
|
||||
CoreHandler().delete_application(request.user, application)
|
||||
|
||||
|
|
|
backend/src/baserow/api/authentication.py
@@ -7,7 +7,7 @@ from django.utils.translation import ugettext as _
 from rest_framework import exceptions
 from rest_framework_jwt.authentication import (
     jwt_decode_handler,
-    JSONWebTokenAuthentication as JWTJSONWebTokenAuthentication
+    JSONWebTokenAuthentication as JWTJSONWebTokenAuthentication,
 )


@@ -26,37 +26,35 @@ class JSONWebTokenAuthentication(JWTJSONWebTokenAuthentication):
         try:
             payload = jwt_decode_handler(jwt_value)
         except jwt.ExpiredSignature:
-            msg = _('Signature has expired.')
-            raise exceptions.AuthenticationFailed({
-                'detail': msg,
-                'error': 'ERROR_SIGNATURE_HAS_EXPIRED'
-            })
+            msg = _("Signature has expired.")
+            raise exceptions.AuthenticationFailed(
+                {"detail": msg, "error": "ERROR_SIGNATURE_HAS_EXPIRED"}
+            )
         except jwt.DecodeError:
-            msg = _('Error decoding signature.')
-            raise exceptions.AuthenticationFailed({
-                'detail': msg,
-                'error': 'ERROR_DECODING_SIGNATURE'
-            })
+            msg = _("Error decoding signature.")
+            raise exceptions.AuthenticationFailed(
+                {"detail": msg, "error": "ERROR_DECODING_SIGNATURE"}
+            )
         except jwt.InvalidTokenError:
             raise exceptions.AuthenticationFailed()

         user = self.authenticate_credentials(payload)

         # @TODO this should actually somehow be moved to the ws app.
-        user.web_socket_id = request.headers.get('WebSocketId')
+        user.web_socket_id = request.headers.get("WebSocketId")

         return user, jwt_value


 class JSONWebTokenAuthenticationExtension(OpenApiAuthenticationExtension):
-    target_class = 'baserow.api.authentication.JSONWebTokenAuthentication'
-    name = 'JWT'
+    target_class = "baserow.api.authentication.JSONWebTokenAuthentication"
+    name = "JWT"
     match_subclasses = True
     priority = -1

     def get_security_definition(self, auto_schema):
         return {
-            'type': 'http',
-            'scheme': 'bearer',
-            'bearerFormat': 'JWT your_token',
+            "type": "http",
+            "scheme": "bearer",
+            "bearerFormat": "JWT your_token",
         }
backend/src/baserow/api/config.py
@@ -2,4 +2,4 @@ from django.apps import AppConfig


 class ApiConfig(AppConfig):
-    name = 'baserow.api'
+    name = "baserow.api"
@ -1,6 +1,8 @@
|
|||
from .utils import (
|
||||
map_exceptions as map_exceptions_utility, get_request, validate_data,
|
||||
validate_data_custom_fields
|
||||
map_exceptions as map_exceptions_utility,
|
||||
get_request,
|
||||
validate_data,
|
||||
validate_data_custom_fields,
|
||||
)
|
||||
from .exceptions import RequestBodyValidationException
|
||||
|
||||
|
@ -39,7 +41,9 @@ def map_exceptions(exceptions):
|
|||
def func_wrapper(*args, **kwargs):
|
||||
with map_exceptions_utility(exceptions):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return func_wrapper
|
||||
|
||||
return map_exceptions_decorator
|
||||
|
||||
|
||||
|
@ -82,17 +86,20 @@ def validate_body(serializer_class):
|
|||
def func_wrapper(*args, **kwargs):
|
||||
request = get_request(args)
|
||||
|
||||
if 'data' in kwargs:
|
||||
raise ValueError('The data attribute is already in the kwargs.')
|
||||
if "data" in kwargs:
|
||||
raise ValueError("The data attribute is already in the kwargs.")
|
||||
|
||||
kwargs['data'] = validate_data(serializer_class, request.data)
|
||||
kwargs["data"] = validate_data(serializer_class, request.data)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return func_wrapper
|
||||
|
||||
return validate_decorator
|
||||
|
||||
|
||||
def validate_body_custom_fields(registry, base_serializer_class=None,
|
||||
type_attribute_name='type'):
|
||||
def validate_body_custom_fields(
|
||||
registry, base_serializer_class=None, type_attribute_name="type"
|
||||
):
|
||||
"""
|
||||
This decorator can validate the request data dynamically using the generated
|
||||
serializer that belongs to the type instance. Based on a provided
|
||||
|
@ -122,25 +129,28 @@ def validate_body_custom_fields(registry, base_serializer_class=None,
|
|||
if not type_name:
|
||||
# If the type name isn't provided in the data we will raise a machine
|
||||
# readable validation error.
|
||||
raise RequestBodyValidationException({
|
||||
type_attribute_name: [
|
||||
{
|
||||
"error": "This field is required.",
|
||||
"code": "required"
|
||||
}
|
||||
]
|
||||
})
|
||||
raise RequestBodyValidationException(
|
||||
{
|
||||
type_attribute_name: [
|
||||
{"error": "This field is required.", "code": "required"}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
if 'data' in kwargs:
|
||||
raise ValueError('The data attribute is already in the kwargs.')
|
||||
if "data" in kwargs:
|
||||
raise ValueError("The data attribute is already in the kwargs.")
|
||||
|
||||
kwargs['data'] = validate_data_custom_fields(
|
||||
type_name, registry, request.data,
|
||||
kwargs["data"] = validate_data_custom_fields(
|
||||
type_name,
|
||||
registry,
|
||||
request.data,
|
||||
base_serializer_class=base_serializer_class,
|
||||
type_attribute_name=type_attribute_name
|
||||
type_attribute_name=type_attribute_name,
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return func_wrapper
|
||||
|
||||
return validate_decorator
|
||||
|
||||
|
||||
|
@ -174,12 +184,14 @@ def allowed_includes(*allowed):
|
|||
def validate_decorator(func):
|
||||
def func_wrapper(*args, **kwargs):
|
||||
request = get_request(args)
|
||||
raw_include = request.GET.get('include', None)
|
||||
includes = raw_include.split(',') if raw_include else []
|
||||
raw_include = request.GET.get("include", None)
|
||||
includes = raw_include.split(",") if raw_include else []
|
||||
|
||||
for include in allowed:
|
||||
kwargs[include] = include in includes
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return func_wrapper
|
||||
|
||||
return validate_decorator
|
||||
|
|
|
backend/src/baserow/api/errors.py
@@ -2,20 +2,20 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND


 ERROR_GROUP_DOES_NOT_EXIST = (
-    'ERROR_GROUP_DOES_NOT_EXIST',
+    "ERROR_GROUP_DOES_NOT_EXIST",
     HTTP_404_NOT_FOUND,
-    'The requested group does not exist.'
+    "The requested group does not exist.",
 )
 ERROR_USER_INVALID_GROUP_PERMISSIONS = (
-    'ERROR_USER_INVALID_GROUP_PERMISSIONS',
+    "ERROR_USER_INVALID_GROUP_PERMISSIONS",
     HTTP_400_BAD_REQUEST,
-    'You need {e.permissions} permissions.'
+    "You need {e.permissions} permissions.",
 )
-ERROR_USER_NOT_IN_GROUP = 'ERROR_USER_NOT_IN_GROUP'
-BAD_TOKEN_SIGNATURE = 'BAD_TOKEN_SIGNATURE'
-EXPIRED_TOKEN_SIGNATURE = 'EXPIRED_TOKEN_SIGNATURE'
+ERROR_USER_NOT_IN_GROUP = "ERROR_USER_NOT_IN_GROUP"
+BAD_TOKEN_SIGNATURE = "BAD_TOKEN_SIGNATURE"
+EXPIRED_TOKEN_SIGNATURE = "EXPIRED_TOKEN_SIGNATURE"
 ERROR_HOSTNAME_IS_NOT_ALLOWED = (
-    'ERROR_HOSTNAME_IS_NOT_ALLOWED',
+    "ERROR_HOSTNAME_IS_NOT_ALLOWED",
     HTTP_400_BAD_REQUEST,
-    'Only the hostname of the web frontend is allowed.'
+    "Only the hostname of the web frontend is allowed.",
 )
backend/src/baserow/api/exceptions.py
@@ -3,8 +3,7 @@ from rest_framework.exceptions import APIException

 class RequestBodyValidationException(APIException):
     def __init__(self, detail=None, code=None):
-        super().__init__({
-            'error': 'ERROR_REQUEST_BODY_VALIDATION',
-            'detail': detail
-        }, code=code)
+        super().__init__(
+            {"error": "ERROR_REQUEST_BODY_VALIDATION", "detail": detail}, code=code
+        )
         self.status_code = 400
@ -22,7 +22,7 @@ class PolymorphicMappingSerializerExtension(OpenApiSerializerExtension):
|
|||
)
|
||||
"""
|
||||
|
||||
target_class = 'baserow.api.utils.PolymorphicMappingSerializer'
|
||||
target_class = "baserow.api.utils.PolymorphicMappingSerializer"
|
||||
|
||||
def get_name(self):
|
||||
return self.target.component_name
|
||||
|
@ -36,20 +36,18 @@ class PolymorphicMappingSerializerExtension(OpenApiSerializerExtension):
|
|||
for key, serializer_class in mapping.items():
|
||||
sub_serializer = force_instance(serializer_class)
|
||||
resolved_sub_serializer = auto_schema.resolve_serializer(
|
||||
sub_serializer,
|
||||
direction
|
||||
sub_serializer, direction
|
||||
)
|
||||
sub_components.append((key, resolved_sub_serializer.ref))
|
||||
|
||||
return {
|
||||
'oneOf': [ref for _, ref in sub_components],
|
||||
'discriminator': {
|
||||
'propertyName': self.target.type_field_name,
|
||||
'mapping': {
|
||||
resource_type: ref['$ref']
|
||||
for resource_type, ref in sub_components
|
||||
}
|
||||
}
|
||||
"oneOf": [ref for _, ref in sub_components],
|
||||
"discriminator": {
|
||||
"propertyName": self.target.type_field_name,
|
||||
"mapping": {
|
||||
resource_type: ref["$ref"] for resource_type, ref in sub_components
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
@ -73,18 +71,16 @@ class PolymorphicCustomFieldRegistrySerializerExtension(
|
|||
)
|
||||
"""
|
||||
|
||||
target_class = 'baserow.api.utils.PolymorphicCustomFieldRegistrySerializer'
|
||||
target_class = "baserow.api.utils.PolymorphicCustomFieldRegistrySerializer"
|
||||
|
||||
def get_name(self):
|
||||
part_1 = self.target.registry.name.title()
|
||||
part_2 = self.target.base_class.__name__
|
||||
return f'{part_1}{part_2}'
|
||||
return f"{part_1}{part_2}"
|
||||
|
||||
def map_serializer(self, auto_schema, direction):
|
||||
mapping = {
|
||||
types.type: types.get_serializer_class(
|
||||
base_class=self.target.base_class
|
||||
)
|
||||
types.type: types.get_serializer_class(base_class=self.target.base_class)
|
||||
for types in self.target.registry.registry.values()
|
||||
}
|
||||
|
||||
|
|
|
backend/src/baserow/api/groups/invitations/errors.py
@@ -2,12 +2,12 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND


 ERROR_GROUP_INVITATION_DOES_NOT_EXIST = (
-    'ERROR_GROUP_INVITATION_DOES_NOT_EXIST',
+    "ERROR_GROUP_INVITATION_DOES_NOT_EXIST",
     HTTP_404_NOT_FOUND,
-    'The requested group invitation does not exist.'
+    "The requested group invitation does not exist.",
 )
 ERROR_GROUP_INVITATION_EMAIL_MISMATCH = (
-    'ERROR_GROUP_INVITATION_EMAIL_MISMATCH',
+    "ERROR_GROUP_INVITATION_EMAIL_MISMATCH",
     HTTP_400_BAD_REQUEST,
-    'Your email address does not match with the invitation.'
+    "Your email address does not match with the invitation.",
 )
@ -9,28 +9,26 @@ from baserow.core.models import GroupInvitation
|
|||
class GroupInvitationSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GroupInvitation
|
||||
fields = ('id', 'group', 'email', 'permissions', 'message', 'created_on')
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True}
|
||||
}
|
||||
fields = ("id", "group", "email", "permissions", "message", "created_on")
|
||||
extra_kwargs = {"id": {"read_only": True}}
|
||||
|
||||
|
||||
class CreateGroupInvitationSerializer(serializers.ModelSerializer):
|
||||
base_url = serializers.URLField(
|
||||
help_text='The base URL where the user can publicly accept his invitation.'
|
||||
'The accept token is going to be appended to the base_url (base_url '
|
||||
'\'/token\').'
|
||||
help_text="The base URL where the user can publicly accept his invitation."
|
||||
"The accept token is going to be appended to the base_url (base_url "
|
||||
"'/token')."
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = GroupInvitation
|
||||
fields = ('email', 'permissions', 'message', 'base_url')
|
||||
fields = ("email", "permissions", "message", "base_url")
|
||||
|
||||
|
||||
class UpdateGroupInvitationSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GroupInvitation
|
||||
fields = ('permissions',)
|
||||
fields = ("permissions",)
|
||||
|
||||
|
||||
class UserGroupInvitationSerializer(serializers.ModelSerializer):
|
||||
|
@ -45,12 +43,19 @@ class UserGroupInvitationSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = GroupInvitation
|
||||
fields = ('id', 'invited_by', 'group', 'email', 'message', 'created_on',
|
||||
'email_exists')
|
||||
fields = (
|
||||
"id",
|
||||
"invited_by",
|
||||
"group",
|
||||
"email",
|
||||
"message",
|
||||
"created_on",
|
||||
"email_exists",
|
||||
)
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True},
|
||||
'message': {'read_only': True},
|
||||
'created_on': {'read_only': True}
|
||||
"id": {"read_only": True},
|
||||
"message": {"read_only": True},
|
||||
"created_on": {"read_only": True},
|
||||
}
|
||||
|
||||
@extend_schema_field(OpenApiTypes.STR)
|
||||
|
@ -63,4 +68,4 @@ class UserGroupInvitationSerializer(serializers.ModelSerializer):
|
|||
|
||||
@extend_schema_field(OpenApiTypes.BOOL)
|
||||
def get_email_exists(self, object):
|
||||
return object.email_exists if hasattr(object, 'email_exists') else None
|
||||
return object.email_exists if hasattr(object, "email_exists") else None
|
||||
|
|
|
@ -1,34 +1,31 @@
|
|||
from django.conf.urls import url
|
||||
|
||||
from .views import (
|
||||
GroupInvitationsView, GroupInvitationView, AcceptGroupInvitationView,
|
||||
RejectGroupInvitationView, GroupInvitationByTokenView
|
||||
GroupInvitationsView,
|
||||
GroupInvitationView,
|
||||
AcceptGroupInvitationView,
|
||||
RejectGroupInvitationView,
|
||||
GroupInvitationByTokenView,
|
||||
)
|
||||
|
||||
|
||||
app_name = 'baserow.api.groups.invitations'
|
||||
app_name = "baserow.api.groups.invitations"
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(r'group/(?P<group_id>[0-9]+)/$', GroupInvitationsView.as_view(), name='list'),
|
||||
url(r"group/(?P<group_id>[0-9]+)/$", GroupInvitationsView.as_view(), name="list"),
|
||||
url(r"token/(?P<token>.*)/$", GroupInvitationByTokenView.as_view(), name="token"),
|
||||
url(
|
||||
r'token/(?P<token>.*)/$',
|
||||
GroupInvitationByTokenView.as_view(),
|
||||
name='token'
|
||||
r"(?P<group_invitation_id>[0-9]+)/$", GroupInvitationView.as_view(), name="item"
|
||||
),
|
||||
url(
|
||||
r'(?P<group_invitation_id>[0-9]+)/$',
|
||||
GroupInvitationView.as_view(),
|
||||
name='item'
|
||||
),
|
||||
url(
|
||||
r'(?P<group_invitation_id>[0-9]+)/accept/$',
|
||||
r"(?P<group_invitation_id>[0-9]+)/accept/$",
|
||||
AcceptGroupInvitationView.as_view(),
|
||||
name='accept'
|
||||
name="accept",
|
||||
),
|
||||
url(
|
||||
r'(?P<group_invitation_id>[0-9]+)/reject/$',
|
||||
r"(?P<group_invitation_id>[0-9]+)/reject/$",
|
||||
RejectGroupInvitationView.as_view(),
|
||||
name='reject'
|
||||
name="reject",
|
||||
),
|
||||
]
|
||||
|
|
|
@ -13,27 +13,36 @@ from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
|
|||
|
||||
from baserow.api.decorators import validate_body, map_exceptions
|
||||
from baserow.api.errors import (
|
||||
ERROR_USER_NOT_IN_GROUP, ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
ERROR_GROUP_DOES_NOT_EXIST, ERROR_HOSTNAME_IS_NOT_ALLOWED,
|
||||
BAD_TOKEN_SIGNATURE
|
||||
ERROR_USER_NOT_IN_GROUP,
|
||||
ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
ERROR_GROUP_DOES_NOT_EXIST,
|
||||
ERROR_HOSTNAME_IS_NOT_ALLOWED,
|
||||
BAD_TOKEN_SIGNATURE,
|
||||
)
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.api.groups.serializers import GroupUserGroupSerializer
|
||||
from baserow.api.groups.users.errors import ERROR_GROUP_USER_ALREADY_EXISTS
|
||||
from baserow.api.groups.invitations.errors import (
|
||||
ERROR_GROUP_INVITATION_DOES_NOT_EXIST, ERROR_GROUP_INVITATION_EMAIL_MISMATCH
|
||||
ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
)
|
||||
from baserow.core.models import GroupInvitation
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup, UserInvalidGroupPermissionsError, GroupDoesNotExist,
|
||||
GroupInvitationDoesNotExist, BaseURLHostnameNotAllowed,
|
||||
GroupInvitationEmailMismatch, GroupUserAlreadyExists
|
||||
UserNotInGroup,
|
||||
UserInvalidGroupPermissionsError,
|
||||
GroupDoesNotExist,
|
||||
GroupInvitationDoesNotExist,
|
||||
BaseURLHostnameNotAllowed,
|
||||
GroupInvitationEmailMismatch,
|
||||
GroupUserAlreadyExists,
|
||||
)
|
||||
|
||||
from .serializers import (
|
||||
GroupInvitationSerializer, CreateGroupInvitationSerializer,
|
||||
UpdateGroupInvitationSerializer, UserGroupInvitationSerializer
|
||||
GroupInvitationSerializer,
|
||||
CreateGroupInvitationSerializer,
|
||||
UpdateGroupInvitationSerializer,
|
||||
UserGroupInvitationSerializer,
|
||||
)
|
||||
|
||||
|
||||
|
@ -46,39 +55,40 @@ class GroupInvitationsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns only invitations that are in the group related '
|
||||
'to the provided value.'
|
||||
description="Returns only invitations that are in the group related "
|
||||
"to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='list_group_invitations',
|
||||
tags=["Group invitations"],
|
||||
operation_id="list_group_invitations",
|
||||
description=(
|
||||
'Lists all the group invitations of the group related to the provided '
|
||||
'`group_id` parameter if the authorized user has admin rights to that '
|
||||
'group.'
|
||||
"Lists all the group invitations of the group related to the provided "
|
||||
"`group_id` parameter if the authorized user has admin rights to that "
|
||||
"group."
|
||||
),
|
||||
responses={
|
||||
200: GroupInvitationSerializer(many=True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_USER_INVALID_GROUP_PERMISSIONS"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
def get(self, request, group_id):
|
||||
"""Lists all the invitations of the provided group id."""
|
||||
|
||||
group = CoreHandler().get_group(group_id)
|
||||
group.has_user(request.user, 'ADMIN', raise_error=True)
|
||||
group.has_user(request.user, "ADMIN", raise_error=True)
|
||||
group_invitations = GroupInvitation.objects.filter(group=group)
|
||||
serializer = GroupInvitationSerializer(group_invitations, many=True)
|
||||
return Response(serializer.data)
|
||||
|
@ -86,48 +96,50 @@ class GroupInvitationsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Creates a group invitation to the group related to the '
|
||||
'provided value.'
|
||||
description="Creates a group invitation to the group related to the "
|
||||
"provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='create_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="create_group_invitation",
|
||||
description=(
|
||||
'Creates a new group invitations for an email address if the authorized '
|
||||
'user has admin rights to the related group. An email containing a sign '
|
||||
'up link will be send to the user.'
|
||||
"Creates a new group invitations for an email address if the authorized "
|
||||
"user has admin rights to the related group. An email containing a sign "
|
||||
"up link will be send to the user."
|
||||
),
|
||||
request=CreateGroupInvitationSerializer,
|
||||
responses={
|
||||
200: GroupInvitationSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_USER_INVALID_GROUP_PERMISSIONS",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(CreateGroupInvitationSerializer)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
GroupUserAlreadyExists: ERROR_GROUP_USER_ALREADY_EXISTS,
|
||||
BaseURLHostnameNotAllowed: ERROR_HOSTNAME_IS_NOT_ALLOWED
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
GroupUserAlreadyExists: ERROR_GROUP_USER_ALREADY_EXISTS,
|
||||
BaseURLHostnameNotAllowed: ERROR_HOSTNAME_IS_NOT_ALLOWED,
|
||||
}
|
||||
)
|
||||
def post(self, request, data, group_id):
|
||||
"""Creates a new group invitation and sends it the provided email."""
|
||||
|
||||
group = CoreHandler().get_group(group_id)
|
||||
group_invitation = CoreHandler().create_group_invitation(
|
||||
request.user,
|
||||
group,
|
||||
**data
|
||||
request.user, group, **data
|
||||
)
|
||||
return Response(GroupInvitationSerializer(group_invitation).data)
|
||||
|
||||
|
@ -138,126 +150,130 @@ class GroupInvitationView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_invitation_id',
|
||||
name="group_invitation_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns the group invitation related to the provided '
|
||||
'value.'
|
||||
description="Returns the group invitation related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='get_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="get_group_invitation",
|
||||
description=(
|
||||
'Returns the requested group invitation if the authorized user has admin '
|
||||
'right to the related group'
|
||||
"Returns the requested group invitation if the authorized user has admin "
|
||||
"right to the related group"
|
||||
),
|
||||
responses={
|
||||
200: GroupInvitationSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_USER_INVALID_GROUP_PERMISSIONS"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions({
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def get(self, request, group_invitation_id):
|
||||
"""Selects a single group invitation and responds with a serialized version."""
|
||||
|
||||
group_invitation = CoreHandler().get_group_invitation(group_invitation_id)
|
||||
group_invitation.group.has_user(request.user, 'ADMIN', raise_error=True)
|
||||
group_invitation.group.has_user(request.user, "ADMIN", raise_error=True)
|
||||
return Response(GroupInvitationSerializer(group_invitation).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_invitation_id',
|
||||
name="group_invitation_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the group invitation related to the provided '
|
||||
'value.'
|
||||
description="Updates the group invitation related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='update_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="update_group_invitation",
|
||||
description=(
|
||||
'Updates the existing group invitation related to the provided '
|
||||
'`group_invitation_id` param if the authorized user has admin rights to '
|
||||
'the related group.'
|
||||
"Updates the existing group invitation related to the provided "
|
||||
"`group_invitation_id` param if the authorized user has admin rights to "
|
||||
"the related group."
|
||||
),
|
||||
request=UpdateGroupInvitationSerializer,
|
||||
responses={
|
||||
200: GroupInvitationSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_USER_INVALID_GROUP_PERMISSIONS",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(UpdateGroupInvitationSerializer)
|
||||
@map_exceptions({
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def patch(self, request, data, group_invitation_id):
|
||||
"""Updates the group invitation if the user belongs to the group."""
|
||||
|
||||
group_invitation = CoreHandler().get_group_invitation(
|
||||
group_invitation_id,
|
||||
base_queryset=GroupInvitation.objects.select_for_update()
|
||||
base_queryset=GroupInvitation.objects.select_for_update(),
|
||||
)
|
||||
group_invitation = CoreHandler().update_group_invitation(
|
||||
request.user,
|
||||
group_invitation,
|
||||
**data
|
||||
request.user, group_invitation, **data
|
||||
)
|
||||
return Response(GroupInvitationSerializer(group_invitation).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_invitation_id',
|
||||
name="group_invitation_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the group invitation related to the provided '
|
||||
'value.'
|
||||
description="Deletes the group invitation related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='delete_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="delete_group_invitation",
|
||||
description=(
|
||||
'Deletes a group invitation if the authorized user has admin rights to '
|
||||
'the related group.'
|
||||
"Deletes a group invitation if the authorized user has admin rights to "
|
||||
"the related group."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_USER_INVALID_GROUP_PERMISSIONS"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def delete(self, request, group_invitation_id):
|
||||
"""Deletes an existing group_invitation if the user belongs to the group."""
|
||||
|
||||
group_invitation = CoreHandler().get_group_invitation(
|
||||
group_invitation_id,
|
||||
base_queryset=GroupInvitation.objects.select_for_update()
|
||||
base_queryset=GroupInvitation.objects.select_for_update(),
|
||||
)
|
||||
CoreHandler().delete_group_invitation(request.user, group_invitation)
|
||||
return Response(status=204)
|
||||
|
@ -269,46 +285,47 @@ class AcceptGroupInvitationView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_invitation_id',
|
||||
name="group_invitation_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Accepts the group invitation related to the provided '
|
||||
'value.'
|
||||
description="Accepts the group invitation related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='accept_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="accept_group_invitation",
|
||||
description=(
|
||||
'Accepts a group invitation with the given id if the email address of the '
|
||||
'user matches that of the invitation.'
|
||||
"Accepts a group invitation with the given id if the email address of the "
|
||||
"user matches that of the invitation."
|
||||
),
|
||||
request=None,
|
||||
responses={
|
||||
200: GroupUserGroupSerializer,
|
||||
400: get_error_schema(['ERROR_GROUP_INVITATION_EMAIL_MISMATCH']),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_GROUP_INVITATION_EMAIL_MISMATCH"]),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def post(self, request, group_invitation_id):
|
||||
"""Accepts a group invitation."""
|
||||
|
||||
try:
|
||||
group_invitation = GroupInvitation.objects.select_related('group').get(
|
||||
group_invitation = GroupInvitation.objects.select_related("group").get(
|
||||
id=group_invitation_id
|
||||
)
|
||||
except GroupInvitation.DoesNotExist:
|
||||
raise GroupInvitationDoesNotExist(
|
||||
f'The group invitation with id {group_invitation_id} does not exist.'
|
||||
f"The group invitation with id {group_invitation_id} does not exist."
|
||||
)
|
||||
|
||||
group_user = CoreHandler().accept_group_invitation(
|
||||
request.user,
|
||||
group_invitation
|
||||
request.user, group_invitation
|
||||
)
|
||||
return Response(GroupUserGroupSerializer(group_user).data)
|
||||
|
||||
|
@ -319,41 +336,43 @@ class RejectGroupInvitationView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_invitation_id',
|
||||
name="group_invitation_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Rejects the group invitation related to the provided '
|
||||
'value.'
|
||||
description="Rejects the group invitation related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='reject_group_invitation',
|
||||
tags=["Group invitations"],
|
||||
operation_id="reject_group_invitation",
|
||||
description=(
|
||||
'Rejects a group invitation with the given id if the email address of the '
|
||||
'user matches that of the invitation.'
|
||||
"Rejects a group invitation with the given id if the email address of the "
|
||||
"user matches that of the invitation."
|
||||
),
|
||||
request=None,
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema(['ERROR_GROUP_INVITATION_EMAIL_MISMATCH']),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_GROUP_INVITATION_EMAIL_MISMATCH"]),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def post(self, request, group_invitation_id):
|
||||
"""Rejects a group invitation."""
|
||||
|
||||
try:
|
||||
group_invitation = GroupInvitation.objects.select_related('group').get(
|
||||
group_invitation = GroupInvitation.objects.select_related("group").get(
|
||||
id=group_invitation_id
|
||||
)
|
||||
except GroupInvitation.DoesNotExist:
|
||||
raise GroupInvitationDoesNotExist(
|
||||
f'The group invitation with id {group_invitation_id} does not exist.'
|
||||
f"The group invitation with id {group_invitation_id} does not exist."
|
||||
)
|
||||
|
||||
CoreHandler().reject_group_invitation(request.user, group_invitation)
|
||||
|
@ -366,40 +385,42 @@ class GroupInvitationByTokenView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='token',
|
||||
name="token",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.STR,
|
||||
description='Returns the group invitation related to the provided '
|
||||
'token.'
|
||||
description="Returns the group invitation related to the provided "
|
||||
"token.",
|
||||
)
|
||||
],
|
||||
tags=['Group invitations'],
|
||||
operation_id='get_group_invitation_by_token',
|
||||
tags=["Group invitations"],
|
||||
operation_id="get_group_invitation_by_token",
|
||||
description=(
|
||||
'Responds with the serialized group invitation if an invitation with the '
|
||||
'provided token is found.'
|
||||
"Responds with the serialized group invitation if an invitation with the "
|
||||
"provided token is found."
|
||||
),
|
||||
responses={
|
||||
200: UserGroupInvitationSerializer,
|
||||
400: get_error_schema(['BAD_TOKEN_SIGNATURE']),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["BAD_TOKEN_SIGNATURE"]),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions({
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def get(self, request, token):
|
||||
"""
|
||||
Responds with the serialized group invitation if an invitation with the
|
||||
provided token is found.
|
||||
"""
|
||||
|
||||
exists_queryset = User.objects.filter(username=OuterRef('email'))
|
||||
exists_queryset = User.objects.filter(username=OuterRef("email"))
|
||||
group_invitation = CoreHandler().get_group_invitation_by_token(
|
||||
token,
|
||||
base_queryset=GroupInvitation.objects.annotate(
|
||||
email_exists=Exists(exists_queryset)
|
||||
)
|
||||
),
|
||||
)
|
||||
return Response(UserGroupInvitationSerializer(group_invitation).data)
|
||||
|
|
|
@ -1,20 +1,22 @@
|
|||
from drf_spectacular.plumbing import build_object_type
|
||||
|
||||
|
||||
group_user_schema = build_object_type({
|
||||
'order': {
|
||||
'type': 'int',
|
||||
'description': 'The order of the group, lowest first.',
|
||||
'example': 0
|
||||
},
|
||||
'id': {
|
||||
'type': 'int',
|
||||
'description': 'The unique identifier of the group.',
|
||||
'example': 1
|
||||
},
|
||||
'name': {
|
||||
'type': 'string',
|
||||
'description': 'The name given to the group.',
|
||||
'example': 'Bram\'s group'
|
||||
group_user_schema = build_object_type(
|
||||
{
|
||||
"order": {
|
||||
"type": "int",
|
||||
"description": "The order of the group, lowest first.",
|
||||
"example": 0,
|
||||
},
|
||||
"id": {
|
||||
"type": "int",
|
||||
"description": "The unique identifier of the group.",
|
||||
"example": 1,
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "The name given to the group.",
|
||||
"example": "Bram's group",
|
||||
},
|
||||
}
|
||||
})
|
||||
)
|
||||
|
|
|
@ -5,22 +5,20 @@ from baserow.core.models import Group
|
|||
from .users.serializers import GroupUserGroupSerializer
|
||||
|
||||
|
||||
__all__ = ['GroupUserGroupSerializer']
|
||||
__all__ = ["GroupUserGroupSerializer"]
|
||||
|
||||
|
||||
class GroupSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Group
|
||||
fields = ('id', 'name',)
|
||||
extra_kwargs = {
|
||||
'id': {
|
||||
'read_only': True
|
||||
}
|
||||
}
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
)
|
||||
extra_kwargs = {"id": {"read_only": True}}
|
||||
|
||||
|
||||
class OrderGroupsSerializer(serializers.Serializer):
|
||||
groups = serializers.ListField(
|
||||
child=serializers.IntegerField(),
|
||||
help_text='Group ids in the desired order.'
|
||||
child=serializers.IntegerField(), help_text="Group ids in the desired order."
|
||||
)
|
||||
|
|
|
@ -6,12 +6,12 @@ from .users import urls as user_urls
|
|||
from .invitations import urls as invitation_urls
|
||||
|
||||
|
||||
app_name = 'baserow.api.groups'
|
||||
app_name = "baserow.api.groups"
|
||||
|
||||
urlpatterns = [
|
||||
path('users/', include(user_urls, namespace='users')),
|
||||
path('invitations/', include(invitation_urls, namespace='invitations')),
|
||||
url(r'^$', GroupsView.as_view(), name='list'),
|
||||
url(r'(?P<group_id>[0-9]+)/$', GroupView.as_view(), name='item'),
|
||||
url(r'order/$', GroupOrderView.as_view(), name='order'),
|
||||
path("users/", include(user_urls, namespace="users")),
|
||||
path("invitations/", include(invitation_urls, namespace="invitations")),
|
||||
url(r"^$", GroupsView.as_view(), name="list"),
|
||||
url(r"(?P<group_id>[0-9]+)/$", GroupView.as_view(), name="item"),
|
||||
url(r"order/$", GroupOrderView.as_view(), name="order"),
|
||||
]
|
||||
|
|
|
@ -2,12 +2,12 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
|
|||
|
||||
|
||||
ERROR_GROUP_USER_DOES_NOT_EXIST = (
|
||||
'ERROR_GROUP_USER_DOES_NOT_EXIST',
|
||||
"ERROR_GROUP_USER_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The requested group user does not exist.'
|
||||
"The requested group user does not exist.",
|
||||
)
|
||||
ERROR_GROUP_USER_ALREADY_EXISTS = (
|
||||
'ERROR_GROUP_USER_ALREADY_EXISTS',
|
||||
"ERROR_GROUP_USER_ALREADY_EXISTS",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The user is already a member of the group.'
|
||||
"The user is already a member of the group.",
|
||||
)
|
||||
|
|
|
@ -17,7 +17,7 @@ class GroupUserSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = GroupUser
|
||||
fields = ('id', 'name', 'email', 'group', 'permissions', 'created_on')
|
||||
fields = ("id", "name", "email", "group", "permissions", "created_on")
|
||||
|
||||
@extend_schema_field(OpenApiTypes.STR)
|
||||
def get_name(self, object):
|
||||
|
@ -36,7 +36,7 @@ class GroupUserGroupSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = GroupUser
|
||||
fields = ('order', 'permissions')
|
||||
fields = ("order", "permissions")
|
||||
|
||||
def to_representation(self, instance):
|
||||
from baserow.api.groups.serializers import GroupSerializer
|
||||
|
@ -49,4 +49,4 @@ class GroupUserGroupSerializer(serializers.ModelSerializer):
|
|||
class UpdateGroupUserSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GroupUser
|
||||
fields = ('permissions',)
|
||||
fields = ("permissions",)
|
||||
|
|
|
@ -3,9 +3,9 @@ from django.conf.urls import url
|
|||
from .views import GroupUsersView, GroupUserView
|
||||
|
||||
|
||||
app_name = 'baserow.api.groups.users'
|
||||
app_name = "baserow.api.groups.users"
|
||||
|
||||
urlpatterns = [
|
||||
url(r'group/(?P<group_id>[0-9]+)/$', GroupUsersView.as_view(), name='list'),
|
||||
url(r'(?P<group_user_id>[0-9]+)/$', GroupUserView.as_view(), name='item'),
|
||||
url(r"group/(?P<group_id>[0-9]+)/$", GroupUsersView.as_view(), name="list"),
|
||||
url(r"(?P<group_user_id>[0-9]+)/$", GroupUserView.as_view(), name="item"),
|
||||
]
|
||||
|
|
|
@ -9,20 +9,25 @@ from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
|
|||
|
||||
from baserow.api.decorators import validate_body, map_exceptions
|
||||
from baserow.api.errors import (
|
||||
ERROR_GROUP_DOES_NOT_EXIST, ERROR_USER_NOT_IN_GROUP,
|
||||
ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
ERROR_GROUP_DOES_NOT_EXIST,
|
||||
ERROR_USER_NOT_IN_GROUP,
|
||||
ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
)
|
||||
from baserow.api.groups.users.errors import ERROR_GROUP_USER_DOES_NOT_EXIST
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.models import GroupUser
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup, UserInvalidGroupPermissionsError, GroupDoesNotExist,
|
||||
GroupUserDoesNotExist
|
||||
UserNotInGroup,
|
||||
UserInvalidGroupPermissionsError,
|
||||
GroupDoesNotExist,
|
||||
GroupUserDoesNotExist,
|
||||
)
|
||||
|
||||
from .serializers import (
|
||||
GroupUserSerializer, GroupUserGroupSerializer, UpdateGroupUserSerializer
|
||||
GroupUserSerializer,
|
||||
GroupUserGroupSerializer,
|
||||
UpdateGroupUserSerializer,
|
||||
)
|
||||
|
||||
|
||||
|
@ -30,39 +35,40 @@ class GroupUsersView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the group user related to the provided value.'
|
||||
description="Updates the group user related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Groups'],
|
||||
operation_id='list_group_users',
|
||||
tags=["Groups"],
|
||||
operation_id="list_group_users",
|
||||
description=(
|
||||
'Lists all the users that are in a group if the authorized user has admin '
|
||||
'permissions to the related group. To add a user to a group an invitation '
|
||||
'must be send first.'
|
||||
"Lists all the users that are in a group if the authorized user has admin "
|
||||
"permissions to the related group. To add a user to a group an invitation "
|
||||
"must be send first."
|
||||
),
|
||||
responses={
|
||||
200: GroupUserSerializer(many=True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST']),
|
||||
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_USER_INVALID_GROUP_PERMISSIONS"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
def get(self, request, group_id):
|
||||
"""Responds with a list of serialized users that are part of the group."""
|
||||
|
||||
group = CoreHandler().get_group(group_id)
|
||||
group.has_user(request.user, 'ADMIN', True)
|
||||
group_users = GroupUser.objects.filter(group=group).select_related('group')
|
||||
group.has_user(request.user, "ADMIN", True)
|
||||
group_users = GroupUser.objects.filter(group=group).select_related("group")
|
||||
serializer = GroupUserSerializer(group_users, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
|
@ -73,88 +79,86 @@ class GroupUserView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_user_id',
|
||||
name="group_user_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the group user related to the provided value.'
|
||||
description="Updates the group user related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Groups'],
|
||||
operation_id='update_group_user',
|
||||
tags=["Groups"],
|
||||
operation_id="update_group_user",
|
||||
description=(
|
||||
'Updates the existing group user related to the provided '
|
||||
'`group_user_id` param if the authorized user has admin rights to '
|
||||
'the related group.'
|
||||
"Updates the existing group user related to the provided "
|
||||
"`group_user_id` param if the authorized user has admin rights to "
|
||||
"the related group."
|
||||
),
|
||||
request=UpdateGroupUserSerializer,
|
||||
responses={
|
||||
200: GroupUserGroupSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_USER_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_USER_INVALID_GROUP_PERMISSIONS",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_USER_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(UpdateGroupUserSerializer)
|
||||
@map_exceptions({
|
||||
GroupUserDoesNotExist: ERROR_GROUP_USER_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupUserDoesNotExist: ERROR_GROUP_USER_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def patch(self, request, data, group_user_id):
|
||||
"""Updates the group user if the user has admin permissions to the group."""
|
||||
|
||||
group_user = CoreHandler().get_group_user(
|
||||
group_user_id,
|
||||
base_queryset=GroupUser.objects.select_for_update()
|
||||
)
|
||||
group_user = CoreHandler().update_group_user(
|
||||
request.user,
|
||||
group_user,
|
||||
**data
|
||||
group_user_id, base_queryset=GroupUser.objects.select_for_update()
|
||||
)
|
||||
group_user = CoreHandler().update_group_user(request.user, group_user, **data)
|
||||
return Response(GroupUserGroupSerializer(group_user).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_user_id',
|
||||
name="group_user_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the group user related to the provided '
|
||||
'value.'
|
||||
description="Deletes the group user related to the provided " "value.",
|
||||
)
|
||||
],
|
||||
tags=['Groups'],
|
||||
operation_id='delete_group_user',
|
||||
tags=["Groups"],
|
||||
operation_id="delete_group_user",
|
||||
description=(
|
||||
'Deletes a group user if the authorized user has admin rights to '
|
||||
'the related group.'
|
||||
"Deletes a group user if the authorized user has admin rights to "
|
||||
"the related group."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_USER_INVALID_GROUP_PERMISSIONS"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
GroupUserDoesNotExist: ERROR_GROUP_USER_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupUserDoesNotExist: ERROR_GROUP_USER_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def delete(self, request, group_user_id):
|
||||
"""Deletes an existing group_user if the user belongs to the group."""
|
||||
|
||||
group_user = CoreHandler().get_group_user(
|
||||
group_user_id,
|
||||
base_queryset=GroupUser.objects.select_for_update()
|
||||
group_user_id, base_queryset=GroupUser.objects.select_for_update()
|
||||
)
|
||||
CoreHandler().delete_group_user(request.user, group_user)
|
||||
return Response(status=204)
|
||||
|
|
|
@ -10,15 +10,18 @@ from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
|
|||
|
||||
from baserow.api.decorators import validate_body, map_exceptions
|
||||
from baserow.api.errors import (
|
||||
ERROR_USER_NOT_IN_GROUP, ERROR_GROUP_DOES_NOT_EXIST,
|
||||
ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
ERROR_USER_NOT_IN_GROUP,
|
||||
ERROR_GROUP_DOES_NOT_EXIST,
|
||||
ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
)
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.api.groups.users.serializers import GroupUserGroupSerializer
|
||||
from baserow.core.models import GroupUser, Group
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup, GroupDoesNotExist, UserInvalidGroupPermissionsError
|
||||
UserNotInGroup,
|
||||
GroupDoesNotExist,
|
||||
UserInvalidGroupPermissionsError,
|
||||
)
|
||||
|
||||
from .serializers import GroupSerializer, OrderGroupsSerializer
|
||||
|
@ -29,46 +32,42 @@ class GroupsView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Groups'],
|
||||
operation_id='list_groups',
|
||||
tags=["Groups"],
|
||||
operation_id="list_groups",
|
||||
description=(
|
||||
'Lists all the groups of the authorized user. A group can contain '
|
||||
'multiple applications like a database. Multiple users can have '
|
||||
'access to a group. For example each company could have their own group '
|
||||
'containing databases related to that company. The order of the groups '
|
||||
'are custom for each user. The order is configurable via the '
|
||||
'**order_groups** endpoint.'
|
||||
"Lists all the groups of the authorized user. A group can contain "
|
||||
"multiple applications like a database. Multiple users can have "
|
||||
"access to a group. For example each company could have their own group "
|
||||
"containing databases related to that company. The order of the groups "
|
||||
"are custom for each user. The order is configurable via the "
|
||||
"**order_groups** endpoint."
|
||||
),
|
||||
responses={
|
||||
200: build_array_type(group_user_schema)
|
||||
}
|
||||
responses={200: build_array_type(group_user_schema)},
|
||||
)
|
||||
def get(self, request):
|
||||
"""Responds with a list of serialized groups where the user is part of."""
|
||||
|
||||
groups = GroupUser.objects.filter(user=request.user).select_related('group')
|
||||
groups = GroupUser.objects.filter(user=request.user).select_related("group")
|
||||
serializer = GroupUserGroupSerializer(groups, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Groups'],
|
||||
operation_id='create_group',
|
||||
tags=["Groups"],
|
||||
operation_id="create_group",
|
||||
description=(
|
||||
'Creates a new group where only the authorized user has access to. No '
|
||||
'initial data like database applications are added, they have to be '
|
||||
'created via other endpoints.'
|
||||
"Creates a new group where only the authorized user has access to. No "
|
||||
"initial data like database applications are added, they have to be "
|
||||
"created via other endpoints."
|
||||
),
|
||||
request=GroupSerializer,
|
||||
responses={
|
||||
200: group_user_schema
|
||||
}
|
||||
responses={200: group_user_schema},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(GroupSerializer)
|
||||
def post(self, request, data):
|
||||
"""Creates a new group for a user."""
|
||||
|
||||
group_user = CoreHandler().create_group(request.user, name=data['name'])
|
||||
group_user = CoreHandler().create_group(request.user, name=data["name"])
|
||||
return Response(GroupUserGroupSerializer(group_user).data)
|
||||
|
||||
|
||||
|
@ -78,90 +77,94 @@ class GroupView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the group related to the provided value.'
|
||||
description="Updates the group related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Groups'],
|
||||
operation_id='update_group',
|
||||
tags=["Groups"],
|
||||
operation_id="update_group",
|
||||
description=(
|
||||
'Updates the existing group related to the provided `group_id` parameter '
|
||||
'if the authorized user belongs to the group. It is not yet possible to '
|
||||
'add additional users to a group.'
|
||||
"Updates the existing group related to the provided `group_id` parameter "
|
||||
"if the authorized user belongs to the group. It is not yet possible to "
|
||||
"add additional users to a group."
|
||||
),
|
||||
request=GroupSerializer,
|
||||
responses={
|
||||
200: GroupSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_USER_INVALID_GROUP_PERMISSIONS",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(GroupSerializer)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def patch(self, request, data, group_id):
|
||||
"""Updates the group if it belongs to a user."""
|
||||
|
||||
group = CoreHandler().get_group(
|
||||
group_id,
|
||||
base_queryset=Group.objects.select_for_update()
|
||||
)
|
||||
group = CoreHandler().update_group(
|
||||
request.user,
|
||||
group,
|
||||
name=data['name']
|
||||
group_id, base_queryset=Group.objects.select_for_update()
|
||||
)
|
||||
group = CoreHandler().update_group(request.user, group, name=data["name"])
|
||||
return Response(GroupSerializer(group).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the group related to the provided value.'
|
||||
description="Deletes the group related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Groups'],
|
||||
operation_id='delete_group',
|
||||
tags=["Groups"],
|
||||
operation_id="delete_group",
|
||||
description=(
|
||||
'Deletes an existing group if the authorized user belongs to the group. '
|
||||
'All the applications, databases, tables etc that were in the group are '
|
||||
'going to be deleted also.'
|
||||
"Deletes an existing group if the authorized user belongs to the group. "
|
||||
"All the applications, databases, tables etc that were in the group are "
|
||||
"going to be deleted also."
|
||||
),
|
||||
request=GroupSerializer,
|
||||
responses={
|
||||
200: group_user_schema,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_USER_INVALID_GROUP_PERMISSIONS'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_USER_INVALID_GROUP_PERMISSIONS",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
UserInvalidGroupPermissionsError: ERROR_USER_INVALID_GROUP_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
def delete(self, request, group_id):
|
||||
"""Deletes an existing group if it belongs to a user."""
|
||||
|
||||
group = CoreHandler().get_group(
|
||||
group_id,
|
||||
base_queryset=Group.objects.select_for_update()
|
||||
group_id, base_queryset=Group.objects.select_for_update()
|
||||
)
|
||||
CoreHandler().delete_group(request.user, group)
|
||||
CoreHandler().delete_group(request.user, group)
|
||||
return Response(status=204)
|
||||
|
||||
|
||||
|
@ -169,12 +172,12 @@ class GroupOrderView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Groups'],
|
||||
operation_id='order_groups',
|
||||
tags=["Groups"],
|
||||
operation_id="order_groups",
|
||||
description=(
|
||||
'Changes the order of the provided group ids to the matching position that '
|
||||
'the id has in the list. If the authorized user does not belong to the '
|
||||
'group it will be ignored. The order will be custom for each user.'
|
||||
"Changes the order of the provided group ids to the matching position that "
|
||||
"the id has in the list. If the authorized user does not belong to the "
|
||||
"group it will be ignored. The order will be custom for each user."
|
||||
),
|
||||
request=OrderGroupsSerializer,
|
||||
responses={
|
||||
|
@ -185,5 +188,5 @@ class GroupOrderView(APIView):
|
|||
def post(self, request, data):
|
||||
"""Updates to order of some groups for a user."""
|
||||
|
||||
CoreHandler().order_groups(request.user, data['groups'])
|
||||
CoreHandler().order_groups(request.user, data["groups"])
|
||||
return Response(status=204)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
from drf_spectacular.openapi import AutoSchema as RegularAutoSchema
|
||||
|
||||
from .utils import (
|
||||
PolymorphicMappingSerializer, PolymorphicCustomFieldRegistrySerializer
|
||||
PolymorphicMappingSerializer,
|
||||
PolymorphicCustomFieldRegistrySerializer,
|
||||
)
|
||||
|
||||
|
||||
|
@ -13,12 +14,9 @@ class AutoSchema(RegularAutoSchema):
|
|||
"""
|
||||
|
||||
if (
|
||||
(
|
||||
isinstance(serializer, PolymorphicMappingSerializer) or
|
||||
isinstance(serializer, PolymorphicCustomFieldRegistrySerializer)
|
||||
) and
|
||||
serializer.many
|
||||
):
|
||||
isinstance(serializer, PolymorphicMappingSerializer)
|
||||
or isinstance(serializer, PolymorphicCustomFieldRegistrySerializer)
|
||||
) and serializer.many:
|
||||
return True
|
||||
|
||||
return super()._is_list_view(serializer)
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
from rest_framework.exceptions import NotFound, APIException
|
||||
from rest_framework.status import HTTP_400_BAD_REQUEST
|
||||
from rest_framework.pagination import (
|
||||
PageNumberPagination as RestFrameworkPageNumberPagination
|
||||
PageNumberPagination as RestFrameworkPageNumberPagination,
|
||||
)
|
||||
|
||||
|
||||
class PageNumberPagination(RestFrameworkPageNumberPagination):
|
||||
page_size = 100
|
||||
page_size_query_param = 'size'
|
||||
page_size_query_param = "size"
|
||||
|
||||
def __init__(self, limit_page_size=None, *args, **kwargs):
|
||||
self.limit_page_size = limit_page_size
|
||||
|
@ -17,10 +17,12 @@ class PageNumberPagination(RestFrameworkPageNumberPagination):
|
|||
page_size = super().get_page_size(request)
|
||||
|
||||
if self.limit_page_size and page_size > self.limit_page_size:
|
||||
exception = APIException({
|
||||
'error': 'ERROR_PAGE_SIZE_LIMIT',
|
||||
'detail': f'The page size is limited to {self.limit_page_size}.'
|
||||
})
|
||||
exception = APIException(
|
||||
{
|
||||
"error": "ERROR_PAGE_SIZE_LIMIT",
|
||||
"detail": f"The page size is limited to {self.limit_page_size}.",
|
||||
}
|
||||
)
|
||||
exception.status_code = HTTP_400_BAD_REQUEST
|
||||
raise exception
|
||||
|
||||
|
@ -32,9 +34,6 @@ class PageNumberPagination(RestFrameworkPageNumberPagination):
|
|||
try:
|
||||
return super().paginate_queryset(*args, **kwargs)
|
||||
except NotFound as e:
|
||||
exception = APIException({
|
||||
'error': 'ERROR_INVALID_PAGE',
|
||||
'detail': str(e)
|
||||
})
|
||||
exception = APIException({"error": "ERROR_INVALID_PAGE", "detail": str(e)})
|
||||
exception.status_code = HTTP_400_BAD_REQUEST
|
||||
raise exception
|
||||
|
|
|
@ -2,24 +2,26 @@ from drf_spectacular.plumbing import build_object_type
|
|||
|
||||
|
||||
def get_error_schema(errors=None):
|
||||
return build_object_type({
|
||||
'error': {
|
||||
'type': 'string',
|
||||
'description': 'Machine readable error indicating what went wrong.',
|
||||
'enum': errors
|
||||
},
|
||||
'detail': {
|
||||
'oneOf': [
|
||||
{
|
||||
'type': 'string',
|
||||
'format': 'string',
|
||||
'description': 'Human readable details about what went wrong.'
|
||||
},
|
||||
{
|
||||
'type': 'object',
|
||||
'format': 'object',
|
||||
'description': 'Machine readable object about what went wrong.'
|
||||
}
|
||||
]
|
||||
return build_object_type(
|
||||
{
|
||||
"error": {
|
||||
"type": "string",
|
||||
"description": "Machine readable error indicating what went wrong.",
|
||||
"enum": errors,
|
||||
},
|
||||
"detail": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "string",
|
||||
"description": "Human readable details about what went wrong.",
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"format": "object",
|
||||
"description": "Machine readable object about what went wrong.",
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
})
|
||||
)
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
from rest_framework import serializers
|
||||
|
||||
from baserow.contrib.database.api.views.grid.serializers import \
|
||||
GridViewFieldOptionsField
|
||||
from baserow.contrib.database.api.views.grid.serializers import (
|
||||
GridViewFieldOptionsField,
|
||||
)
|
||||
|
||||
|
||||
def get_example_pagination_serializer_class(results_serializer_class,
|
||||
add_field_options=False):
|
||||
def get_example_pagination_serializer_class(
|
||||
results_serializer_class, add_field_options=False
|
||||
):
|
||||
"""
|
||||
Generates a pagination like response serializer that has the provided serializer
|
||||
class as results. It is only used for example purposes in combination with the
|
||||
|
@ -21,27 +23,23 @@ def get_example_pagination_serializer_class(results_serializer_class,
|
|||
"""
|
||||
|
||||
fields = {
|
||||
'count': serializers.IntegerField(help_text='The total amount of results.'),
|
||||
'next': serializers.URLField(
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
help_text='URL to the next page.'
|
||||
"count": serializers.IntegerField(help_text="The total amount of results."),
|
||||
"next": serializers.URLField(
|
||||
allow_blank=True, allow_null=True, help_text="URL to the next page."
|
||||
),
|
||||
'previous': serializers.URLField(
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
help_text='URL to the previous page.'
|
||||
"previous": serializers.URLField(
|
||||
allow_blank=True, allow_null=True, help_text="URL to the previous page."
|
||||
),
|
||||
'results': results_serializer_class(many=True)
|
||||
"results": results_serializer_class(many=True),
|
||||
}
|
||||
|
||||
serializer_name = 'PaginationSerializer'
|
||||
serializer_name = "PaginationSerializer"
|
||||
if add_field_options:
|
||||
fields['field_options'] = GridViewFieldOptionsField(required=False)
|
||||
serializer_name = serializer_name + 'WithFieldOptions'
|
||||
fields["field_options"] = GridViewFieldOptionsField(required=False)
|
||||
serializer_name = serializer_name + "WithFieldOptions"
|
||||
|
||||
return type(
|
||||
serializer_name + results_serializer_class.__name__,
|
||||
(serializers.Serializer,),
|
||||
fields
|
||||
fields,
|
||||
)
|
||||
|
|
|
@ -6,7 +6,7 @@ from baserow.core.models import Settings
|
|||
class SettingsSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Settings
|
||||
fields = ('allow_new_signups',)
|
||||
fields = ("allow_new_signups",)
|
||||
extra_kwargs = {
|
||||
'allow_new_signups': {'required': False},
|
||||
"allow_new_signups": {"required": False},
|
||||
}
|
||||
|
|
|
@ -3,9 +3,9 @@ from django.conf.urls import url
|
|||
from .views import SettingsView, UpdateSettingsView
|
||||
|
||||
|
||||
app_name = 'baserow.api.settings'
|
||||
app_name = "baserow.api.settings"
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^update/$', UpdateSettingsView.as_view(), name='update'),
|
||||
url(r'^$', SettingsView.as_view(), name='get'),
|
||||
url(r"^update/$", UpdateSettingsView.as_view(), name="update"),
|
||||
url(r"^$", SettingsView.as_view(), name="get"),
|
||||
]
|
||||
|
|
|
@ -16,9 +16,9 @@ class SettingsView(APIView):
|
|||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Settings'],
|
||||
operation_id='get_settings',
|
||||
description='Responds with all the admin configured settings.',
|
||||
tags=["Settings"],
|
||||
operation_id="get_settings",
|
||||
description="Responds with all the admin configured settings.",
|
||||
responses={
|
||||
200: SettingsSerializer,
|
||||
},
|
||||
|
@ -37,10 +37,10 @@ class UpdateSettingsView(APIView):
|
|||
permission_classes = (IsAdminUser,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Settings'],
|
||||
operation_id='update_settings',
|
||||
tags=["Settings"],
|
||||
operation_id="update_settings",
|
||||
description=(
|
||||
'Updates the admin configured settings if the user has admin permissions.'
|
||||
"Updates the admin configured settings if the user has admin permissions."
|
||||
),
|
||||
request=SettingsSerializer,
|
||||
responses={
|
||||
|
|
|
@ -2,12 +2,12 @@ from rest_framework.status import HTTP_404_NOT_FOUND, HTTP_400_BAD_REQUEST
|
|||
|
||||
|
||||
ERROR_TEMPLATE_DOES_NOT_EXIST = (
|
||||
'ERROR_TEMPLATE_DOES_NOT_EXIST',
|
||||
"ERROR_TEMPLATE_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The requested template does not exist.'
|
||||
"The requested template does not exist.",
|
||||
)
|
||||
ERROR_TEMPLATE_FILE_DOES_NOT_EXIST = (
|
||||
'ERROR_TEMPLATE_FILE_DOES_NOT_EXIST',
|
||||
"ERROR_TEMPLATE_FILE_DOES_NOT_EXIST",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The requested template file does not exist anymore.'
|
||||
"The requested template file does not exist anymore.",
|
||||
)
|
||||
|
|
|
@ -10,14 +10,14 @@ from baserow.core.models import TemplateCategory, Template
|
|||
|
||||
class TemplateSerializer(serializers.ModelSerializer):
|
||||
is_default = serializers.SerializerMethodField(
|
||||
help_text='Indicates if the template must be selected by default. The '
|
||||
'web-frontend automatically selects the first `is_default` template '
|
||||
'that it can find.'
|
||||
help_text="Indicates if the template must be selected by default. The "
|
||||
"web-frontend automatically selects the first `is_default` template "
|
||||
"that it can find."
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Template
|
||||
fields = ('id', 'name', 'icon', 'keywords', 'group_id', 'is_default')
|
||||
fields = ("id", "name", "icon", "keywords", "group_id", "is_default")
|
||||
|
||||
@extend_schema_field(OpenApiTypes.STR)
|
||||
def get_is_default(self, instance):
|
||||
|
@ -29,4 +29,4 @@ class TemplateCategoriesSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = TemplateCategory
|
||||
fields = ('id', 'name', 'templates')
|
||||
fields = ("id", "name", "templates")
|
||||
|
|
|
@ -3,14 +3,14 @@ from django.conf.urls import url
|
|||
from .views import TemplatesView, InstallTemplateView
|
||||
|
||||
|
||||
app_name = 'baserow.api.templates'
|
||||
app_name = "baserow.api.templates"
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(
|
||||
r'install/(?P<group_id>[0-9]+)/(?P<template_id>[0-9]+)/$',
|
||||
r"install/(?P<group_id>[0-9]+)/(?P<template_id>[0-9]+)/$",
|
||||
InstallTemplateView.as_view(),
|
||||
name='install'
|
||||
name="install",
|
||||
),
|
||||
url(r'$', TemplatesView.as_view(), name='list'),
|
||||
url(r"$", TemplatesView.as_view(), name="list"),
|
||||
]
|
||||
|
|
|
@ -15,8 +15,10 @@ from baserow.api.applications.views import application_type_serializers
|
|||
from baserow.core.models import TemplateCategory
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup, GroupDoesNotExist, TemplateDoesNotExist,
|
||||
TemplateFileDoesNotExist
|
||||
UserNotInGroup,
|
||||
GroupDoesNotExist,
|
||||
TemplateDoesNotExist,
|
||||
TemplateFileDoesNotExist,
|
||||
)
|
||||
|
||||
from .errors import ERROR_TEMPLATE_DOES_NOT_EXIST, ERROR_TEMPLATE_FILE_DOES_NOT_EXIST
|
||||
|
@ -26,23 +28,21 @@ class TemplatesView(APIView):
|
|||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Templates'],
|
||||
operation_id='list_templates',
|
||||
tags=["Templates"],
|
||||
operation_id="list_templates",
|
||||
description=(
|
||||
'Lists all the template categories and the related templates that are in '
|
||||
'that category. The template\'s `group_id` can be used for previewing '
|
||||
'purposes because that group contains the applications that are in the '
|
||||
'template. All the `get` and `list` endpoints related to that group are '
|
||||
'publicly accessible.'
|
||||
"Lists all the template categories and the related templates that are in "
|
||||
"that category. The template's `group_id` can be used for previewing "
|
||||
"purposes because that group contains the applications that are in the "
|
||||
"template. All the `get` and `list` endpoints related to that group are "
|
||||
"publicly accessible."
|
||||
),
|
||||
responses={
|
||||
200: TemplateCategoriesSerializer(many=True)
|
||||
}
|
||||
responses={200: TemplateCategoriesSerializer(many=True)},
|
||||
)
|
||||
def get(self, request):
|
||||
"""Responds with a list of all template categories and templates."""
|
||||
|
||||
categories = TemplateCategory.objects.all().prefetch_related('templates')
|
||||
categories = TemplateCategory.objects.all().prefetch_related("templates")
|
||||
serializer = TemplateCategoriesSerializer(categories, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
|
@ -51,50 +51,48 @@ class InstallTemplateView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Templates'],
|
||||
operation_id='install_template',
|
||||
tags=["Templates"],
|
||||
operation_id="install_template",
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='group_id',
|
||||
name="group_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='The id related to the group where the template '
|
||||
'applications must be installed into.'
|
||||
description="The id related to the group where the template "
|
||||
"applications must be installed into.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='template_id',
|
||||
name="template_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='The id related to the template that must be installed.'
|
||||
)
|
||||
description="The id related to the template that must be installed.",
|
||||
),
|
||||
],
|
||||
description=(
|
||||
'Installs the applications of the given template into the given group if '
|
||||
'the user has access to that group. The response contains those newly '
|
||||
'created applications.'
|
||||
"Installs the applications of the given template into the given group if "
|
||||
"the user has access to that group. The response contains those newly "
|
||||
"created applications."
|
||||
),
|
||||
responses={
|
||||
200: PolymorphicMappingSerializer(
|
||||
'Applications',
|
||||
application_type_serializers,
|
||||
many=True
|
||||
"Applications", application_type_serializers, many=True
|
||||
),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_TEMPLATE_FILE_DOES_NOT_EXIST'
|
||||
]),
|
||||
404: get_error_schema([
|
||||
'ERROR_GROUP_DOES_NOT_EXIST',
|
||||
'ERROR_TEMPLATE_DOES_NOT_EXIST'
|
||||
])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_TEMPLATE_FILE_DOES_NOT_EXIST"]
|
||||
),
|
||||
404: get_error_schema(
|
||||
["ERROR_GROUP_DOES_NOT_EXIST", "ERROR_TEMPLATE_DOES_NOT_EXIST"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TemplateDoesNotExist: ERROR_TEMPLATE_DOES_NOT_EXIST,
|
||||
TemplateFileDoesNotExist: ERROR_TEMPLATE_FILE_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TemplateDoesNotExist: ERROR_TEMPLATE_DOES_NOT_EXIST,
|
||||
TemplateFileDoesNotExist: ERROR_TEMPLATE_FILE_DOES_NOT_EXIST
|
||||
})
|
||||
def get(self, request, group_id, template_id):
|
||||
"""Install a template into a group."""
|
||||
|
||||
|
@ -102,13 +100,10 @@ class InstallTemplateView(APIView):
|
|||
group = handler.get_group(group_id)
|
||||
template = handler.get_template(template_id)
|
||||
applications, id_mapping = handler.install_template(
|
||||
request.user,
|
||||
group,
|
||||
template
|
||||
request.user, group, template
|
||||
)
|
||||
|
||||
data = [
|
||||
get_application_serializer(application).data
|
||||
for application in applications
|
||||
get_application_serializer(application).data for application in applications
|
||||
]
|
||||
return Response(data)
|
||||
|
|
|
@ -12,19 +12,23 @@ from .templates import urls as templates_urls
|
|||
from .applications import urls as application_urls
|
||||
|
||||
|
||||
app_name = 'baserow.api'
|
||||
app_name = "baserow.api"
|
||||
|
||||
urlpatterns = [
|
||||
path('schema.json', SpectacularJSONAPIView.as_view(), name='json_schema'),
|
||||
path(
|
||||
'redoc/',
|
||||
SpectacularRedocView.as_view(url_name='api:json_schema'),
|
||||
name='redoc'
|
||||
),
|
||||
path('settings/', include(settings_urls, namespace='settings')),
|
||||
path('user/', include(user_urls, namespace='user')),
|
||||
path('user-files/', include(user_files_urls, namespace='user_files')),
|
||||
path('groups/', include(group_urls, namespace='groups')),
|
||||
path('templates/', include(templates_urls, namespace='templates')),
|
||||
path('applications/', include(application_urls, namespace='applications'))
|
||||
] + application_type_registry.api_urls + plugin_registry.api_urls
|
||||
urlpatterns = (
|
||||
[
|
||||
path("schema.json", SpectacularJSONAPIView.as_view(), name="json_schema"),
|
||||
path(
|
||||
"redoc/",
|
||||
SpectacularRedocView.as_view(url_name="api:json_schema"),
|
||||
name="redoc",
|
||||
),
|
||||
path("settings/", include(settings_urls, namespace="settings")),
|
||||
path("user/", include(user_urls, namespace="user")),
|
||||
path("user-files/", include(user_files_urls, namespace="user_files")),
|
||||
path("groups/", include(group_urls, namespace="groups")),
|
||||
path("templates/", include(templates_urls, namespace="templates")),
|
||||
path("applications/", include(application_urls, namespace="applications")),
|
||||
]
|
||||
+ application_type_registry.api_urls
|
||||
+ plugin_registry.api_urls
|
||||
)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
ERROR_ALREADY_EXISTS = 'ERROR_EMAIL_ALREADY_EXISTS'
|
||||
ERROR_USER_NOT_FOUND = 'ERROR_USER_NOT_FOUND'
|
||||
ERROR_INVALID_OLD_PASSWORD = 'ERROR_INVALID_OLD_PASSWORD'
|
||||
ERROR_DISABLED_SIGNUP = 'ERROR_DISABLED_SIGNUP'
|
||||
ERROR_ALREADY_EXISTS = "ERROR_EMAIL_ALREADY_EXISTS"
|
||||
ERROR_USER_NOT_FOUND = "ERROR_USER_NOT_FOUND"
|
||||
ERROR_INVALID_OLD_PASSWORD = "ERROR_INVALID_OLD_PASSWORD"
|
||||
ERROR_DISABLED_SIGNUP = "ERROR_DISABLED_SIGNUP"
|
||||
|
|
|
@ -3,6 +3,6 @@ from .serializers import UserSerializer
|
|||
|
||||
def jwt_response_payload_handler(token, user=None, request=None):
|
||||
return {
|
||||
'token': token,
|
||||
'user': UserSerializer(user, context={'request': request}).data
|
||||
"token": token,
|
||||
"user": UserSerializer(user, context={"request": request}).data,
|
||||
}
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
from drf_spectacular.plumbing import build_object_type
|
||||
|
||||
|
||||
create_user_response_schema = build_object_type({
|
||||
'user': {
|
||||
'type': 'object',
|
||||
'description': 'An object containing information related to the user.',
|
||||
'properties': {
|
||||
'first_name': {
|
||||
'type': 'string',
|
||||
'description': 'The first name of related user.'
|
||||
create_user_response_schema = build_object_type(
|
||||
{
|
||||
"user": {
|
||||
"type": "object",
|
||||
"description": "An object containing information related to the user.",
|
||||
"properties": {
|
||||
"first_name": {
|
||||
"type": "string",
|
||||
"description": "The first name of related user.",
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"format": "email",
|
||||
"description": "The username of the related user. This is always "
|
||||
"an email address.",
|
||||
},
|
||||
},
|
||||
'username': {
|
||||
'type': 'string',
|
||||
'format': 'email',
|
||||
'description': 'The username of the related user. This is always '
|
||||
'an email address.'
|
||||
}
|
||||
}
|
||||
},
|
||||
'token': {
|
||||
'type': 'string'
|
||||
},
|
||||
"token": {"type": "string"},
|
||||
}
|
||||
})
|
||||
)
|
||||
authenticate_user_schema = create_user_response_schema
|
||||
|
|
|
@ -13,40 +13,40 @@ User = get_user_model()
|
|||
class UserSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ('first_name', 'username', 'password', 'is_staff')
|
||||
fields = ("first_name", "username", "password", "is_staff")
|
||||
extra_kwargs = {
|
||||
'password': {'write_only': True},
|
||||
'is_staff': {'read_only': True},
|
||||
"password": {"write_only": True},
|
||||
"is_staff": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class RegisterSerializer(serializers.Serializer):
|
||||
name = serializers.CharField(max_length=32)
|
||||
email = serializers.EmailField(
|
||||
help_text='The email address is also going to be the username.'
|
||||
help_text="The email address is also going to be the username."
|
||||
)
|
||||
password = serializers.CharField(max_length=256)
|
||||
authenticate = serializers.BooleanField(
|
||||
required=False,
|
||||
default=False,
|
||||
help_text='Indicates whether an authentication token should be generated and '
|
||||
'be included in the response.'
|
||||
help_text="Indicates whether an authentication token should be generated and "
|
||||
"be included in the response.",
|
||||
)
|
||||
group_invitation_token = serializers.CharField(
|
||||
required=False,
|
||||
help_text='If provided and valid, the user accepts the group invitation and '
|
||||
'will have access to the group after signing up.'
|
||||
help_text="If provided and valid, the user accepts the group invitation and "
|
||||
"will have access to the group after signing up.",
|
||||
)
|
||||
|
||||
|
||||
class SendResetPasswordEmailBodyValidationSerializer(serializers.Serializer):
|
||||
email = serializers.EmailField(
|
||||
help_text='The email address of the user that has requested a password reset.'
|
||||
help_text="The email address of the user that has requested a password reset."
|
||||
)
|
||||
base_url = serializers.URLField(
|
||||
help_text='The base URL where the user can reset his password. The reset '
|
||||
'token is going to be appended to the base_url (base_url '
|
||||
'\'/token\').'
|
||||
help_text="The base URL where the user can reset his password. The reset "
|
||||
"token is going to be appended to the base_url (base_url "
|
||||
"'/token')."
|
||||
)
|
||||
|
||||
|
||||
|
@ -79,7 +79,7 @@ class NormalizedEmailWebTokenSerializer(JSONWebTokenSerializer):
|
|||
"""
|
||||
|
||||
validated_data = super().validate(attrs)
|
||||
update_last_login(None, validated_data['user'])
|
||||
update_last_login(None, validated_data["user"])
|
||||
return validated_data
|
||||
|
||||
|
||||
|
|
|
@ -1,36 +1,30 @@
|
|||
from django.conf.urls import url
|
||||
|
||||
from .views import (
|
||||
UserView, SendResetPasswordEmailView, ResetPasswordView, ChangePasswordView,
|
||||
DashboardView, ObtainJSONWebToken, RefreshJSONWebToken, VerifyJSONWebToken
|
||||
UserView,
|
||||
SendResetPasswordEmailView,
|
||||
ResetPasswordView,
|
||||
ChangePasswordView,
|
||||
DashboardView,
|
||||
ObtainJSONWebToken,
|
||||
RefreshJSONWebToken,
|
||||
VerifyJSONWebToken,
|
||||
)
|
||||
|
||||
|
||||
app_name = 'baserow.api.user'
|
||||
app_name = "baserow.api.user"
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^token-auth/$', ObtainJSONWebToken.as_view(), name='token_auth'),
|
||||
url(r'^token-refresh/$', RefreshJSONWebToken.as_view(), name='token_refresh'),
|
||||
url(r'^token-verify/$', VerifyJSONWebToken.as_view(), name='token_verify'),
|
||||
url(r"^token-auth/$", ObtainJSONWebToken.as_view(), name="token_auth"),
|
||||
url(r"^token-refresh/$", RefreshJSONWebToken.as_view(), name="token_refresh"),
|
||||
url(r"^token-verify/$", VerifyJSONWebToken.as_view(), name="token_verify"),
|
||||
url(
|
||||
r'^send-reset-password-email/$',
|
||||
r"^send-reset-password-email/$",
|
||||
SendResetPasswordEmailView.as_view(),
|
||||
name='send_reset_password_email'
|
||||
name="send_reset_password_email",
|
||||
),
|
||||
url(
|
||||
r'^reset-password/$',
|
||||
ResetPasswordView.as_view(),
|
||||
name='reset_password'
|
||||
),
|
||||
url(
|
||||
r'^change-password/$',
|
||||
ChangePasswordView.as_view(),
|
||||
name='change_password'
|
||||
),
|
||||
url(
|
||||
r'^dashboard/$',
|
||||
DashboardView.as_view(),
|
||||
name='dashboard'
|
||||
),
|
||||
url(r'^$', UserView.as_view(), name='index')
|
||||
url(r"^reset-password/$", ResetPasswordView.as_view(), name="reset_password"),
|
||||
url(r"^change-password/$", ChangePasswordView.as_view(), name="change_password"),
|
||||
url(r"^dashboard/$", DashboardView.as_view(), name="dashboard"),
|
||||
url(r"^$", UserView.as_view(), name="index"),
|
||||
]
|
||||
|
|
|
@ -12,35 +12,48 @@ from rest_framework_jwt.settings import api_settings
|
|||
from rest_framework_jwt.views import (
|
||||
ObtainJSONWebToken as RegularObtainJSONWebToken,
|
||||
RefreshJSONWebToken as RegularRefreshJSONWebToken,
|
||||
VerifyJSONWebToken as RegularVerifyJSONWebToken
|
||||
VerifyJSONWebToken as RegularVerifyJSONWebToken,
|
||||
)
|
||||
|
||||
from baserow.api.decorators import map_exceptions, validate_body
|
||||
from baserow.api.errors import (
|
||||
BAD_TOKEN_SIGNATURE, EXPIRED_TOKEN_SIGNATURE, ERROR_HOSTNAME_IS_NOT_ALLOWED
|
||||
BAD_TOKEN_SIGNATURE,
|
||||
EXPIRED_TOKEN_SIGNATURE,
|
||||
ERROR_HOSTNAME_IS_NOT_ALLOWED,
|
||||
)
|
||||
from baserow.api.groups.invitations.errors import (
|
||||
ERROR_GROUP_INVITATION_DOES_NOT_EXIST, ERROR_GROUP_INVITATION_EMAIL_MISMATCH
|
||||
ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
)
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.exceptions import (
|
||||
BaseURLHostnameNotAllowed, GroupInvitationEmailMismatch,
|
||||
GroupInvitationDoesNotExist
|
||||
BaseURLHostnameNotAllowed,
|
||||
GroupInvitationEmailMismatch,
|
||||
GroupInvitationDoesNotExist,
|
||||
)
|
||||
from baserow.core.models import GroupInvitation
|
||||
from baserow.core.user.handler import UserHandler
|
||||
from baserow.core.user.exceptions import (
|
||||
UserAlreadyExist, UserNotFound, InvalidPassword, DisabledSignupError
|
||||
UserAlreadyExist,
|
||||
UserNotFound,
|
||||
InvalidPassword,
|
||||
DisabledSignupError,
|
||||
)
|
||||
|
||||
from .serializers import (
|
||||
RegisterSerializer, UserSerializer, SendResetPasswordEmailBodyValidationSerializer,
|
||||
ResetPasswordBodyValidationSerializer, ChangePasswordBodyValidationSerializer,
|
||||
NormalizedEmailWebTokenSerializer, DashboardSerializer
|
||||
RegisterSerializer,
|
||||
UserSerializer,
|
||||
SendResetPasswordEmailBodyValidationSerializer,
|
||||
ResetPasswordBodyValidationSerializer,
|
||||
ChangePasswordBodyValidationSerializer,
|
||||
NormalizedEmailWebTokenSerializer,
|
||||
DashboardSerializer,
|
||||
)
|
||||
from .errors import (
|
||||
ERROR_ALREADY_EXISTS, ERROR_USER_NOT_FOUND, ERROR_INVALID_OLD_PASSWORD,
|
||||
ERROR_DISABLED_SIGNUP
|
||||
ERROR_ALREADY_EXISTS,
|
||||
ERROR_USER_NOT_FOUND,
|
||||
ERROR_INVALID_OLD_PASSWORD,
|
||||
ERROR_DISABLED_SIGNUP,
|
||||
)
|
||||
from .schemas import create_user_response_schema, authenticate_user_schema
|
||||
|
||||
|
@ -59,26 +72,26 @@ class ObtainJSONWebToken(RegularObtainJSONWebToken):
|
|||
serializer_class = NormalizedEmailWebTokenSerializer
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
operation_id='token_auth',
|
||||
tags=["User"],
|
||||
operation_id="token_auth",
|
||||
description=(
|
||||
'Authenticates an existing user based on their username, which is their '
|
||||
'email address, and their password. If successful a JWT token will be '
|
||||
'generated that can be used to authorize for other endpoints that require '
|
||||
'authorization. The token will be valid for {valid} minutes, so it has to '
|
||||
'be refreshed using the **token_refresh** endpoint before that '
|
||||
'time.'.format(
|
||||
valid=int(settings.JWT_AUTH['JWT_EXPIRATION_DELTA'].seconds / 60)
|
||||
"Authenticates an existing user based on their username, which is their "
|
||||
"email address, and their password. If successful a JWT token will be "
|
||||
"generated that can be used to authorize for other endpoints that require "
|
||||
"authorization. The token will be valid for {valid} minutes, so it has to "
|
||||
"be refreshed using the **token_refresh** endpoint before that "
|
||||
"time.".format(
|
||||
valid=int(settings.JWT_AUTH["JWT_EXPIRATION_DELTA"].seconds / 60)
|
||||
)
|
||||
),
|
||||
responses={
|
||||
200: authenticate_user_schema,
|
||||
400: {
|
||||
'description': 'A user with the provided username and password is '
|
||||
'not found.'
|
||||
}
|
||||
"description": "A user with the provided username and password is "
|
||||
"not found."
|
||||
},
|
||||
},
|
||||
auth=[None]
|
||||
auth=[None],
|
||||
)
|
||||
def post(self, *args, **kwargs):
|
||||
return super().post(*args, **kwargs)
|
||||
|
@ -86,20 +99,20 @@ class ObtainJSONWebToken(RegularObtainJSONWebToken):
|
|||
|
||||
class RefreshJSONWebToken(RegularRefreshJSONWebToken):
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
operation_id='token_refresh',
|
||||
tags=["User"],
|
||||
operation_id="token_refresh",
|
||||
description=(
|
||||
'Refreshes an existing JWT token. If the the token is valid, a new '
|
||||
'token will be included in the response. It will be valid for {valid} '
|
||||
'minutes.'.format(
|
||||
valid=int(settings.JWT_AUTH['JWT_EXPIRATION_DELTA'].seconds / 60)
|
||||
"Refreshes an existing JWT token. If the the token is valid, a new "
|
||||
"token will be included in the response. It will be valid for {valid} "
|
||||
"minutes.".format(
|
||||
valid=int(settings.JWT_AUTH["JWT_EXPIRATION_DELTA"].seconds / 60)
|
||||
)
|
||||
),
|
||||
responses={
|
||||
200: authenticate_user_schema,
|
||||
400: {'description': 'The token is invalid or expired.'}
|
||||
400: {"description": "The token is invalid or expired."},
|
||||
},
|
||||
auth=[None]
|
||||
auth=[None],
|
||||
)
|
||||
def post(self, *args, **kwargs):
|
||||
return super().post(*args, **kwargs)
|
||||
|
@ -107,14 +120,14 @@ class RefreshJSONWebToken(RegularRefreshJSONWebToken):
|
|||
|
||||
class VerifyJSONWebToken(RegularVerifyJSONWebToken):
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
operation_id='token_verify',
|
||||
description='Verifies if the a token is still valid.',
|
||||
tags=["User"],
|
||||
operation_id="token_verify",
|
||||
description="Verifies if the a token is still valid.",
|
||||
responses={
|
||||
200: authenticate_user_schema,
|
||||
400: {'description': 'The token is invalid or expired.'}
|
||||
400: {"description": "The token is invalid or expired."},
|
||||
},
|
||||
auth=[None]
|
||||
auth=[None],
|
||||
)
|
||||
def post(self, *args, **kwargs):
|
||||
return super().post(*args, **kwargs)
|
||||
|
@ -124,46 +137,52 @@ class UserView(APIView):
|
|||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
tags=["User"],
|
||||
request=RegisterSerializer,
|
||||
operation_id='create_user',
|
||||
operation_id="create_user",
|
||||
description=(
|
||||
'Creates a new user based on the provided values. If desired an '
|
||||
'authentication token can be generated right away. After creating an '
|
||||
'account the initial group containing a database is created.'
|
||||
"Creates a new user based on the provided values. If desired an "
|
||||
"authentication token can be generated right away. After creating an "
|
||||
"account the initial group containing a database is created."
|
||||
),
|
||||
responses={
|
||||
200: create_user_response_schema,
|
||||
400: get_error_schema([
|
||||
'ERROR_ALREADY_EXISTS', 'ERROR_GROUP_INVITATION_DOES_NOT_EXIST'
|
||||
'ERROR_REQUEST_BODY_VALIDATION', 'BAD_TOKEN_SIGNATURE'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GROUP_INVITATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_ALREADY_EXISTS",
|
||||
"ERROR_GROUP_INVITATION_DOES_NOT_EXIST"
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"BAD_TOKEN_SIGNATURE",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GROUP_INVITATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
auth=[None]
|
||||
auth=[None],
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
UserAlreadyExist: ERROR_ALREADY_EXISTS,
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
DisabledSignupError: ERROR_DISABLED_SIGNUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
UserAlreadyExist: ERROR_ALREADY_EXISTS,
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
GroupInvitationDoesNotExist: ERROR_GROUP_INVITATION_DOES_NOT_EXIST,
|
||||
GroupInvitationEmailMismatch: ERROR_GROUP_INVITATION_EMAIL_MISMATCH,
|
||||
DisabledSignupError: ERROR_DISABLED_SIGNUP,
|
||||
}
|
||||
)
|
||||
@validate_body(RegisterSerializer)
|
||||
def post(self, request, data):
|
||||
"""Registers a new user."""
|
||||
|
||||
user = UserHandler().create_user(
|
||||
name=data['name'],
|
||||
email=data['email'],
|
||||
password=data['password'],
|
||||
group_invitation_token=data.get('group_invitation_token')
|
||||
name=data["name"],
|
||||
email=data["email"],
|
||||
password=data["password"],
|
||||
group_invitation_token=data.get("group_invitation_token"),
|
||||
)
|
||||
|
||||
response = {'user': UserSerializer(user).data}
|
||||
response = {"user": UserSerializer(user).data}
|
||||
|
||||
if data['authenticate']:
|
||||
if data["authenticate"]:
|
||||
payload = jwt_payload_handler(user)
|
||||
token = jwt_encode_handler(payload)
|
||||
response.update(token=token)
|
||||
|
@ -175,31 +194,28 @@ class SendResetPasswordEmailView(APIView):
|
|||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
tags=["User"],
|
||||
request=SendResetPasswordEmailBodyValidationSerializer,
|
||||
operation_id='send_password_reset_email',
|
||||
operation_id="send_password_reset_email",
|
||||
description=(
|
||||
'Sends an email containing the password reset link to the email address '
|
||||
'of the user. This will only be done if a user is found with the given '
|
||||
'email address. The endpoint will not fail if the email address is not '
|
||||
'found. The link is going to the valid for {valid} hours.'.format(
|
||||
"Sends an email containing the password reset link to the email address "
|
||||
"of the user. This will only be done if a user is found with the given "
|
||||
"email address. The endpoint will not fail if the email address is not "
|
||||
"found. The link is going to the valid for {valid} hours.".format(
|
||||
valid=int(settings.RESET_PASSWORD_TOKEN_MAX_AGE / 60 / 60)
|
||||
)
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema([
|
||||
'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_HOSTNAME_IS_NOT_ALLOWED'
|
||||
])
|
||||
400: get_error_schema(
|
||||
["ERROR_REQUEST_BODY_VALIDATION", "ERROR_HOSTNAME_IS_NOT_ALLOWED"]
|
||||
),
|
||||
},
|
||||
auth=[None],
|
||||
)
|
||||
@transaction.atomic
|
||||
@validate_body(SendResetPasswordEmailBodyValidationSerializer)
|
||||
@map_exceptions({
|
||||
BaseURLHostnameNotAllowed: ERROR_HOSTNAME_IS_NOT_ALLOWED
|
||||
})
|
||||
@map_exceptions({BaseURLHostnameNotAllowed: ERROR_HOSTNAME_IS_NOT_ALLOWED})
|
||||
def post(self, request, data):
|
||||
"""
|
||||
If the email is found, an email containing the password reset link is send to
|
||||
|
@ -209,112 +225,115 @@ class SendResetPasswordEmailView(APIView):
|
|||
handler = UserHandler()
|
||||
|
||||
try:
|
||||
user = handler.get_user(email=data['email'])
|
||||
handler.send_reset_password_email(user, data['base_url'])
|
||||
user = handler.get_user(email=data["email"])
|
||||
handler.send_reset_password_email(user, data["base_url"])
|
||||
except UserNotFound:
|
||||
pass
|
||||
|
||||
return Response('', status=204)
|
||||
return Response("", status=204)
|
||||
|
||||
|
||||
class ResetPasswordView(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
tags=["User"],
|
||||
request=ResetPasswordBodyValidationSerializer,
|
||||
operation_id='reset_password',
|
||||
operation_id="reset_password",
|
||||
description=(
|
||||
'Changes the password of a user if the reset token is valid. The '
|
||||
'**send_password_reset_email** endpoint sends an email to the user '
|
||||
'containing the token. That token can be used to change the password '
|
||||
'here without providing the old password.'
|
||||
"Changes the password of a user if the reset token is valid. The "
|
||||
"**send_password_reset_email** endpoint sends an email to the user "
|
||||
"containing the token. That token can be used to change the password "
|
||||
"here without providing the old password."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema([
|
||||
'BAD_TOKEN_SIGNATURE', 'EXPIRED_TOKEN_SIGNATURE',
|
||||
'ERROR_USER_NOT_FOUND', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"BAD_TOKEN_SIGNATURE",
|
||||
"EXPIRED_TOKEN_SIGNATURE",
|
||||
"ERROR_USER_NOT_FOUND",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
]
|
||||
),
|
||||
},
|
||||
auth=[None]
|
||||
auth=[None],
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
SignatureExpired: EXPIRED_TOKEN_SIGNATURE,
|
||||
UserNotFound: ERROR_USER_NOT_FOUND
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
BadSignature: BAD_TOKEN_SIGNATURE,
|
||||
SignatureExpired: EXPIRED_TOKEN_SIGNATURE,
|
||||
UserNotFound: ERROR_USER_NOT_FOUND,
|
||||
}
|
||||
)
|
||||
@validate_body(ResetPasswordBodyValidationSerializer)
|
||||
def post(self, request, data):
|
||||
"""Changes users password if the provided token is valid."""
|
||||
|
||||
handler = UserHandler()
|
||||
handler.reset_password(data['token'], data['password'])
|
||||
handler.reset_password(data["token"], data["password"])
|
||||
|
||||
return Response('', status=204)
|
||||
return Response("", status=204)
|
||||
|
||||
|
||||
class ChangePasswordView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
tags=["User"],
|
||||
request=ChangePasswordBodyValidationSerializer,
|
||||
operation_id='change_password',
|
||||
operation_id="change_password",
|
||||
description=(
|
||||
'Changes the password of an authenticated user, but only if the old '
|
||||
'password matches.'
|
||||
"Changes the password of an authenticated user, but only if the old "
|
||||
"password matches."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema([
|
||||
'ERROR_INVALID_OLD_PASSWORD',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(
|
||||
["ERROR_INVALID_OLD_PASSWORD", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
InvalidPassword: ERROR_INVALID_OLD_PASSWORD,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
InvalidPassword: ERROR_INVALID_OLD_PASSWORD,
|
||||
}
|
||||
)
|
||||
@validate_body(ChangePasswordBodyValidationSerializer)
|
||||
def post(self, request, data):
|
||||
"""Changes the authenticated user's password if the old password is correct."""
|
||||
|
||||
handler = UserHandler()
|
||||
handler.change_password(request.user, data['old_password'],
|
||||
data['new_password'])
|
||||
handler.change_password(
|
||||
request.user, data["old_password"], data["new_password"]
|
||||
)
|
||||
|
||||
return Response('', status=204)
|
||||
return Response("", status=204)
|
||||
|
||||
|
||||
class DashboardView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User'],
|
||||
operation_id='dashboard',
|
||||
tags=["User"],
|
||||
operation_id="dashboard",
|
||||
description=(
|
||||
'Lists all the relevant user information that for example could be shown '
|
||||
'on a dashboard. It will contain all the pending group invitations for '
|
||||
'that user.'
|
||||
"Lists all the relevant user information that for example could be shown "
|
||||
"on a dashboard. It will contain all the pending group invitations for "
|
||||
"that user."
|
||||
),
|
||||
responses={
|
||||
200: DashboardSerializer
|
||||
}
|
||||
responses={200: DashboardSerializer},
|
||||
)
|
||||
@transaction.atomic
|
||||
def get(self, request):
|
||||
"""Lists all the data related to the user dashboard page."""
|
||||
|
||||
group_invitations = GroupInvitation.objects.select_related(
|
||||
'group',
|
||||
'invited_by'
|
||||
).filter(
|
||||
email=request.user.username
|
||||
"group", "invited_by"
|
||||
).filter(email=request.user.username)
|
||||
dashboard_serializer = DashboardSerializer(
|
||||
{"group_invitations": group_invitations}
|
||||
)
|
||||
dashboard_serializer = DashboardSerializer({
|
||||
'group_invitations': group_invitations
|
||||
})
|
||||
return Response(dashboard_serializer.data)
|
||||
|
|
|
@ -1,35 +1,36 @@
|
|||
from rest_framework.status import (
|
||||
HTTP_400_BAD_REQUEST, HTTP_413_REQUEST_ENTITY_TOO_LARGE
|
||||
HTTP_400_BAD_REQUEST,
|
||||
HTTP_413_REQUEST_ENTITY_TOO_LARGE,
|
||||
)
|
||||
|
||||
|
||||
ERROR_INVALID_FILE = (
|
||||
'ERROR_INVALID_FILE',
|
||||
"ERROR_INVALID_FILE",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'No file has been provided or the file is invalid.'
|
||||
"No file has been provided or the file is invalid.",
|
||||
)
|
||||
ERROR_FILE_SIZE_TOO_LARGE = (
|
||||
'ERROR_FILE_SIZE_TOO_LARGE',
|
||||
"ERROR_FILE_SIZE_TOO_LARGE",
|
||||
HTTP_413_REQUEST_ENTITY_TOO_LARGE,
|
||||
'The provided file is too large. Max {e.max_size_mb}MB is allowed.'
|
||||
"The provided file is too large. Max {e.max_size_mb}MB is allowed.",
|
||||
)
|
||||
ERROR_FILE_URL_COULD_NOT_BE_REACHED = (
|
||||
'ERROR_FILE_URL_COULD_NOT_BE_REACHED',
|
||||
"ERROR_FILE_URL_COULD_NOT_BE_REACHED",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The provided URL could not be reached.'
|
||||
"The provided URL could not be reached.",
|
||||
)
|
||||
ERROR_INVALID_FILE_URL = (
|
||||
'ERROR_INVALID_FILE_URL',
|
||||
"ERROR_INVALID_FILE_URL",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The provided URL is not valid.'
|
||||
"The provided URL is not valid.",
|
||||
)
|
||||
ERROR_INVALID_USER_FILE_NAME_ERROR = (
|
||||
'ERROR_INVALID_USER_FILE_NAME_ERROR',
|
||||
"ERROR_INVALID_USER_FILE_NAME_ERROR",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The user file name {e.name} is invalid.'
|
||||
"The user file name {e.name} is invalid.",
|
||||
)
|
||||
ERROR_USER_FILE_DOES_NOT_EXIST = (
|
||||
'ERROR_USER_FILE_DOES_NOT_EXIST',
|
||||
"ERROR_USER_FILE_DOES_NOT_EXIST",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The user file {e.name_or_id} does not exist.'
|
||||
"The user file {e.name_or_id} does not exist.",
|
||||
)
|
||||
|
|
|
@ -23,41 +23,49 @@ class UserFileURLAndThumbnailsSerializerMixin(serializers.Serializer):
|
|||
|
||||
@extend_schema_field(OpenApiTypes.URI)
|
||||
def get_url(self, instance):
|
||||
name = self.get_instance_attr(instance, 'name')
|
||||
name = self.get_instance_attr(instance, "name")
|
||||
path = UserFileHandler().user_file_path(name)
|
||||
url = default_storage.url(path)
|
||||
return url
|
||||
|
||||
@extend_schema_field(OpenApiTypes.OBJECT)
|
||||
def get_thumbnails(self, instance):
|
||||
if not self.get_instance_attr(instance, 'is_image'):
|
||||
if not self.get_instance_attr(instance, "is_image"):
|
||||
return None
|
||||
|
||||
name = self.get_instance_attr(instance, 'name')
|
||||
name = self.get_instance_attr(instance, "name")
|
||||
|
||||
return {
|
||||
thumbnail_name: {
|
||||
'url': default_storage.url(
|
||||
UserFileHandler().user_file_thumbnail_path(
|
||||
name,
|
||||
thumbnail_name
|
||||
)
|
||||
"url": default_storage.url(
|
||||
UserFileHandler().user_file_thumbnail_path(name, thumbnail_name)
|
||||
),
|
||||
'width': size[0],
|
||||
'height': size[1]
|
||||
"width": size[0],
|
||||
"height": size[1],
|
||||
}
|
||||
for thumbnail_name, size in settings.USER_THUMBNAILS.items()
|
||||
}
|
||||
|
||||
|
||||
class UserFileSerializer(UserFileURLAndThumbnailsSerializerMixin,
|
||||
serializers.ModelSerializer):
|
||||
class UserFileSerializer(
|
||||
UserFileURLAndThumbnailsSerializerMixin, serializers.ModelSerializer
|
||||
):
|
||||
name = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = UserFile
|
||||
fields = ('size', 'mime_type', 'is_image', 'image_width', 'image_height',
|
||||
'uploaded_at', 'url', 'thumbnails', 'name', 'original_name')
|
||||
fields = (
|
||||
"size",
|
||||
"mime_type",
|
||||
"is_image",
|
||||
"image_width",
|
||||
"image_height",
|
||||
"uploaded_at",
|
||||
"url",
|
||||
"thumbnails",
|
||||
"name",
|
||||
"original_name",
|
||||
)
|
||||
|
||||
@extend_schema_field(OpenApiTypes.STR)
|
||||
def get_name(self, instance):
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
from django.conf.urls import url
|
||||
|
||||
from .views import (
|
||||
UploadFileView, UploadViaURLView
|
||||
)
|
||||
from .views import UploadFileView, UploadViaURLView
|
||||
|
||||
|
||||
app_name = 'baserow.api.user'
|
||||
app_name = "baserow.api.user"
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^upload-file/$', UploadFileView.as_view(), name='upload_file'),
|
||||
url(r'^upload-via-url/$', UploadViaURLView.as_view(), name='upload_via_url'),
|
||||
url(r"^upload-file/$", UploadFileView.as_view(), name="upload_file"),
|
||||
url(r"^upload-via-url/$", UploadViaURLView.as_view(), name="upload_via_url"),
|
||||
]
|
||||
|
|
|
@ -8,4 +8,4 @@ def user_file_name_validator(value):
|
|||
try:
|
||||
UserFile.deconstruct_name(value)
|
||||
except InvalidUserFileNameError:
|
||||
raise ValidationError('The user file name is invalid.', code='invalid')
|
||||
raise ValidationError("The user file name is invalid.", code="invalid")
|
||||
|
|
|
@ -11,15 +11,19 @@ from rest_framework.permissions import IsAuthenticated
|
|||
from baserow.api.decorators import map_exceptions, validate_body
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.user_files.exceptions import (
|
||||
InvalidFileStreamError, FileSizeTooLargeError, FileURLCouldNotBeReached,
|
||||
InvalidFileURLError
|
||||
InvalidFileStreamError,
|
||||
FileSizeTooLargeError,
|
||||
FileURLCouldNotBeReached,
|
||||
InvalidFileURLError,
|
||||
)
|
||||
from baserow.core.user_files.handler import UserFileHandler
|
||||
|
||||
from .serializers import UserFileSerializer, UserFileUploadViaURLRequestSerializer
|
||||
from .errors import (
|
||||
ERROR_INVALID_FILE, ERROR_FILE_SIZE_TOO_LARGE, ERROR_FILE_URL_COULD_NOT_BE_REACHED,
|
||||
ERROR_INVALID_FILE_URL
|
||||
ERROR_INVALID_FILE,
|
||||
ERROR_FILE_SIZE_TOO_LARGE,
|
||||
ERROR_FILE_URL_COULD_NOT_BE_REACHED,
|
||||
ERROR_INVALID_FILE_URL,
|
||||
)
|
||||
|
||||
|
||||
|
@ -28,30 +32,32 @@ class UploadFileView(APIView):
|
|||
parser_classes = (MultiPartParser,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User files'],
|
||||
operation_id='upload_file',
|
||||
tags=["User files"],
|
||||
operation_id="upload_file",
|
||||
description=(
|
||||
'Uploads a file to Baserow by uploading the file contents directly. A '
|
||||
'`file` multipart is expected containing the file contents.'
|
||||
"Uploads a file to Baserow by uploading the file contents directly. A "
|
||||
"`file` multipart is expected containing the file contents."
|
||||
),
|
||||
request=None,
|
||||
responses={
|
||||
200: UserFileSerializer,
|
||||
400: get_error_schema(['ERROR_INVALID_FILE', 'ERROR_FILE_SIZE_TOO_LARGE'])
|
||||
}
|
||||
400: get_error_schema(["ERROR_INVALID_FILE", "ERROR_FILE_SIZE_TOO_LARGE"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
InvalidFileStreamError: ERROR_INVALID_FILE,
|
||||
FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
InvalidFileStreamError: ERROR_INVALID_FILE,
|
||||
FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE,
|
||||
}
|
||||
)
|
||||
def post(self, request):
|
||||
"""Uploads a file by uploading the contents directly."""
|
||||
|
||||
if 'file' not in request.FILES:
|
||||
raise InvalidFileStreamError('No file was provided.')
|
||||
if "file" not in request.FILES:
|
||||
raise InvalidFileStreamError("No file was provided.")
|
||||
|
||||
file = request.FILES.get('file')
|
||||
file = request.FILES.get("file")
|
||||
user_file = UserFileHandler().upload_user_file(request.user, file.name, file)
|
||||
serializer = UserFileSerializer(user_file)
|
||||
return Response(serializer.data)
|
||||
|
@ -61,34 +67,38 @@ class UploadViaURLView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['User files'],
|
||||
operation_id='upload_via_url',
|
||||
tags=["User files"],
|
||||
operation_id="upload_via_url",
|
||||
description=(
|
||||
'Uploads a file to Baserow by downloading it from the provided URL.'
|
||||
"Uploads a file to Baserow by downloading it from the provided URL."
|
||||
),
|
||||
request=UserFileUploadViaURLRequestSerializer,
|
||||
responses={
|
||||
200: UserFileSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_INVALID_FILE',
|
||||
'ERROR_FILE_SIZE_TOO_LARGE',
|
||||
'ERROR_FILE_URL_COULD_NOT_BE_REACHED',
|
||||
'ERROR_INVALID_FILE_URL'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_INVALID_FILE",
|
||||
"ERROR_FILE_SIZE_TOO_LARGE",
|
||||
"ERROR_FILE_URL_COULD_NOT_BE_REACHED",
|
||||
"ERROR_INVALID_FILE_URL",
|
||||
]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
InvalidFileStreamError: ERROR_INVALID_FILE,
|
||||
FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE,
|
||||
FileURLCouldNotBeReached: ERROR_FILE_URL_COULD_NOT_BE_REACHED,
|
||||
InvalidFileURLError: ERROR_INVALID_FILE_URL
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
InvalidFileStreamError: ERROR_INVALID_FILE,
|
||||
FileSizeTooLargeError: ERROR_FILE_SIZE_TOO_LARGE,
|
||||
FileURLCouldNotBeReached: ERROR_FILE_URL_COULD_NOT_BE_REACHED,
|
||||
InvalidFileURLError: ERROR_INVALID_FILE_URL,
|
||||
}
|
||||
)
|
||||
@validate_body(UserFileUploadViaURLRequestSerializer)
|
||||
def post(self, request, data):
|
||||
"""Uploads a user file by downloading it from the provided URL."""
|
||||
|
||||
url = data['url']
|
||||
url = data["url"]
|
||||
user_file = UserFileHandler().upload_user_file_by_url(request.user, url)
|
||||
serializer = UserFileSerializer(user_file)
|
||||
return Response(serializer.data)
|
||||
|
|
|
@ -44,7 +44,7 @@ def map_exceptions(mapping):
|
|||
except tuple(mapping.keys()) as e:
|
||||
value = mapping.get(e.__class__)
|
||||
status_code = status.HTTP_400_BAD_REQUEST
|
||||
detail = ''
|
||||
detail = ""
|
||||
|
||||
if isinstance(value, str):
|
||||
error = value
|
||||
|
@ -55,10 +55,7 @@ def map_exceptions(mapping):
|
|||
if len(value) > 2 and value[2] is not None:
|
||||
detail = value[2].format(e=e)
|
||||
|
||||
exc = APIException({
|
||||
'error': error,
|
||||
'detail': detail
|
||||
})
|
||||
exc = APIException({"error": error, "detail": detail})
|
||||
exc.status_code = status_code
|
||||
|
||||
raise exc
|
||||
|
@ -81,19 +78,12 @@ def validate_data(serializer_class, data):
|
|||
def serialize_errors_recursive(error):
|
||||
if isinstance(error, dict):
|
||||
return {
|
||||
key: serialize_errors_recursive(errors)
|
||||
for key, errors in error.items()
|
||||
key: serialize_errors_recursive(errors) for key, errors in error.items()
|
||||
}
|
||||
elif isinstance(error, list):
|
||||
return [
|
||||
serialize_errors_recursive(errors)
|
||||
for errors in error
|
||||
]
|
||||
return [serialize_errors_recursive(errors) for errors in error]
|
||||
else:
|
||||
return {
|
||||
'error': force_text(error),
|
||||
'code': error.code
|
||||
}
|
||||
return {"error": force_text(error), "code": error.code}
|
||||
|
||||
serializer = serializer_class(data=data)
|
||||
if not serializer.is_valid():
|
||||
|
@ -103,8 +93,9 @@ def validate_data(serializer_class, data):
|
|||
return serializer.data
|
||||
|
||||
|
||||
def validate_data_custom_fields(type_name, registry, data, base_serializer_class=None,
|
||||
type_attribute_name='type'):
|
||||
def validate_data_custom_fields(
|
||||
type_name, registry, data, base_serializer_class=None, type_attribute_name="type"
|
||||
):
|
||||
"""
|
||||
Validates the provided data with the serializer generated by the registry based on
|
||||
the provided type_name and provided base_serializer_class.
|
||||
|
@ -131,16 +122,18 @@ def validate_data_custom_fields(type_name, registry, data, base_serializer_class
|
|||
except InstanceTypeDoesNotExist:
|
||||
# If the provided type name doesn't exist we will raise a machine
|
||||
# readable validation error.
|
||||
raise RequestBodyValidationException({
|
||||
type_attribute_name: [
|
||||
{
|
||||
"error": f'\"{type_name}\" is not a valid choice.',
|
||||
"code": "invalid_choice"
|
||||
}
|
||||
]
|
||||
})
|
||||
raise RequestBodyValidationException(
|
||||
{
|
||||
type_attribute_name: [
|
||||
{
|
||||
"error": f'"{type_name}" is not a valid choice.',
|
||||
"code": "invalid_choice",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
serializer_kwargs = {'base_class': base_serializer_class}
|
||||
serializer_kwargs = {"base_class": base_serializer_class}
|
||||
serializer_class = type_instance.get_serializer_class(**serializer_kwargs)
|
||||
return validate_data(serializer_class, data)
|
||||
|
||||
|
@ -158,13 +151,14 @@ def get_request(args):
|
|||
"""
|
||||
|
||||
if len(args) < 2 or not isinstance(args[1], Request):
|
||||
raise ValueError('There must be a request in the args.')
|
||||
raise ValueError("There must be a request in the args.")
|
||||
|
||||
return args[1]
|
||||
|
||||
|
||||
def type_from_data_or_registry(data, registry, model_instance,
|
||||
type_attribute_name='type'):
|
||||
def type_from_data_or_registry(
|
||||
data, registry, model_instance, type_attribute_name="type"
|
||||
):
|
||||
"""
|
||||
Returns the type in the provided data else the type will be returned via the
|
||||
registry.
|
||||
|
@ -189,8 +183,7 @@ def type_from_data_or_registry(data, registry, model_instance,
|
|||
return registry.get_by_model(model_instance.specific_class).type
|
||||
|
||||
|
||||
def get_serializer_class(model, field_names, field_overrides=None,
|
||||
base_class=None):
|
||||
def get_serializer_class(model, field_names, field_overrides=None, base_class=None):
|
||||
"""
|
||||
Generates a model serializer based on the provided field names and field overrides.
|
||||
|
||||
|
@ -221,8 +214,8 @@ def get_serializer_class(model, field_names, field_overrides=None,
|
|||
|
||||
extends_meta = object
|
||||
|
||||
if hasattr(base_class, 'Meta'):
|
||||
extends_meta = getattr(base_class, 'Meta')
|
||||
if hasattr(base_class, "Meta"):
|
||||
extends_meta = getattr(base_class, "Meta")
|
||||
field_names = list(extends_meta.fields) + list(field_names)
|
||||
|
||||
class Meta(extends_meta):
|
||||
|
@ -230,12 +223,12 @@ def get_serializer_class(model, field_names, field_overrides=None,
|
|||
model = model_
|
||||
fields = list(field_names)
|
||||
|
||||
attrs = {'Meta': Meta}
|
||||
attrs = {"Meta": Meta}
|
||||
|
||||
if field_overrides:
|
||||
attrs.update(field_overrides)
|
||||
|
||||
return type(str(model_.__name__ + 'Serializer'), (base_class, ), attrs)
|
||||
return type(str(model_.__name__ + "Serializer"), (base_class,), attrs)
|
||||
|
||||
|
||||
class PolymorphicCustomFieldRegistrySerializer:
|
||||
|
@ -244,7 +237,7 @@ class PolymorphicCustomFieldRegistrySerializer:
|
|||
extension class.
|
||||
"""
|
||||
|
||||
def __init__(self, registry, base_class, type_field_name='type', many=False):
|
||||
def __init__(self, registry, base_class, type_field_name="type", many=False):
|
||||
self.read_only = False
|
||||
self.registry = registry
|
||||
self.base_class = base_class
|
||||
|
@ -257,7 +250,7 @@ class PolymorphicMappingSerializer:
|
|||
A placeholder class for the `PolymorphicMappingSerializerExtension` extension class.
|
||||
"""
|
||||
|
||||
def __init__(self, component_name, mapping, type_field_name='type', many=False):
|
||||
def __init__(self, component_name, mapping, type_field_name="type", many=False):
|
||||
self.read_only = False
|
||||
self.component_name = component_name
|
||||
self.mapping = mapping
|
||||
|
|
|
@ -9,7 +9,4 @@ from baserow.ws.routers import websocket_router
|
|||
django.setup()
|
||||
|
||||
|
||||
application = ProtocolTypeRouter({
|
||||
'http': AsgiHandler(),
|
||||
'websocket': websocket_router
|
||||
})
|
||||
application = ProtocolTypeRouter({"http": AsgiHandler(), "websocket": websocket_router})
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from celery import Celery
|
||||
|
||||
|
||||
app = Celery('baserow')
|
||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
||||
app = Celery("baserow")
|
||||
app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
app.autodiscover_tasks()
|
||||
|
|
|
@ -8,79 +8,77 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|||
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = os.getenv('SECRET_KEY', 'CHANGE_THIS_TO_SOMETHING_SECRET_IN_PRODUCTION')
|
||||
SECRET_KEY = os.getenv("SECRET_KEY", "CHANGE_THIS_TO_SOMETHING_SECRET_IN_PRODUCTION")
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = False
|
||||
|
||||
ALLOWED_HOSTS = ['localhost']
|
||||
ALLOWED_HOSTS = ["localhost"]
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'django.contrib.auth',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
|
||||
'rest_framework',
|
||||
'corsheaders',
|
||||
'channels',
|
||||
'mjml',
|
||||
'drf_spectacular',
|
||||
|
||||
'baserow.core',
|
||||
'baserow.api',
|
||||
'baserow.ws',
|
||||
'baserow.contrib.database'
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"rest_framework",
|
||||
"corsheaders",
|
||||
"channels",
|
||||
"mjml",
|
||||
"drf_spectacular",
|
||||
"baserow.core",
|
||||
"baserow.api",
|
||||
"baserow.ws",
|
||||
"baserow.contrib.database",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
"corsheaders.middleware.CorsMiddleware",
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'baserow.config.urls'
|
||||
ROOT_URLCONF = "baserow.config.urls"
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [],
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.request",
|
||||
"django.contrib.auth.context_processors.auth",
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'baserow.config.wsgi.application'
|
||||
ASGI_APPLICATION = 'baserow.config.asgi.application'
|
||||
WSGI_APPLICATION = "baserow.config.wsgi.application"
|
||||
ASGI_APPLICATION = "baserow.config.asgi.application"
|
||||
|
||||
REDIS_HOST = os.getenv('REDIS_HOST', 'redis')
|
||||
REDIS_PORT = os.getenv('REDIS_PORT', '6379')
|
||||
REDIS_USERNAME = os.getenv('REDIS_USER', '')
|
||||
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
|
||||
REDIS_PROTOCOL = os.getenv('REDIS_PROTOCOL', 'redis')
|
||||
REDIS_HOST = os.getenv("REDIS_HOST", "redis")
|
||||
REDIS_PORT = os.getenv("REDIS_PORT", "6379")
|
||||
REDIS_USERNAME = os.getenv("REDIS_USER", "")
|
||||
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", "")
|
||||
REDIS_PROTOCOL = os.getenv("REDIS_PROTOCOL", "redis")
|
||||
REDIS_URL = (
|
||||
f'{REDIS_PROTOCOL}://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
|
||||
f"{REDIS_PROTOCOL}://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0"
|
||||
)
|
||||
|
||||
CELERY_BROKER_URL = REDIS_URL
|
||||
|
||||
CHANNEL_LAYERS = {
|
||||
'default': {
|
||||
'BACKEND': 'channels_redis.core.RedisChannelLayer',
|
||||
'CONFIG': {
|
||||
"default": {
|
||||
"BACKEND": "channels_redis.core.RedisChannelLayer",
|
||||
"CONFIG": {
|
||||
"hosts": [REDIS_URL],
|
||||
},
|
||||
},
|
||||
|
@ -91,37 +89,37 @@ CHANNEL_LAYERS = {
|
|||
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': os.getenv('DATABASE_NAME', 'baserow'),
|
||||
'USER': os.getenv('DATABASE_USER', 'baserow'),
|
||||
'PASSWORD': os.getenv('DATABASE_PASSWORD', 'baserow'),
|
||||
'HOST': os.getenv('DATABASE_HOST', 'db'),
|
||||
'PORT': os.getenv('DATABASE_PORT', '5432'),
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": os.getenv("DATABASE_NAME", "baserow"),
|
||||
"USER": os.getenv("DATABASE_USER", "baserow"),
|
||||
"PASSWORD": os.getenv("DATABASE_PASSWORD", "baserow"),
|
||||
"HOST": os.getenv("DATABASE_HOST", "db"),
|
||||
"PORT": os.getenv("DATABASE_PORT", "5432"),
|
||||
}
|
||||
}
|
||||
|
||||
# Should contain the database connection name of the database where the user tables
|
||||
# are stored. This can be different than the default database because there are not
|
||||
# going to be any relations between the application schema and the user schema.
|
||||
USER_TABLE_DATABASE = 'default'
|
||||
USER_TABLE_DATABASE = "default"
|
||||
|
||||
# Password validation
|
||||
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.'
|
||||
'UserAttributeSimilarityValidator',
|
||||
"NAME": "django.contrib.auth.password_validation."
|
||||
"UserAttributeSimilarityValidator",
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||
},
|
||||
]
|
||||
|
||||
|
@ -129,9 +127,9 @@ AUTH_PASSWORD_VALIDATORS = [
|
|||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/2.2/topics/i18n/
|
||||
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
LANGUAGE_CODE = "en-us"
|
||||
|
||||
TIME_ZONE = 'UTC'
|
||||
TIME_ZONE = "UTC"
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
|
@ -143,79 +141,73 @@ USE_TZ = True
|
|||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/2.2/howto/static-files/
|
||||
|
||||
STATIC_URL = '/static/'
|
||||
STATIC_URL = "/static/"
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"baserow.api.authentication.JSONWebTokenAuthentication",
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'baserow.api.authentication.JSONWebTokenAuthentication',
|
||||
),
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
),
|
||||
'DEFAULT_SCHEMA_CLASS': 'baserow.api.openapi.AutoSchema'
|
||||
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
|
||||
"DEFAULT_SCHEMA_CLASS": "baserow.api.openapi.AutoSchema",
|
||||
}
|
||||
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
CORS_ALLOW_HEADERS = list(default_headers) + [
|
||||
'WebSocketId',
|
||||
"WebSocketId",
|
||||
]
|
||||
|
||||
|
||||
JWT_AUTH = {
|
||||
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=60 * 60),
|
||||
'JWT_ALLOW_REFRESH': True,
|
||||
'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),
|
||||
'JWT_RESPONSE_PAYLOAD_HANDLER': 'baserow.api.user.jwt.'
|
||||
'jwt_response_payload_handler'
|
||||
"JWT_EXPIRATION_DELTA": datetime.timedelta(seconds=60 * 60),
|
||||
"JWT_ALLOW_REFRESH": True,
|
||||
"JWT_REFRESH_EXPIRATION_DELTA": datetime.timedelta(days=7),
|
||||
"JWT_RESPONSE_PAYLOAD_HANDLER": "baserow.api.user.jwt."
|
||||
"jwt_response_payload_handler",
|
||||
}
|
||||
|
||||
SPECTACULAR_SETTINGS = {
|
||||
'TITLE': 'Baserow API spec',
|
||||
'DESCRIPTION': '',
|
||||
'CONTACT': {
|
||||
'url': 'https://baserow.io/contact'
|
||||
"TITLE": "Baserow API spec",
|
||||
"DESCRIPTION": "",
|
||||
"CONTACT": {"url": "https://baserow.io/contact"},
|
||||
"LICENSE": {
|
||||
"name": "MIT",
|
||||
"url": "https://gitlab.com/bramw/baserow/-/blob/master/LICENSE",
|
||||
},
|
||||
'LICENSE': {
|
||||
'name': 'MIT',
|
||||
'url': 'https://gitlab.com/bramw/baserow/-/blob/master/LICENSE'
|
||||
},
|
||||
'VERSION': '1.1.0',
|
||||
'SERVE_INCLUDE_SCHEMA': False,
|
||||
'TAGS': [
|
||||
{'name': 'Settings'},
|
||||
{'name': 'User'},
|
||||
{'name': 'User files'},
|
||||
{'name': 'Groups'},
|
||||
{'name': 'Group invitations'},
|
||||
{'name': 'Templates'},
|
||||
{'name': 'Applications'},
|
||||
{'name': 'Database tables'},
|
||||
{'name': 'Database table fields'},
|
||||
{'name': 'Database table views'},
|
||||
{'name': 'Database table view filters'},
|
||||
{'name': 'Database table view sortings'},
|
||||
{'name': 'Database table grid view'},
|
||||
{'name': 'Database table rows'},
|
||||
{'name': 'Database tokens'}
|
||||
"VERSION": "1.1.0",
|
||||
"SERVE_INCLUDE_SCHEMA": False,
|
||||
"TAGS": [
|
||||
{"name": "Settings"},
|
||||
{"name": "User"},
|
||||
{"name": "User files"},
|
||||
{"name": "Groups"},
|
||||
{"name": "Group invitations"},
|
||||
{"name": "Templates"},
|
||||
{"name": "Applications"},
|
||||
{"name": "Database tables"},
|
||||
{"name": "Database table fields"},
|
||||
{"name": "Database table views"},
|
||||
{"name": "Database table view filters"},
|
||||
{"name": "Database table view sortings"},
|
||||
{"name": "Database table grid view"},
|
||||
{"name": "Database table rows"},
|
||||
{"name": "Database tokens"},
|
||||
],
|
||||
}
|
||||
|
||||
DATABASE_ROUTERS = ('baserow.contrib.database.database_routers.TablesDatabaseRouter',)
|
||||
DATABASE_ROUTERS = ("baserow.contrib.database.database_routers.TablesDatabaseRouter",)
|
||||
|
||||
# The storage must always overwrite existing files.
|
||||
DEFAULT_FILE_STORAGE = 'baserow.core.storage.OverwriteFileSystemStorage'
|
||||
DEFAULT_FILE_STORAGE = "baserow.core.storage.OverwriteFileSystemStorage"
|
||||
|
||||
MJML_BACKEND_MODE = 'tcpserver'
|
||||
MJML_BACKEND_MODE = "tcpserver"
|
||||
MJML_TCPSERVERS = [
|
||||
(os.getenv('MJML_SERVER_HOST', 'mjml'), int(os.getenv('MJML_SERVER_PORT', 28101))),
|
||||
(os.getenv("MJML_SERVER_HOST", "mjml"), int(os.getenv("MJML_SERVER_PORT", 28101))),
|
||||
]
|
||||
|
||||
PUBLIC_BACKEND_URL = os.getenv('PUBLIC_BACKEND_URL', 'http://localhost:8000')
|
||||
PUBLIC_WEB_FRONTEND_URL = os.getenv('PUBLIC_WEB_FRONTEND_URL', 'http://localhost:3000')
|
||||
PRIVATE_BACKEND_URL = os.getenv('PRIVATE_BACKEND_URL', 'http://backend:8000')
|
||||
PUBLIC_BACKEND_URL = os.getenv("PUBLIC_BACKEND_URL", "http://localhost:8000")
|
||||
PUBLIC_WEB_FRONTEND_URL = os.getenv("PUBLIC_WEB_FRONTEND_URL", "http://localhost:3000")
|
||||
PRIVATE_BACKEND_URL = os.getenv("PRIVATE_BACKEND_URL", "http://backend:8000")
|
||||
PUBLIC_BACKEND_HOSTNAME = urlparse(PUBLIC_BACKEND_URL).hostname
|
||||
PUBLIC_WEB_FRONTEND_HOSTNAME = urlparse(PUBLIC_WEB_FRONTEND_URL).hostname
|
||||
PRIVATE_BACKEND_HOSTNAME = urlparse(PRIVATE_BACKEND_URL).hostname
|
||||
|
@ -226,44 +218,41 @@ if PUBLIC_BACKEND_HOSTNAME:
|
|||
if PRIVATE_BACKEND_HOSTNAME:
|
||||
ALLOWED_HOSTS.append(PRIVATE_BACKEND_HOSTNAME)
|
||||
|
||||
FROM_EMAIL = os.getenv('FROM_EMAIL', 'no-reply@localhost')
|
||||
FROM_EMAIL = os.getenv("FROM_EMAIL", "no-reply@localhost")
|
||||
RESET_PASSWORD_TOKEN_MAX_AGE = 60 * 60 * 48 # 48 hours
|
||||
ROW_PAGE_SIZE_LIMIT = 200 # Indicates how many rows can be requested at once.
|
||||
|
||||
# The amount of rows that can be imported when creating a table.
|
||||
INITIAL_TABLE_DATA_LIMIT = None
|
||||
if 'INITIAL_TABLE_DATA_LIMIT' in os.environ:
|
||||
INITIAL_TABLE_DATA_LIMIT = int(os.getenv('INITIAL_TABLE_DATA_LIMIT'))
|
||||
if "INITIAL_TABLE_DATA_LIMIT" in os.environ:
|
||||
INITIAL_TABLE_DATA_LIMIT = int(os.getenv("INITIAL_TABLE_DATA_LIMIT"))
|
||||
|
||||
MEDIA_URL_PATH = '/media/'
|
||||
MEDIA_URL = os.getenv('MEDIA_URL', urljoin(PUBLIC_BACKEND_URL, MEDIA_URL_PATH))
|
||||
MEDIA_ROOT = os.getenv('MEDIA_ROOT', '/media')
|
||||
MEDIA_URL_PATH = "/media/"
|
||||
MEDIA_URL = os.getenv("MEDIA_URL", urljoin(PUBLIC_BACKEND_URL, MEDIA_URL_PATH))
|
||||
MEDIA_ROOT = os.getenv("MEDIA_ROOT", "/media")
|
||||
|
||||
# Indicates the directory where the user files and user thumbnails are stored.
|
||||
USER_FILES_DIRECTORY = 'user_files'
|
||||
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
|
||||
USER_FILES_DIRECTORY = "user_files"
|
||||
USER_THUMBNAILS_DIRECTORY = "thumbnails"
|
||||
USER_FILE_SIZE_LIMIT = 1024 * 1024 * 20 # 20MB
|
||||
|
||||
if os.getenv('EMAIL_SMTP', ''):
|
||||
if os.getenv("EMAIL_SMTP", ""):
|
||||
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
|
||||
EMAIL_USE_TLS = bool(os.getenv('EMAIL_SMPT_USE_TLS', ''))
|
||||
EMAIL_HOST = os.getenv('EMAIL_SMTP_HOST', 'localhost')
|
||||
EMAIL_PORT = os.getenv('EMAIL_SMTP_PORT', '25')
|
||||
EMAIL_HOST_USER = os.getenv('EMAIL_SMTP_USER', '')
|
||||
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_SMTP_PASSWORD', '')
|
||||
EMAIL_USE_TLS = bool(os.getenv("EMAIL_SMPT_USE_TLS", ""))
|
||||
EMAIL_HOST = os.getenv("EMAIL_SMTP_HOST", "localhost")
|
||||
EMAIL_PORT = os.getenv("EMAIL_SMTP_PORT", "25")
|
||||
EMAIL_HOST_USER = os.getenv("EMAIL_SMTP_USER", "")
|
||||
EMAIL_HOST_PASSWORD = os.getenv("EMAIL_SMTP_PASSWORD", "")
|
||||
|
||||
|
||||
# Configurable thumbnails that are going to be generated when a user uploads an image
|
||||
# file.
|
||||
USER_THUMBNAILS = {
|
||||
'tiny': [None, 21],
|
||||
'small': [48, 48]
|
||||
}
|
||||
USER_THUMBNAILS = {"tiny": [None, 21], "small": [48, 48]}
|
||||
|
||||
# The directory that contains the all the templates in JSON format. When for example
|
||||
# the `sync_templates` management command is called, then the templates in the
|
||||
# database will be synced with these files.
|
||||
APPLICATION_TEMPLATES_DIR = os.path.join(BASE_DIR, '../../../templates')
|
||||
APPLICATION_TEMPLATES_DIR = os.path.join(BASE_DIR, "../../../templates")
|
||||
# The template that must be selected when the user first opens the templates select
|
||||
# modal.
|
||||
DEFAULT_APPLICATION_TEMPLATE = 'project-management'
|
||||
DEFAULT_APPLICATION_TEMPLATE = "project-management"
|
||||
|
|
|
@ -2,4 +2,4 @@ from .base import * # noqa: F403, F401
|
|||
|
||||
|
||||
DEBUG = True
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
||||
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
||||
|
|
|
@ -2,7 +2,7 @@ from .base import * # noqa: F403, F401
|
|||
|
||||
|
||||
DEBUG = True
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
||||
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
||||
|
||||
try:
|
||||
from .local import * # noqa: F403, F401
|
||||
|
|
|
@ -1,16 +1,12 @@
|
|||
from .base import * # noqa: F403, F401
|
||||
|
||||
|
||||
CELERY_BROKER_BACKEND = 'memory'
|
||||
CELERY_BROKER_BACKEND = "memory"
|
||||
CELERY_TASK_ALWAYS_EAGER = True
|
||||
CELERY_TASK_EAGER_PROPAGATES = True
|
||||
|
||||
CHANNEL_LAYERS = {
|
||||
"default": {
|
||||
"BACKEND": "channels.layers.InMemoryChannelLayer"
|
||||
}
|
||||
}
|
||||
CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}
|
||||
|
||||
USER_FILES_DIRECTORY = 'user_files'
|
||||
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
|
||||
USER_THUMBNAILS = {'tiny': [21, 21]}
|
||||
USER_FILES_DIRECTORY = "user_files"
|
||||
USER_THUMBNAILS_DIRECTORY = "thumbnails"
|
||||
USER_THUMBNAILS = {"tiny": [21, 21]}
|
||||
|
|
|
@ -8,13 +8,14 @@ from baserow.core.registries import plugin_registry
|
|||
|
||||
|
||||
def health(request):
|
||||
return HttpResponse('OK')
|
||||
return HttpResponse("OK")
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^api/', include('baserow.api.urls', namespace='api')),
|
||||
url(r'^_health$', health, name='health_check')
|
||||
] + plugin_registry.urls + static(
|
||||
settings.MEDIA_URL_PATH,
|
||||
document_root=settings.MEDIA_ROOT
|
||||
urlpatterns = (
|
||||
[
|
||||
url(r"^api/", include("baserow.api.urls", namespace="api")),
|
||||
url(r"^_health$", health, name="health_check"),
|
||||
]
|
||||
+ plugin_registry.urls
|
||||
+ static(settings.MEDIA_URL_PATH, document_root=settings.MEDIA_ROOT)
|
||||
)
|
||||
|
|
|
@ -1 +1 @@
|
|||
default_app_config = 'baserow.contrib.database.config.DatabaseConfig'
|
||||
default_app_config = "baserow.contrib.database.config.DatabaseConfig"
|
||||
|
|
|
@ -2,7 +2,7 @@ from rest_framework.status import HTTP_400_BAD_REQUEST
|
|||
|
||||
|
||||
ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP = (
|
||||
'ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP',
|
||||
"ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The provided database does not belong to the related group.'
|
||||
"The provided database does not belong to the related group.",
|
||||
)
|
||||
|
|
|
@ -2,41 +2,41 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
|
|||
|
||||
|
||||
ERROR_FIELD_DOES_NOT_EXIST = (
|
||||
'ERROR_FIELD_DOES_NOT_EXIST',
|
||||
"ERROR_FIELD_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The requested field does not exist.'
|
||||
"The requested field does not exist.",
|
||||
)
|
||||
ERROR_CANNOT_DELETE_PRIMARY_FIELD = 'ERROR_CANNOT_DELETE_PRIMARY_FIELD'
|
||||
ERROR_CANNOT_CHANGE_FIELD_TYPE = 'ERROR_CANNOT_CHANGE_FIELD_TYPE'
|
||||
ERROR_CANNOT_DELETE_PRIMARY_FIELD = "ERROR_CANNOT_DELETE_PRIMARY_FIELD"
|
||||
ERROR_CANNOT_CHANGE_FIELD_TYPE = "ERROR_CANNOT_CHANGE_FIELD_TYPE"
|
||||
ERROR_LINK_ROW_TABLE_NOT_PROVIDED = (
|
||||
'ERROR_LINK_ROW_TABLE_NOT_PROVIDED',
|
||||
"ERROR_LINK_ROW_TABLE_NOT_PROVIDED",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The `link_row_table` must be provided.'
|
||||
"The `link_row_table` must be provided.",
|
||||
)
|
||||
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE = 'ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE'
|
||||
ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE = "ERROR_LINK_ROW_TABLE_NOT_IN_SAME_DATABASE"
|
||||
ERROR_FIELD_NOT_IN_TABLE = (
|
||||
'ERROR_FIELD_NOT_IN_TABLE',
|
||||
"ERROR_FIELD_NOT_IN_TABLE",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The provided field does not belong in the related table.'
|
||||
"The provided field does not belong in the related table.",
|
||||
)
|
||||
ERROR_ORDER_BY_FIELD_NOT_FOUND = (
|
||||
'ERROR_ORDER_BY_FIELD_NOT_FOUND',
|
||||
"ERROR_ORDER_BY_FIELD_NOT_FOUND",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The field {e.field_name} was not found in the table.'
|
||||
"The field {e.field_name} was not found in the table.",
|
||||
)
|
||||
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE = (
|
||||
'ERROR_ORDER_BY_FIELD_NOT_POSSIBLE',
|
||||
"ERROR_ORDER_BY_FIELD_NOT_POSSIBLE",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'It is not possible to order by {e.field_name} because the field type '
|
||||
'{e.field_type} does not support filtering.'
|
||||
"It is not possible to order by {e.field_name} because the field type "
|
||||
"{e.field_type} does not support filtering.",
|
||||
)
|
||||
ERROR_FILTER_FIELD_NOT_FOUND = (
|
||||
'ERROR_FILTER_FIELD_NOT_FOUND',
|
||||
"ERROR_FILTER_FIELD_NOT_FOUND",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The field {e.field_name} was not found in the table.'
|
||||
"The field {e.field_name} was not found in the table.",
|
||||
)
|
||||
ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE = (
|
||||
'ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE',
|
||||
"ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The field type {e.field_type} is not compatible with the primary field.'
|
||||
"The field type {e.field_type} is not compatible with the primary field.",
|
||||
)
|
||||
|
|
|
@ -16,17 +16,17 @@ class FieldSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Field
|
||||
fields = ('id', 'table_id', 'name', 'order', 'type', 'primary')
|
||||
fields = ("id", "table_id", "name", "order", "type", "primary")
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True},
|
||||
'table_id': {'read_only': True},
|
||||
"id": {"read_only": True},
|
||||
"table_id": {"read_only": True},
|
||||
}
|
||||
|
||||
@extend_schema_field(OpenApiTypes.STR)
|
||||
def get_type(self, instance):
|
||||
# It could be that the field related to the instance is already in the context
|
||||
# else we can call the specific_class property to find it.
|
||||
field = self.context.get('instance_type')
|
||||
field = self.context.get("instance_type")
|
||||
|
||||
if not field:
|
||||
field = field_type_registry.get_by_model(instance.specific_class)
|
||||
|
@ -42,58 +42,57 @@ class SelectOptionSerializer(serializers.Serializer):
|
|||
|
||||
class CreateFieldSerializer(serializers.ModelSerializer):
|
||||
type = serializers.ChoiceField(
|
||||
choices=lazy(field_type_registry.get_types, list)(),
|
||||
required=True
|
||||
choices=lazy(field_type_registry.get_types, list)(), required=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Field
|
||||
fields = ('name', 'type')
|
||||
fields = ("name", "type")
|
||||
|
||||
|
||||
class UpdateFieldSerializer(serializers.ModelSerializer):
|
||||
type = serializers.ChoiceField(
|
||||
choices=lazy(field_type_registry.get_types, list)(),
|
||||
required=False
|
||||
choices=lazy(field_type_registry.get_types, list)(), required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Field
|
||||
fields = ('name', 'type')
|
||||
fields = ("name", "type")
|
||||
extra_kwargs = {
|
||||
'name': {'required': False},
|
||||
"name": {"required": False},
|
||||
}
|
||||
|
||||
|
||||
class LinkRowValueSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField(help_text='The unique identifier of the row in the '
|
||||
'related table.')
|
||||
id = serializers.IntegerField(
|
||||
help_text="The unique identifier of the row in the " "related table."
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
value_field_name = kwargs.pop('value_field_name', 'value')
|
||||
value_field_name = kwargs.pop("value_field_name", "value")
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['value'] = serializers.CharField(
|
||||
help_text='The primary field\'s value as a string of the row in the '
|
||||
'related table.',
|
||||
self.fields["value"] = serializers.CharField(
|
||||
help_text="The primary field's value as a string of the row in the "
|
||||
"related table.",
|
||||
source=value_field_name,
|
||||
required=False
|
||||
required=False,
|
||||
)
|
||||
|
||||
|
||||
class FileFieldRequestSerializer(serializers.Serializer):
|
||||
visible_name = serializers.CharField(
|
||||
required=False,
|
||||
help_text='A visually editable name for the field.'
|
||||
required=False, help_text="A visually editable name for the field."
|
||||
)
|
||||
name = serializers.CharField(
|
||||
required=True,
|
||||
validators=[user_file_name_validator],
|
||||
help_text='Accepts the name of the already uploaded user file.'
|
||||
help_text="Accepts the name of the already uploaded user file.",
|
||||
)
|
||||
|
||||
|
||||
class FileFieldResponseSerializer(UserFileURLAndThumbnailsSerializerMixin,
|
||||
serializers.Serializer):
|
||||
class FileFieldResponseSerializer(
|
||||
UserFileURLAndThumbnailsSerializerMixin, serializers.Serializer
|
||||
):
|
||||
visible_name = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
size = serializers.IntegerField()
|
||||
|
|
|
@ -5,9 +5,9 @@ from baserow.contrib.database.fields.registries import field_type_registry
|
|||
from .views import FieldsView, FieldView
|
||||
|
||||
|
||||
app_name = 'baserow.contrib.database.api.fields'
|
||||
app_name = "baserow.contrib.database.api.fields"
|
||||
|
||||
urlpatterns = field_type_registry.api_urls + [
|
||||
url(r'table/(?P<table_id>[0-9]+)/$', FieldsView.as_view(), name='list'),
|
||||
url(r'(?P<field_id>[0-9]+)/$', FieldView.as_view(), name='item'),
|
||||
url(r"table/(?P<table_id>[0-9]+)/$", FieldsView.as_view(), name="list"),
|
||||
url(r"(?P<field_id>[0-9]+)/$", FieldView.as_view(), name="item"),
|
||||
]
|
||||
|
|
|
@ -18,26 +18,27 @@ from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIS
|
|||
from baserow.contrib.database.table.handler import TableHandler
|
||||
from baserow.contrib.database.table.exceptions import TableDoesNotExist
|
||||
from baserow.contrib.database.api.fields.errors import (
|
||||
ERROR_CANNOT_DELETE_PRIMARY_FIELD, ERROR_CANNOT_CHANGE_FIELD_TYPE,
|
||||
ERROR_FIELD_DOES_NOT_EXIST
|
||||
ERROR_CANNOT_DELETE_PRIMARY_FIELD,
|
||||
ERROR_CANNOT_CHANGE_FIELD_TYPE,
|
||||
ERROR_FIELD_DOES_NOT_EXIST,
|
||||
)
|
||||
from baserow.contrib.database.fields.exceptions import (
|
||||
CannotDeletePrimaryField, CannotChangeFieldType, FieldDoesNotExist
|
||||
CannotDeletePrimaryField,
|
||||
CannotChangeFieldType,
|
||||
FieldDoesNotExist,
|
||||
)
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.fields.handler import FieldHandler
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
|
||||
from .serializers import (
|
||||
FieldSerializer, CreateFieldSerializer, UpdateFieldSerializer
|
||||
)
|
||||
from .serializers import FieldSerializer, CreateFieldSerializer, UpdateFieldSerializer
|
||||
|
||||
|
||||
class FieldsView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
def get_permissions(self):
|
||||
if self.request.method == 'GET':
|
||||
if self.request.method == "GET":
|
||||
return [AllowAny()]
|
||||
|
||||
return super().get_permissions()
|
||||
|
@ -45,37 +46,37 @@ class FieldsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id',
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns only the fields of the table related to the '
|
||||
'provided value.'
|
||||
description="Returns only the fields of the table related to the "
|
||||
"provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database table fields'],
|
||||
operation_id='list_database_table_fields',
|
||||
tags=["Database table fields"],
|
||||
operation_id="list_database_table_fields",
|
||||
description=(
|
||||
'Lists all the fields of the table related to the provided parameter if '
|
||||
'the user has access to the related database\'s group. If the group is '
|
||||
'related to a template, then this endpoint will be publicly accessible. A '
|
||||
'table consists of fields and each field can have a different type. Each '
|
||||
'type can have different properties. A field is comparable with a regular '
|
||||
'table\'s column.'
|
||||
"Lists all the fields of the table related to the provided parameter if "
|
||||
"the user has access to the related database's group. If the group is "
|
||||
"related to a template, then this endpoint will be publicly accessible. A "
|
||||
"table consists of fields and each field can have a different type. Each "
|
||||
"type can have different properties. A field is comparable with a regular "
|
||||
"table's column."
|
||||
),
|
||||
responses={
|
||||
200: PolymorphicCustomFieldRegistrySerializer(
|
||||
field_type_registry,
|
||||
FieldSerializer,
|
||||
many=True
|
||||
field_type_registry, FieldSerializer, many=True
|
||||
),
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
}
)
@map_exceptions({
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
})
@method_permission_classes([AllowAny])
def get(self, request, table_id):
"""

@ -84,9 +85,10 @@ class FieldsView(APIView):
"""

table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True,
allow_if_template=True)
fields = Field.objects.filter(table=table).select_related('content_type')
table.database.group.has_user(
request.user, raise_error=True, allow_if_template=True
)
fields = Field.objects.filter(table=table).select_related("content_type")

data = [
field_type_registry.get_serializer(field, FieldSerializer).data

@ -97,47 +99,48 @@ class FieldsView(APIView):
@extend_schema(
parameters=[
OpenApiParameter(
name='table_id',
name="table_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description='Creates a new field for the provided table related to the '
'value.'
description="Creates a new field for the provided table related to the "
"value.",
)
],
tags=['Database table fields'],
operation_id='create_database_table_field',
tags=["Database table fields"],
operation_id="create_database_table_field",
description=(
'Creates a new field for the table related to the provided `table_id` '
'parameter if the authorized user has access to the related database\'s '
'group. Depending on the type, different properties can optionally be '
'set.'
"Creates a new field for the table related to the provided `table_id` "
"parameter if the authorized user has access to the related database's "
"group. Depending on the type, different properties can optionally be "
"set."
),
request=PolymorphicCustomFieldRegistrySerializer(
field_type_registry,
CreateFieldSerializer
field_type_registry, CreateFieldSerializer
),
responses={
200: PolymorphicCustomFieldRegistrySerializer(
field_type_registry,
FieldSerializer
field_type_registry, FieldSerializer
),
400: get_error_schema([
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
]),
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
}
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
),
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
},
)
@transaction.atomic
@validate_body_custom_fields(
field_type_registry, base_serializer_class=CreateFieldSerializer)
@map_exceptions({
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
})
field_type_registry, base_serializer_class=CreateFieldSerializer
)
@map_exceptions(
{
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
}
)
def post(self, request, data, table_id):
"""Creates a new field for a table."""

type_name = data.pop('type')
type_name = data.pop("type")
field_type = field_type_registry.get(type_name)
table = TableHandler().get_table(table_id)
table.database.group.has_user(request.user, raise_error=True)
@ -158,32 +161,33 @@ class FieldView(APIView):
@extend_schema(
parameters=[
OpenApiParameter(
name='field_id',
name="field_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description='Returns the field related to the provided value.'
description="Returns the field related to the provided value.",
)
],
tags=['Database table fields'],
operation_id='get_database_table_field',
tags=["Database table fields"],
operation_id="get_database_table_field",
description=(
'Returns the existing field if the authorized user has access to the '
'related database\'s group. Depending on the type different properties'
'could be returned.'
"Returns the existing field if the authorized user has access to the "
"related database's group. Depending on the type different properties"
"could be returned."
),
responses={
200: PolymorphicCustomFieldRegistrySerializer(
field_type_registry,
FieldSerializer
field_type_registry, FieldSerializer
),
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
404: get_error_schema(['ERROR_FIELD_DOES_NOT_EXIST'])
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
},
)
@map_exceptions(
{
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
}
)
@map_exceptions({
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
})
def get(self, request, field_id):
"""Selects a single field and responds with a serialized version."""

@ -195,57 +199,64 @@ class FieldView(APIView):
@extend_schema(
parameters=[
OpenApiParameter(
name='field_id',
name="field_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description='Updates the field related to the provided value.'
description="Updates the field related to the provided value.",
)
],
tags=['Database table fields'],
operation_id='update_database_table_field',
tags=["Database table fields"],
operation_id="update_database_table_field",
description=(
'Updates the existing field if the authorized user has access to the '
'related database\'s group. The type can also be changed and depending on '
'that type, different additional properties can optionally be set. If you '
'change the field type it could happen that the data conversion fails, in '
'that case the `ERROR_CANNOT_CHANGE_FIELD_TYPE` is returned, but this '
'rarely happens. If a data value cannot be converted it is set to `null` '
'so data might go lost.'
"Updates the existing field if the authorized user has access to the "
"related database's group. The type can also be changed and depending on "
"that type, different additional properties can optionally be set. If you "
"change the field type it could happen that the data conversion fails, in "
"that case the `ERROR_CANNOT_CHANGE_FIELD_TYPE` is returned, but this "
"rarely happens. If a data value cannot be converted it is set to `null` "
"so data might go lost."
),
request=PolymorphicCustomFieldRegistrySerializer(
field_type_registry,
UpdateFieldSerializer
field_type_registry, UpdateFieldSerializer
),
responses={
200: PolymorphicCustomFieldRegistrySerializer(
field_type_registry,
FieldSerializer
field_type_registry, FieldSerializer
),
400: get_error_schema([
'ERROR_USER_NOT_IN_GROUP',
'ERROR_CANNOT_CHANGE_FIELD_TYPE',
'ERROR_REQUEST_BODY_VALIDATION'
]),
404: get_error_schema(['ERROR_FIELD_DOES_NOT_EXIST'])
}
400: get_error_schema(
[
"ERROR_USER_NOT_IN_GROUP",
"ERROR_CANNOT_CHANGE_FIELD_TYPE",
"ERROR_REQUEST_BODY_VALIDATION",
]
),
404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
},
)
@transaction.atomic
@map_exceptions({
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotChangeFieldType: ERROR_CANNOT_CHANGE_FIELD_TYPE
})
@map_exceptions(
{
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotChangeFieldType: ERROR_CANNOT_CHANGE_FIELD_TYPE,
}
)
def patch(self, request, field_id):
"""Updates the field if the user belongs to the group."""

field = FieldHandler().get_field(
field_id,
base_queryset=Field.objects.select_for_update()
).specific
field = (
FieldHandler()
.get_field(field_id, base_queryset=Field.objects.select_for_update())
.specific
)
type_name = type_from_data_or_registry(request.data, field_type_registry, field)
field_type = field_type_registry.get(type_name)
data = validate_data_custom_fields(type_name, field_type_registry, request.data,
base_serializer_class=UpdateFieldSerializer)
data = validate_data_custom_fields(
type_name,
field_type_registry,
request.data,
base_serializer_class=UpdateFieldSerializer,
)

# Because each field type can raise custom exceptions at while updating the
# field we need to be able to map those to the correct API exceptions which are
@ -259,34 +270,36 @@ class FieldView(APIView):
@extend_schema(
parameters=[
OpenApiParameter(
name='field_id',
name="field_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description='Deletes the field related to the provided value.'
description="Deletes the field related to the provided value.",
)
],
tags=['Database table fields'],
operation_id='delete_database_table_field',
tags=["Database table fields"],
operation_id="delete_database_table_field",
description=(
'Deletes the existing field if the authorized user has access to the '
'related database\'s group. Note that all the related data to that field '
'is also deleted. Primary fields cannot be deleted because their value '
'represents the row.'
"Deletes the existing field if the authorized user has access to the "
"related database's group. Note that all the related data to that field "
"is also deleted. Primary fields cannot be deleted because their value "
"represents the row."
),
responses={
204: None,
400: get_error_schema([
'ERROR_USER_NOT_IN_GROUP', 'ERROR_CANNOT_DELETE_PRIMARY_FIELD'
]),
404: get_error_schema(['ERROR_FIELD_DOES_NOT_EXIST'])
}
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_CANNOT_DELETE_PRIMARY_FIELD"]
),
404: get_error_schema(["ERROR_FIELD_DOES_NOT_EXIST"]),
},
)
@transaction.atomic
@map_exceptions({
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotDeletePrimaryField: ERROR_CANNOT_DELETE_PRIMARY_FIELD
})
@map_exceptions(
{
FieldDoesNotExist: ERROR_FIELD_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
CannotDeletePrimaryField: ERROR_CANNOT_DELETE_PRIMARY_FIELD,
}
)
def delete(self, request, field_id):
"""Deletes an existing field if the user belongs to the group."""

@ -2,7 +2,7 @@ from rest_framework.status import HTTP_404_NOT_FOUND

ERROR_ROW_DOES_NOT_EXIST = (
'ERROR_ROW_DOES_NOT_EXIST',
"ERROR_ROW_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
'The requested row does not exist.'
"The requested row does not exist.",
)
@ -12,15 +12,14 @@ logger = logging.getLogger(__name__)

class RowSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'order',)
extra_kwargs = {
'id': {'read_only': True},
'order': {'read_only': True}
}
fields = (
"id",
"order",
)
extra_kwargs = {"id": {"read_only": True}, "order": {"read_only": True}}


def get_row_serializer_class(model, base_class=None, is_response=False,
field_ids=None):
def get_row_serializer_class(model, base_class=None, is_response=False, field_ids=None):
"""
Generates a Django rest framework model serializer based on the available fields
that belong to this model. For each table field, used to generate this serializer,

@ -46,17 +45,16 @@ def get_row_serializer_class(model, base_class=None, is_response=False,

field_objects = model._field_objects
field_names = [
field['name']
field["name"]
for field in field_objects.values()
if field_ids is None or field['field'].id in field_ids
if field_ids is None or field["field"].id in field_ids
]
field_overrides = {
field['name']:
field['type'].get_response_serializer_field(field['field'])
if is_response else
field['type'].get_serializer_field(field['field'])
field["name"]: field["type"].get_response_serializer_field(field["field"])
if is_response
else field["type"].get_serializer_field(field["field"])
for field in field_objects.values()
if field_ids is None or field['field'].id in field_ids
if field_ids is None or field["field"].id in field_ids
}
return get_serializer_class(model, field_names, field_overrides, base_class)

@ -73,13 +71,11 @@ def get_example_row_serializer_class(add_id=False):
:rtype: Serializer
"""

if not hasattr(get_example_row_serializer_class, 'cache'):
if not hasattr(get_example_row_serializer_class, "cache"):
get_example_row_serializer_class.cache = {}

class_name = (
'ExampleRowResponseSerializer'
if add_id else
'ExampleRowRequestSerializer'
"ExampleRowResponseSerializer" if add_id else "ExampleRowRequestSerializer"
)

if class_name in get_example_row_serializer_class.cache:

@ -88,21 +84,24 @@ def get_example_row_serializer_class(add_id=False):
fields = {}

if add_id:
fields['id'] = serializers.IntegerField(
read_only=True,
help_text='The unique identifier of the row in the table.'
fields["id"] = serializers.IntegerField(
read_only=True, help_text="The unique identifier of the row in the table."
)
fields['order'] = serializers.DecimalField(
max_digits=40, decimal_places=20, required=False,
help_text='Indicates the position of the row, lowest first and highest '
'last.'
fields["order"] = serializers.DecimalField(
max_digits=40,
decimal_places=20,
required=False,
help_text="Indicates the position of the row, lowest first and highest "
"last.",
)

field_types = field_type_registry.registry.values()

if len(field_types) == 0:
logger.warning('The field types appear to be empty. This module is probably '
'imported before the fields have been registered.')
logger.warning(
"The field types appear to be empty. This module is probably "
"imported before the fields have been registered."
)

for i, field_type in enumerate(field_types):
# In order to generate a serializer we need a model instance. This method is

@ -113,15 +112,16 @@ def get_example_row_serializer_class(add_id=False):
defaults = model_default_values(field_type.model_class)
instance = dict_to_object(defaults)
kwargs = {
'help_text': f'This field represents the `{field_type.type}` field. The '
f'number in field_{i + 1} is in a normal request or response '
f'the id of the field. '
f'{field_type.get_serializer_help_text(instance)}'
"help_text": f"This field represents the `{field_type.type}` field. The "
f"number in field_{i + 1} is in a normal request or response "
f"the id of the field. "
f"{field_type.get_serializer_help_text(instance)}"
}
get_field_method = \
'get_response_serializer_field' if add_id else 'get_serializer_field'
get_field_method = (
"get_response_serializer_field" if add_id else "get_serializer_field"
)
serializer_field = getattr(field_type, get_field_method)(instance, **kwargs)
fields[f'field_{i + 1}'] = serializer_field
fields[f"field_{i + 1}"] = serializer_field

class_object = type(class_name, (serializers.Serializer,), fields)
get_example_row_serializer_class.cache[class_name] = class_object

@ -132,6 +132,8 @@ def get_example_row_serializer_class(add_id=False):
example_pagination_row_serializer_class = get_example_pagination_serializer_class(
get_example_row_serializer_class(True)
)
example_pagination_row_serializer_class_with_field_options = \
example_pagination_row_serializer_class_with_field_options = (
get_example_pagination_serializer_class(
get_example_row_serializer_class(True), add_field_options=True)
get_example_row_serializer_class(True), add_field_options=True
)
)
@ -3,10 +3,13 @@ from django.conf.urls import url
from .views import RowsView, RowView


app_name = 'baserow.contrib.database.api.rows'
app_name = "baserow.contrib.database.api.rows"

urlpatterns = [
url(r'table/(?P<table_id>[0-9]+)/$', RowsView.as_view(), name='list'),
url(r'table/(?P<table_id>[0-9]+)/(?P<row_id>[0-9]+)/$', RowView.as_view(),
name='item'),
url(r"table/(?P<table_id>[0-9]+)/$", RowsView.as_view(), name="list"),
url(
r"table/(?P<table_id>[0-9]+)/(?P<row_id>[0-9]+)/$",
RowView.as_view(),
name="item",
),
]
@ -13,22 +13,25 @@ from baserow.api.schemas import get_error_schema
from baserow.api.user_files.errors import ERROR_USER_FILE_DOES_NOT_EXIST
from baserow.api.utils import validate_data
from baserow.contrib.database.api.fields.errors import (
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE, ERROR_ORDER_BY_FIELD_NOT_FOUND,
ERROR_FILTER_FIELD_NOT_FOUND
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
ERROR_ORDER_BY_FIELD_NOT_FOUND,
ERROR_FILTER_FIELD_NOT_FOUND,
)
from baserow.contrib.database.api.rows.errors import ERROR_ROW_DOES_NOT_EXIST
from baserow.contrib.database.api.rows.serializers import (
example_pagination_row_serializer_class
example_pagination_row_serializer_class,
)
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
from baserow.contrib.database.api.tokens.authentications import TokenAuthentication
from baserow.contrib.database.api.tokens.errors import ERROR_NO_PERMISSION_TO_TABLE
from baserow.contrib.database.api.views.errors import (
ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD
ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
)
from baserow.contrib.database.fields.exceptions import (
OrderByFieldNotFound, OrderByFieldNotPossible, FilterFieldNotFound
OrderByFieldNotFound,
OrderByFieldNotPossible,
FilterFieldNotFound,
)
from baserow.contrib.database.rows.exceptions import RowDoesNotExist
from baserow.contrib.database.rows.handler import RowHandler

@ -37,16 +40,21 @@ from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.tokens.exceptions import NoPermissionToTable
from baserow.contrib.database.tokens.handler import TokenHandler
from baserow.contrib.database.views.exceptions import (
ViewFilterTypeNotAllowedForField, ViewFilterTypeDoesNotExist
ViewFilterTypeNotAllowedForField,
ViewFilterTypeDoesNotExist,
)
from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.core.exceptions import UserNotInGroup
from baserow.core.user_files.exceptions import UserFileDoesNotExist
from .serializers import (
RowSerializer, get_example_row_serializer_class, get_row_serializer_class
RowSerializer,
get_example_row_serializer_class,
get_row_serializer_class,
)
from baserow.contrib.database.fields.field_filters import (
FILTER_TYPE_AND,
FILTER_TYPE_OR,
)
from baserow.contrib.database.fields.field_filters import FILTER_TYPE_AND, \
FILTER_TYPE_OR


class RowsView(APIView):
@ -56,137 +64,141 @@ class RowsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id',
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns the rows of the table related to the provided '
|
||||
'value.'
|
||||
description="Returns the rows of the table related to the provided "
|
||||
"value.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='page',
|
||||
name="page",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Defines which page of rows should be returned.'
|
||||
description="Defines which page of rows should be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='size',
|
||||
name="size",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Defines how many rows should be returned per page.'
|
||||
description="Defines how many rows should be returned per page.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='search',
|
||||
name="search",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description='If provided only rows with data that matches the search '
|
||||
'query are going to be returned.'
|
||||
description="If provided only rows with data that matches the search "
|
||||
"query are going to be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='order_by',
|
||||
name="order_by",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description='Optionally the rows can be ordered by provided field ids '
|
||||
'separated by comma. By default a field is ordered in '
|
||||
'ascending (A-Z) order, but by prepending the field with '
|
||||
'a \'-\' it can be ordered descending (Z-A). '
|
||||
description="Optionally the rows can be ordered by provided field ids "
|
||||
"separated by comma. By default a field is ordered in "
|
||||
"ascending (A-Z) order, but by prepending the field with "
|
||||
"a '-' it can be ordered descending (Z-A). ",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='filter__{field}__{filter}',
|
||||
name="filter__{field}__{filter}",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description=(
|
||||
f'The rows can optionally be filtered by the same view filters '
|
||||
f'available for the views. Multiple filters can be provided if '
|
||||
f'they follow the same format. The field and filter variable '
|
||||
f'indicate how to filter and the value indicates where to filter '
|
||||
f'on.\n\n'
|
||||
f'For example if you provide the following GET parameter '
|
||||
f'`filter__field_1__equal=test` then only rows where the value of '
|
||||
f'field_1 is equal to test are going to be returned.\n\n'
|
||||
f'The following filters are available: '
|
||||
f"The rows can optionally be filtered by the same view filters "
|
||||
f"available for the views. Multiple filters can be provided if "
|
||||
f"they follow the same format. The field and filter variable "
|
||||
f"indicate how to filter and the value indicates where to filter "
|
||||
f"on.\n\n"
|
||||
f"For example if you provide the following GET parameter "
|
||||
f"`filter__field_1__equal=test` then only rows where the value of "
|
||||
f"field_1 is equal to test are going to be returned.\n\n"
|
||||
f"The following filters are available: "
|
||||
f'{", ".join(view_filter_type_registry.get_types())}.'
|
||||
)
|
||||
),
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='filter_type',
|
||||
name="filter_type",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description=(
|
||||
'`AND`: Indicates that the rows must match all the provided '
|
||||
'filters.\n'
|
||||
'`OR`: Indicates that the rows only have to match one of the '
|
||||
'filters.\n\n'
|
||||
'This works only if two or more filters are provided.'
|
||||
)
|
||||
"`AND`: Indicates that the rows must match all the provided "
|
||||
"filters.\n"
|
||||
"`OR`: Indicates that the rows only have to match one of the "
|
||||
"filters.\n\n"
|
||||
"This works only if two or more filters are provided."
|
||||
),
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='include',
|
||||
name="include",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description=(
|
||||
'All the fields are included in the response by default. You can '
|
||||
'select a subset of fields by providing the include query '
|
||||
'parameter. If you for example provide the following GET '
|
||||
'parameter `include=field_1,field_2` then only the fields with'
|
||||
'id `1` and id `2` are going to be selected and included in the '
|
||||
'response. '
|
||||
)
|
||||
"All the fields are included in the response by default. You can "
|
||||
"select a subset of fields by providing the include query "
|
||||
"parameter. If you for example provide the following GET "
|
||||
"parameter `include=field_1,field_2` then only the fields with"
|
||||
"id `1` and id `2` are going to be selected and included in the "
|
||||
"response. "
|
||||
),
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='exclude',
|
||||
name="exclude",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description=(
|
||||
'All the fields are included in the response by default. You can '
|
||||
'select a subset of fields by providing the exclude query '
|
||||
'parameter. If you for example provide the following GET '
|
||||
'parameter `exclude=field_1,field_2` then the fields with id `1` '
|
||||
'and id `2` are going to be excluded from the selection and '
|
||||
'response.'
|
||||
)
|
||||
"All the fields are included in the response by default. You can "
|
||||
"select a subset of fields by providing the exclude query "
|
||||
"parameter. If you for example provide the following GET "
|
||||
"parameter `exclude=field_1,field_2` then the fields with id `1` "
|
||||
"and id `2` are going to be excluded from the selection and "
|
||||
"response."
|
||||
),
|
||||
),
|
||||
],
|
||||
tags=['Database table rows'],
|
||||
operation_id='list_database_table_rows',
|
||||
tags=["Database table rows"],
|
||||
operation_id="list_database_table_rows",
|
||||
description=(
|
||||
'Lists all the rows of the table related to the provided parameter if the '
|
||||
'user has access to the related database\'s group. The response is '
|
||||
'paginated by a page/size style. It is also possible to provide an '
|
||||
'optional search query, only rows where the data matches the search query '
|
||||
'are going to be returned then. The properties of the returned rows '
|
||||
'depends on which fields the table has. For a complete overview of fields '
|
||||
'use the **list_database_table_fields** endpoint to list them all. In the '
|
||||
'example all field types are listed, but normally the number in '
|
||||
'field_{id} key is going to be the id of the field. The value is what the '
|
||||
'user has provided and the format of it depends on the fields type.'
|
||||
"Lists all the rows of the table related to the provided parameter if the "
|
||||
"user has access to the related database's group. The response is "
|
||||
"paginated by a page/size style. It is also possible to provide an "
|
||||
"optional search query, only rows where the data matches the search query "
|
||||
"are going to be returned then. The properties of the returned rows "
|
||||
"depends on which fields the table has. For a complete overview of fields "
|
||||
"use the **list_database_table_fields** endpoint to list them all. In the "
|
||||
"example all field types are listed, but normally the number in "
|
||||
"field_{id} key is going to be the id of the field. The value is what the "
|
||||
"user has provided and the format of it depends on the fields type."
|
||||
),
|
||||
responses={
|
||||
200: example_pagination_row_serializer_class,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_PAGE_SIZE_LIMIT',
|
||||
'ERROR_INVALID_PAGE',
|
||||
'ERROR_ORDER_BY_FIELD_NOT_FOUND',
|
||||
'ERROR_ORDER_BY_FIELD_NOT_POSSIBLE',
|
||||
'ERROR_FILTER_FIELD_NOT_FOUND',
|
||||
'ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST',
|
||||
'ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD'
|
||||
]),
|
||||
401: get_error_schema(['ERROR_NO_PERMISSION_TO_TABLE']),
|
||||
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_PAGE_SIZE_LIMIT",
|
||||
"ERROR_INVALID_PAGE",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_FOUND",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_POSSIBLE",
|
||||
"ERROR_FILTER_FIELD_NOT_FOUND",
|
||||
"ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST",
|
||||
"ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD",
|
||||
]
|
||||
),
|
||||
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
|
||||
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD
|
||||
})
|
||||
def get(self, request, table_id):
|
||||
"""
|
||||
Lists all the rows of the given table id paginated. It is also possible to
|
||||
|
@ -196,17 +208,14 @@ class RowsView(APIView):
|
|||
table = TableHandler().get_table(table_id)
|
||||
table.database.group.has_user(request.user, raise_error=True)
|
||||
|
||||
TokenHandler().check_table_permissions(request, 'read', table, False)
|
||||
search = request.GET.get('search')
|
||||
order_by = request.GET.get('order_by')
|
||||
include = request.GET.get('include')
|
||||
exclude = request.GET.get('exclude')
|
||||
TokenHandler().check_table_permissions(request, "read", table, False)
|
||||
search = request.GET.get("search")
|
||||
order_by = request.GET.get("order_by")
|
||||
include = request.GET.get("include")
|
||||
exclude = request.GET.get("exclude")
|
||||
fields = RowHandler().get_include_exclude_fields(table, include, exclude)
|
||||
|
||||
model = table.get_model(
|
||||
fields=fields,
|
||||
field_ids=[] if fields else None
|
||||
)
|
||||
model = table.get_model(fields=fields, field_ids=[] if fields else None)
|
||||
queryset = model.objects.all().enhance_by_fields()
|
||||
|
||||
if search:
|
||||
|
@ -217,16 +226,17 @@ class RowsView(APIView):
|
|||
|
||||
filter_type = (
|
||||
FILTER_TYPE_OR
|
||||
if str(request.GET.get('filter_type')).upper() == 'OR' else
|
||||
FILTER_TYPE_AND
|
||||
if str(request.GET.get("filter_type")).upper() == "OR"
|
||||
else FILTER_TYPE_AND
|
||||
)
|
||||
filter_object = {key: request.GET.getlist(key) for key in request.GET.keys()}
|
||||
queryset = queryset.filter_by_fields_object(filter_object, filter_type)
|
||||
|
||||
paginator = PageNumberPagination(limit_page_size=settings.ROW_PAGE_SIZE_LIMIT)
|
||||
page = paginator.paginate_queryset(queryset, request, self)
|
||||
serializer_class = get_row_serializer_class(model, RowSerializer,
|
||||
is_response=True)
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
serializer = serializer_class(page, many=True)
|
||||
|
||||
return paginator.get_paginated_response(serializer.data)
|
||||
|
@ -234,53 +244,57 @@ class RowsView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Creates a row in the table related to the provided '
|
||||
'value.'
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Creates a row in the table related to the provided "
|
||||
"value.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='before', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
|
||||
description='If provided then the newly created row will be '
|
||||
'positioned before the row with the provided id.'
|
||||
)
|
||||
name="before",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description="If provided then the newly created row will be "
|
||||
"positioned before the row with the provided id.",
|
||||
),
|
||||
],
|
||||
tags=['Database table rows'],
|
||||
operation_id='create_database_table_row',
|
||||
tags=["Database table rows"],
|
||||
operation_id="create_database_table_row",
|
||||
description=(
|
||||
'Creates a new row in the table if the user has access to the related '
|
||||
'table\'s group. The accepted body fields are depending on the fields '
|
||||
'that the table has. For a complete overview of fields use the '
|
||||
'**list_database_table_fields** to list them all. None of the fields are '
|
||||
'required, if they are not provided the value is going to be `null` or '
|
||||
'`false` or some default value is that is set. If you want to add a value '
|
||||
'for the field with for example id `10`, the key must be named '
|
||||
'`field_10`. Of course multiple fields can be provided in one request. In '
|
||||
'the examples below you will find all the different field types, the '
|
||||
'numbers/ids in the example are just there for example purposes, the '
|
||||
'field_ID must be replaced with the actual id of the field.'
|
||||
"Creates a new row in the table if the user has access to the related "
|
||||
"table's group. The accepted body fields are depending on the fields "
|
||||
"that the table has. For a complete overview of fields use the "
|
||||
"**list_database_table_fields** to list them all. None of the fields are "
|
||||
"required, if they are not provided the value is going to be `null` or "
|
||||
"`false` or some default value is that is set. If you want to add a value "
|
||||
"for the field with for example id `10`, the key must be named "
|
||||
"`field_10`. Of course multiple fields can be provided in one request. In "
|
||||
"the examples below you will find all the different field types, the "
|
||||
"numbers/ids in the example are just there for example purposes, the "
|
||||
"field_ID must be replaced with the actual id of the field."
|
||||
),
|
||||
request=get_example_row_serializer_class(False),
|
||||
responses={
|
||||
200: get_example_row_serializer_class(True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
401: get_error_schema(['ERROR_NO_PERMISSION_TO_TABLE']),
|
||||
404: get_error_schema([
|
||||
'ERROR_TABLE_DOES_NOT_EXIST',
|
||||
'ERROR_ROW_DOES_NOT_EXIST'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
|
||||
404: get_error_schema(
|
||||
["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_ROW_DOES_NOT_EXIST"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def post(self, request, table_id):
|
||||
"""
|
||||
Creates a new row for the given table_id. Also the post data is validated
|
||||
|
@ -288,22 +302,23 @@ class RowsView(APIView):
|
|||
"""
|
||||
|
||||
table = TableHandler().get_table(table_id)
|
||||
TokenHandler().check_table_permissions(request, 'create', table, False)
|
||||
TokenHandler().check_table_permissions(request, "create", table, False)
|
||||
model = table.get_model()
|
||||
|
||||
validation_serializer = get_row_serializer_class(model)
|
||||
data = validate_data(validation_serializer, request.data)
|
||||
|
||||
before_id = request.GET.get('before')
|
||||
before_id = request.GET.get("before")
|
||||
before = (
|
||||
RowHandler().get_row(request.user, table, before_id, model)
|
||||
if before_id else
|
||||
None
|
||||
if before_id
|
||||
else None
|
||||
)
|
||||
|
||||
row = RowHandler().create_row(request.user, table, data, model, before=before)
|
||||
serializer_class = get_row_serializer_class(model, RowSerializer,
|
||||
is_response=True)
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
serializer = serializer_class(row)
|
||||
|
||||
return Response(serializer.data)
|
||||
|
@ -316,45 +331,49 @@ class RowView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Returns the row of the table related to the provided '
|
||||
'value.'
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Returns the row of the table related to the provided "
|
||||
"value.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='row_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Returns the row related the provided value.'
|
||||
)
|
||||
name="row_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Returns the row related the provided value.",
|
||||
),
|
||||
],
|
||||
tags=['Database table rows'],
|
||||
operation_id='get_database_table_row',
|
||||
tags=["Database table rows"],
|
||||
operation_id="get_database_table_row",
|
||||
description=(
|
||||
'Fetches an existing row from the table if the user has access to the '
|
||||
'related table\'s group. The properties of the returned row depend on '
|
||||
'which fields the table has. For a complete overview of fields use the '
|
||||
'**list_database_table_fields** endpoint to list them all. In the example '
|
||||
'all field types are listed, but normally the number in field_{id} key is '
|
||||
'going to be the id of the field. The value is what the user has provided '
|
||||
'and the format of it depends on the fields type.'
|
||||
"Fetches an existing row from the table if the user has access to the "
|
||||
"related table's group. The properties of the returned row depend on "
|
||||
"which fields the table has. For a complete overview of fields use the "
|
||||
"**list_database_table_fields** endpoint to list them all. In the example "
|
||||
"all field types are listed, but normally the number in field_{id} key is "
|
||||
"going to be the id of the field. The value is what the user has provided "
|
||||
"and the format of it depends on the fields type."
|
||||
),
|
||||
responses={
|
||||
200: get_example_row_serializer_class(True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
401: get_error_schema(['ERROR_NO_PERMISSION_TO_TABLE']),
|
||||
404: get_error_schema([
|
||||
'ERROR_TABLE_DOES_NOT_EXIST',
|
||||
'ERROR_ROW_DOES_NOT_EXIST'
|
||||
])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
|
||||
404: get_error_schema(
|
||||
["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_ROW_DOES_NOT_EXIST"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE
|
||||
})
|
||||
def get(self, request, table_id, row_id):
|
||||
"""
|
||||
Responds with a serializer version of the row related to the provided row_id
|
||||
|
@ -362,12 +381,13 @@ class RowView(APIView):
|
|||
"""
|
||||
|
||||
table = TableHandler().get_table(table_id)
|
||||
TokenHandler().check_table_permissions(request, 'read', table, False)
|
||||
TokenHandler().check_table_permissions(request, "read", table, False)
|
||||
|
||||
model = table.get_model()
|
||||
row = RowHandler().get_row(request.user, table, row_id, model)
|
||||
serializer_class = get_row_serializer_class(model, RowSerializer,
|
||||
is_response=True)
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
serializer = serializer_class(row)
|
||||
|
||||
return Response(serializer.data)
|
||||
|
@ -375,51 +395,55 @@ class RowView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Updates the row in the table related to the value.'
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Updates the row in the table related to the value.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='row_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Updates the row related to the value.'
|
||||
)
|
||||
name="row_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Updates the row related to the value.",
|
||||
),
|
||||
],
|
||||
tags=['Database table rows'],
|
||||
operation_id='update_database_table_row',
|
||||
tags=["Database table rows"],
|
||||
operation_id="update_database_table_row",
|
||||
description=(
|
||||
'Updates an existing row in the table if the user has access to the '
|
||||
'related table\'s group. The accepted body fields are depending on the '
|
||||
'fields that the table has. For a complete overview of fields use the '
|
||||
'**list_database_table_fields** endpoint to list them all. None of the '
|
||||
'fields are required, if they are not provided the value is not going to '
|
||||
'be updated. If you want to update a value for the field with for example '
|
||||
'id `10`, the key must be named `field_10`. Of course multiple fields can '
|
||||
'be provided in one request. In the examples below you will find all the '
|
||||
'different field types, the numbers/ids in the example are just there for '
|
||||
'example purposes, the field_ID must be replaced with the actual id of the '
|
||||
'field.'
|
||||
"Updates an existing row in the table if the user has access to the "
|
||||
"related table's group. The accepted body fields are depending on the "
|
||||
"fields that the table has. For a complete overview of fields use the "
|
||||
"**list_database_table_fields** endpoint to list them all. None of the "
|
||||
"fields are required, if they are not provided the value is not going to "
|
||||
"be updated. If you want to update a value for the field with for example "
|
||||
"id `10`, the key must be named `field_10`. Of course multiple fields can "
|
||||
"be provided in one request. In the examples below you will find all the "
|
||||
"different field types, the numbers/ids in the example are just there for "
|
||||
"example purposes, the field_ID must be replaced with the actual id of the "
|
||||
"field."
|
||||
),
|
||||
request=get_example_row_serializer_class(False),
|
||||
responses={
|
||||
200: get_example_row_serializer_class(True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
401: get_error_schema(['ERROR_NO_PERMISSION_TO_TABLE']),
|
||||
404: get_error_schema([
|
||||
'ERROR_TABLE_DOES_NOT_EXIST',
|
||||
'ERROR_ROW_DOES_NOT_EXIST'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
401: get_error_schema(["ERROR_NO_PERMISSION_TO_TABLE"]),
|
||||
404: get_error_schema(
|
||||
["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_ROW_DOES_NOT_EXIST"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
UserFileDoesNotExist: ERROR_USER_FILE_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def patch(self, request, table_id, row_id):
|
||||
"""
|
||||
Updates the row with the given row_id for the table with the given
|
||||
|
@ -427,7 +451,7 @@ class RowView(APIView):
|
|||
"""
|
||||
|
||||
table = TableHandler().get_table(table_id)
|
||||
TokenHandler().check_table_permissions(request, 'update', table, False)
|
||||
TokenHandler().check_table_permissions(request, "update", table, False)
|
||||
|
||||
field_ids = RowHandler().extract_field_ids_from_dict(request.data)
|
||||
model = table.get_model()
|
||||
|
@ -436,8 +460,9 @@ class RowView(APIView):
|
|||
|
||||
row = RowHandler().update_row(request.user, table, row_id, data, model)
|
||||
|
||||
serializer_class = get_row_serializer_class(model, RowSerializer,
|
||||
is_response=True)
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
serializer = serializer_class(row)
|
||||
|
||||
return Response(serializer.data)
|
||||
|
@ -445,36 +470,41 @@ class RowView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Deletes the row in the table related to the value.'
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Deletes the row in the table related to the value.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='row_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Deletes the row related to the value.'
|
||||
)
|
||||
name="row_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Deletes the row related to the value.",
|
||||
),
|
||||
],
|
||||
tags=['Database table rows'],
|
||||
operation_id='delete_database_table_row',
|
||||
tags=["Database table rows"],
|
||||
operation_id="delete_database_table_row",
|
||||
description=(
|
||||
'Deletes an existing row in the table if the user has access to the '
|
||||
'table\'s group.'
|
||||
"Deletes an existing row in the table if the user has access to the "
|
||||
"table's group."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema([
|
||||
'ERROR_TABLE_DOES_NOT_EXIST',
|
||||
'ERROR_ROW_DOES_NOT_EXIST'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(
|
||||
["ERROR_TABLE_DOES_NOT_EXIST", "ERROR_ROW_DOES_NOT_EXIST"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
RowDoesNotExist: ERROR_ROW_DOES_NOT_EXIST,
|
||||
NoPermissionToTable: ERROR_NO_PERMISSION_TO_TABLE,
|
||||
}
|
||||
)
|
||||
def delete(self, request, table_id, row_id):
|
||||
"""
|
||||
Deletes an existing row with the given row_id for table with the given
|
||||
|
@ -482,7 +512,7 @@ class RowView(APIView):
|
|||
"""
|
||||
|
||||
table = TableHandler().get_table(table_id)
|
||||
TokenHandler().check_table_permissions(request, 'delete', table, False)
|
||||
TokenHandler().check_table_permissions(request, "delete", table, False)
|
||||
RowHandler().delete_row(request.user, table, row_id)
|
||||
|
||||
return Response(status=204)
|
||||
|
|
|
@ -9,13 +9,13 @@ from baserow.contrib.database.api.tables.serializers import TableSerializer

class DatabaseSerializer(ApplicationSerializer):
tables = serializers.SerializerMethodField(
help_text='This field is specific to the `database` application and contains '
'an array of tables that are in the database.'
help_text="This field is specific to the `database` application and contains "
"an array of tables that are in the database."
)

class Meta(ApplicationSerializer.Meta):
ref_name = 'DatabaseApplication'
fields = ApplicationSerializer.Meta.fields + ('tables',)
ref_name = "DatabaseApplication"
fields = ApplicationSerializer.Meta.fields + ("tables",)

@extend_schema_field(TableSerializer(many=True))
def get_tables(self, instance):
@ -4,23 +4,23 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND

ERROR_TABLE_DOES_NOT_EXIST = (
'ERROR_TABLE_DOES_NOT_EXIST',
"ERROR_TABLE_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
'The requested table does not exist.'
"The requested table does not exist.",
)
ERROR_INVALID_INITIAL_TABLE_DATA = (
'ERROR_INVALID_INITIAL_TABLE_DATA',
"ERROR_INVALID_INITIAL_TABLE_DATA",
HTTP_400_BAD_REQUEST,
'The provided table data must at least contain one row and one column.'
"The provided table data must at least contain one row and one column.",
)
ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP = (
'ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP',
"ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP",
HTTP_400_BAD_REQUEST,
'The provided table does not belong to the related group.'
"The provided table does not belong to the related group.",
)
ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED = (
'ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED',
"ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED",
HTTP_400_BAD_REQUEST,
f'The initial table data limit has been exceeded. You can provide a maximum of '
f'{settings.INITIAL_TABLE_DATA_LIMIT} rows.'
f"The initial table data limit has been exceeded. You can provide a maximum of "
f"{settings.INITIAL_TABLE_DATA_LIMIT} rows.",
)
@ -6,13 +6,16 @@ from baserow.contrib.database.table.models import Table
class TableSerializer(serializers.ModelSerializer):
class Meta:
model = Table
fields = ('id', 'name', 'order', 'database_id',)
fields = (
"id",
"name",
"order",
"database_id",
)
extra_kwargs = {
'id': {'read_only': True},
'database_id': {'read_only': True},
'order': {
'help_text': 'Lowest first.'
}
"id": {"read_only": True},
"database_id": {"read_only": True},
"order": {"help_text": "Lowest first."},
}


@ -21,32 +24,31 @@ class TableCreateSerializer(serializers.ModelSerializer):
min_length=1,
child=serializers.ListField(
child=serializers.CharField(
help_text='The value of the cell.',
allow_blank=True
help_text="The value of the cell.", allow_blank=True
),
help_text='The row containing all the values.'
help_text="The row containing all the values.",
),
default=None,
help_text='A list of rows that needs to be created as initial table data. If '
'not provided some example data is going to be created.'
help_text="A list of rows that needs to be created as initial table data. If "
"not provided some example data is going to be created.",
)
first_row_header = serializers.BooleanField(
default=False,
help_text='Indicates if the first provided row is the header. If true the '
'field names are going to be the values of the first row. Otherwise '
'they will be called "Column N"'
help_text="Indicates if the first provided row is the header. If true the "
"field names are going to be the values of the first row. Otherwise "
'they will be called "Column N"',
)

class Meta:
model = Table
fields = ('name', 'data', 'first_row_header')
fields = ("name", "data", "first_row_header")
extra_kwargs = {
'data': {'required': False},
'first_row_header': {'required': False},
"data": {"required": False},
"first_row_header": {"required": False},
}


class TableUpdateSerializer(serializers.ModelSerializer):
class Meta:
model = Table
fields = ('name',)
fields = ("name",)
@ -3,9 +3,9 @@ from django.conf.urls import url
from .views import TablesView, TableView


app_name = 'baserow.contrib.database.api.tables'
app_name = "baserow.contrib.database.api.tables"

urlpatterns = [
url(r'database/(?P<database_id>[0-9]+)/$', TablesView.as_view(), name='list'),
url(r'(?P<table_id>[0-9]+)/$', TableView.as_view(), name='item'),
url(r"database/(?P<database_id>[0-9]+)/$", TablesView.as_view(), name="list"),
url(r"(?P<table_id>[0-9]+)/$", TableView.as_view(), name="item"),
]
@ -17,13 +17,16 @@ from baserow.contrib.database.models import Database
|
|||
from baserow.contrib.database.table.models import Table
|
||||
from baserow.contrib.database.table.handler import TableHandler
|
||||
from baserow.contrib.database.table.exceptions import (
|
||||
TableDoesNotExist, InvalidInitialTableData, InitialTableDataLimitExceeded
|
||||
TableDoesNotExist,
|
||||
InvalidInitialTableData,
|
||||
InitialTableDataLimitExceeded,
|
||||
)
|
||||
|
||||
from .serializers import TableSerializer, TableCreateSerializer, TableUpdateSerializer
|
||||
from .errors import (
|
||||
ERROR_TABLE_DOES_NOT_EXIST, ERROR_INVALID_INITIAL_TABLE_DATA,
|
||||
ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED
|
||||
ERROR_TABLE_DOES_NOT_EXIST,
|
||||
ERROR_INVALID_INITIAL_TABLE_DATA,
|
||||
ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED,
|
||||
)
|
||||
|
||||
|
||||
|
@ -33,39 +36,40 @@ class TablesView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='database_id',
|
||||
name="database_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns only tables that are related to the provided '
|
||||
'value.'
|
||||
description="Returns only tables that are related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tables'],
|
||||
operation_id='list_database_tables',
|
||||
tags=["Database tables"],
|
||||
operation_id="list_database_tables",
|
||||
description=(
|
||||
'Lists all the tables that are in the database related to the '
|
||||
'`database_id` parameter if the user has access to the database\'s group. '
|
||||
'A table is exactly as the name suggests. It can hold multiple fields, '
|
||||
'each having their own type and multiple rows. They can be added via the '
|
||||
'**create_database_table_field** and **create_database_table_row** '
|
||||
'endpoints.'
|
||||
"Lists all the tables that are in the database related to the "
|
||||
"`database_id` parameter if the user has access to the database's group. "
|
||||
"A table is exactly as the name suggests. It can hold multiple fields, "
|
||||
"each having their own type and multiple rows. They can be added via the "
|
||||
"**create_database_table_field** and **create_database_table_row** "
|
||||
"endpoints."
|
||||
),
|
||||
responses={
|
||||
200: TableSerializer(many=True),
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_APPLICATION_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
def get(self, request, database_id):
|
||||
"""Lists all the tables of a database."""
|
||||
|
||||
database = CoreHandler().get_application(
|
||||
database_id,
|
||||
base_queryset=Database.objects
|
||||
database_id, base_queryset=Database.objects
|
||||
)
|
||||
database.group.has_user(request.user, raise_error=True)
|
||||
tables = Table.objects.filter(database=database)
|
||||
|
@ -75,51 +79,52 @@ class TablesView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='database_id',
|
||||
name="database_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Creates a table for the database related to the provided '
|
||||
'value.'
|
||||
description="Creates a table for the database related to the provided "
|
||||
"value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tables'],
|
||||
operation_id='create_database_table',
|
||||
tags=["Database tables"],
|
||||
operation_id="create_database_table",
|
||||
description=(
|
||||
'Creates a new table for the database related to the provided '
|
||||
'`database_id` parameter if the authorized user has access to the '
|
||||
'database\'s group.'
|
||||
"Creates a new table for the database related to the provided "
|
||||
"`database_id` parameter if the authorized user has access to the "
|
||||
"database's group."
|
||||
),
|
||||
request=TableCreateSerializer,
|
||||
responses={
|
||||
200: TableSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_INVALID_INITIAL_TABLE_DATA',
|
||||
'ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_APPLICATION_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_INVALID_INITIAL_TABLE_DATA",
|
||||
"ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
InvalidInitialTableData: ERROR_INVALID_INITIAL_TABLE_DATA,
|
||||
InitialTableDataLimitExceeded: ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
InvalidInitialTableData: ERROR_INVALID_INITIAL_TABLE_DATA,
|
||||
InitialTableDataLimitExceeded: ERROR_INITIAL_TABLE_DATA_LIMIT_EXCEEDED,
|
||||
}
|
||||
)
|
||||
@validate_body(TableCreateSerializer)
|
||||
def post(self, request, data, database_id):
|
||||
"""Creates a new table in a database."""
|
||||
|
||||
database = CoreHandler().get_application(
|
||||
database_id,
|
||||
base_queryset=Database.objects
|
||||
database_id, base_queryset=Database.objects
|
||||
)
|
||||
table = TableHandler().create_table(
|
||||
request.user,
|
||||
database,
|
||||
fill_example=True,
|
||||
**data
|
||||
request.user, database, fill_example=True, **data
|
||||
)
|
||||
serializer = TableSerializer(table)
|
||||
return Response(serializer.data)
|
||||
|
@ -131,28 +136,30 @@ class TableView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id',
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns the table related to the provided value.'
|
||||
description="Returns the table related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tables'],
|
||||
operation_id='get_database_table',
|
||||
tags=["Database tables"],
|
||||
operation_id="get_database_table",
|
||||
description=(
|
||||
'Returns the requested table if the authorized user has access to the '
|
||||
'related database\'s group.'
|
||||
"Returns the requested table if the authorized user has access to the "
|
||||
"related database's group."
|
||||
),
|
||||
responses={
|
||||
200: TableSerializer,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
def get(self, request, table_id):
|
||||
"""Responds with a serialized table instance."""
|
||||
|
||||
|
@ -164,32 +171,34 @@ class TableView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id',
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the table related to the provided value.'
|
||||
description="Updates the table related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tables'],
|
||||
operation_id='update_database_table',
|
||||
tags=["Database tables"],
|
||||
operation_id="update_database_table",
|
||||
description=(
|
||||
'Updates the existing table if the authorized user has access to the '
|
||||
'related database\'s group.'
|
||||
"Updates the existing table if the authorized user has access to the "
|
||||
"related database's group."
|
||||
),
|
||||
request=TableUpdateSerializer,
|
||||
responses={
|
||||
200: TableSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
@validate_body(TableUpdateSerializer)
|
||||
def patch(self, request, data, table_id):
|
||||
"""Updates the values a table instance."""
|
||||
|
@ -198,7 +207,7 @@ class TableView(APIView):
|
|||
request.user,
|
||||
TableHandler().get_table(table_id),
|
||||
base_queryset=Table.objects.select_for_update(),
|
||||
name=data['name']
|
||||
name=data["name"],
|
||||
)
|
||||
serializer = TableSerializer(table)
|
||||
return Response(serializer.data)
|
||||
|
@ -206,34 +215,33 @@ class TableView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='table_id',
|
||||
name="table_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the table related to the provided value.'
|
||||
description="Deletes the table related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tables'],
|
||||
operation_id='delete_database_table',
|
||||
tags=["Database tables"],
|
||||
operation_id="delete_database_table",
|
||||
description=(
|
||||
'Deletes the existing table if the authorized user has access to the '
|
||||
'related database\'s group.'
|
||||
"Deletes the existing table if the authorized user has access to the "
|
||||
"related database's group."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_TABLE_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_TABLE_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
TableDoesNotExist: ERROR_TABLE_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def delete(self, request, table_id):
|
||||
"""Deletes an existing table."""
|
||||
|
||||
TableHandler().delete_table(
|
||||
request.user,
|
||||
TableHandler().get_table(table_id)
|
||||
)
|
||||
TableHandler().delete_table(request.user, TableHandler().get_table(table_id))
|
||||
return Response(status=204)
|
||||
|
|
|
@@ -3,9 +3,7 @@ from django.utils.translation import gettext_lazy as _
from drf_spectacular.extensions import OpenApiAuthenticationExtension

from rest_framework import HTTP_HEADER_ENCODING
from rest_framework.authentication import (
BaseAuthentication, get_authorization_header
)
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed

from baserow.core.exceptions import UserNotInGroup
@@ -22,21 +20,19 @@ class TokenAuthentication(BaseAuthentication):
def authenticate(self, request):
auth = get_authorization_header(request).split()

if not auth or auth[0].lower() != b'token':
if not auth or auth[0].lower() != b"token":
return None

if len(auth) == 1:
msg = _('Invalid token header. No token provided.')
raise AuthenticationFailed({
'detail': msg,
'error': 'ERROR_INVALID_TOKEN_HEADER'
})
msg = _("Invalid token header. No token provided.")
raise AuthenticationFailed(
{"detail": msg, "error": "ERROR_INVALID_TOKEN_HEADER"}
)
elif len(auth) > 2:
msg = _('Invalid token header. Token string should not contain spaces.')
raise AuthenticationFailed({
'detail': msg,
'error': 'ERROR_INVALID_TOKEN_HEADER'
})
msg = _("Invalid token header. Token string should not contain spaces.")
raise AuthenticationFailed(
{"detail": msg, "error": "ERROR_INVALID_TOKEN_HEADER"}
)

decoded_key = auth[1].decode(HTTP_HEADER_ENCODING)
handler = TokenHandler()
@@ -44,17 +40,15 @@ class TokenAuthentication(BaseAuthentication):
try:
token = handler.get_by_key(decoded_key)
except UserNotInGroup:
msg = _('The token\'s user does not belong to the group anymore.')
raise AuthenticationFailed({
'detail': msg,
'error': 'ERROR_TOKEN_GROUP_MISMATCH'
})
msg = _("The token's user does not belong to the group anymore.")
raise AuthenticationFailed(
{"detail": msg, "error": "ERROR_TOKEN_GROUP_MISMATCH"}
)
except TokenDoesNotExist:
msg = _('The provided token does not exist.')
raise AuthenticationFailed({
'detail': msg,
'error': 'ERROR_TOKEN_DOES_NOT_EXIST'
})
msg = _("The provided token does not exist.")
raise AuthenticationFailed(
{"detail": msg, "error": "ERROR_TOKEN_DOES_NOT_EXIST"}
)

token = handler.update_token_usage(token)
request.user_token = token
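For orientation, the sketch below shows how a client is expected to present such a token, matching the `Token <key>` header parsing and the error codes raised in the hunk above. The host, table id, key value and exact row endpoint path are illustrative assumptions, not taken from this diff.

```python
# Minimal client-side sketch of the token scheme parsed above. The base URL,
# table id and key value are placeholder assumptions.
import requests

API_TOKEN = "your_token"  # hypothetical key created via the tokens endpoints
BASE_URL = "https://api.baserow.io"  # assumed host

response = requests.get(
    f"{BASE_URL}/api/database/rows/table/10/",  # assumed rows endpoint path
    headers={"Authorization": f"Token {API_TOKEN}"},
)
# A malformed header yields ERROR_INVALID_TOKEN_HEADER and an unknown key
# ERROR_TOKEN_DOES_NOT_EXIST, as raised in the handler above.
response.raise_for_status()
print(response.json())
```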
@ -62,15 +56,16 @@ class TokenAuthentication(BaseAuthentication):
|
|||
|
||||
|
||||
class JSONWebTokenAuthenticationExtension(OpenApiAuthenticationExtension):
|
||||
target_class = 'baserow.contrib.database.api.tokens.authentications.' \
|
||||
'TokenAuthentication'
|
||||
name = 'Token'
|
||||
target_class = (
|
||||
"baserow.contrib.database.api.tokens.authentications." "TokenAuthentication"
|
||||
)
|
||||
name = "Token"
|
||||
match_subclasses = True
|
||||
priority = -1
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
return {
|
||||
'type': 'http',
|
||||
'scheme': 'bearer',
|
||||
'bearerFormat': 'Token your_token',
|
||||
"type": "http",
|
||||
"scheme": "bearer",
|
||||
"bearerFormat": "Token your_token",
|
||||
}
|
||||
|
|
|
@@ -1,12 +1,12 @@
from rest_framework.status import HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND

ERROR_TOKEN_DOES_NOT_EXIST = (
'ERROR_TOKEN_DOES_NOT_EXIST',
"ERROR_TOKEN_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
'The token does not exist.'
"The token does not exist.",
)
ERROR_NO_PERMISSION_TO_TABLE = (
'ERROR_NO_PERMISSION_TO_TABLE',
"ERROR_NO_PERMISSION_TO_TABLE",
HTTP_401_UNAUTHORIZED,
'The token does not have permissions to the table.'
"The token does not have permissions to the table.",
)
@ -1,62 +1,62 @@
|
|||
token_permissions_field_value_schema = {
|
||||
'anyOf': [
|
||||
"anyOf": [
|
||||
{
|
||||
'type': 'boolean',
|
||||
'description': 'Indicating if the API token has permissions to all tables.',
|
||||
'example': True
|
||||
"type": "boolean",
|
||||
"description": "Indicating if the API token has permissions to all tables.",
|
||||
"example": True,
|
||||
},
|
||||
{
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'array',
|
||||
'minItems': 2,
|
||||
'maxItems': 2,
|
||||
'items': {
|
||||
'anyOf': [
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"minItems": 2,
|
||||
"maxItems": 2,
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
'type': 'string',
|
||||
'example': 'database',
|
||||
'description': 'First element should indicate the '
|
||||
'reference type `database` or `table`.'
|
||||
"type": "string",
|
||||
"example": "database",
|
||||
"description": "First element should indicate the "
|
||||
"reference type `database` or `table`.",
|
||||
},
|
||||
{
|
||||
'type': 'number',
|
||||
'example': 1,
|
||||
'description': 'Second element should indicate the ID of '
|
||||
'the reference.'
|
||||
}
|
||||
"type": "number",
|
||||
"example": 1,
|
||||
"description": "Second element should indicate the ID of "
|
||||
"the reference.",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
token_permissions_field_schema = {
|
||||
'type': 'object',
|
||||
'description': (
|
||||
'Indicates per operation which permissions the API token has within the whole '
|
||||
'group. If the value of for example `create` is `true`, then the token can '
|
||||
'create rows in all tables related to the group. If a list is provided with '
|
||||
"type": "object",
|
||||
"description": (
|
||||
"Indicates per operation which permissions the API token has within the whole "
|
||||
"group. If the value of for example `create` is `true`, then the token can "
|
||||
"create rows in all tables related to the group. If a list is provided with "
|
||||
'for example `[["table", 1]]` then the token only has create permissions for '
|
||||
'the table with id 1. Same goes for if a database references is provided. '
|
||||
"the table with id 1. Same goes for if a database references is provided. "
|
||||
'`[["database", 1]]` means create permissions for all tables in the database '
|
||||
'with id 1.\n\n'
|
||||
'Example:\n'
|
||||
'```json\n'
|
||||
'{\n'
|
||||
"with id 1.\n\n"
|
||||
"Example:\n"
|
||||
"```json\n"
|
||||
"{\n"
|
||||
' "create": true// Allows creating rows in all tables.\n'
|
||||
' // Allows reading rows from database 1 and table 10.\n'
|
||||
" // Allows reading rows from database 1 and table 10.\n"
|
||||
' "read": [["database", 1], ["table", 10]],\n'
|
||||
' "update": false // Denies updating rows in all tables.\n'
|
||||
' "delete": [] // Denies deleting rows in all tables.\n '
|
||||
'}\n'
|
||||
'```'
|
||||
"}\n"
|
||||
"```"
|
||||
),
|
||||
'properties': {
|
||||
'create': token_permissions_field_value_schema,
|
||||
'read': token_permissions_field_value_schema,
|
||||
'update': token_permissions_field_value_schema,
|
||||
'delete': token_permissions_field_value_schema
|
||||
}
|
||||
"properties": {
|
||||
"create": token_permissions_field_value_schema,
|
||||
"read": token_permissions_field_value_schema,
|
||||
"update": token_permissions_field_value_schema,
|
||||
"delete": token_permissions_field_value_schema,
|
||||
},
|
||||
}
|
||||
|
|
|
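As a quick illustration of the permissions format documented in the schema above, the following value mixes the boolean form and the `[type, id]` list form; the ids are made up for the example.

```python
# Hedged example of a permissions value accepted by the schema above: booleans
# grant or deny an operation for the whole group, while [["database", id],
# ["table", id]] pairs scope it to specific objects. Ids are illustrative.
permissions = {
    "create": True,                            # create rows in all tables
    "read": [["database", 1], ["table", 10]],  # read rows in database 1 and table 10
    "update": False,                           # no updates anywhere
    "delete": [],                              # an empty list also denies everywhere
}
```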
@@ -13,17 +13,19 @@ from .schemas import token_permissions_field_schema
|
||||
class TokenPermissionsField(serializers.Field):
|
||||
default_error_messages = {
|
||||
'invalid_key': _('Only create, read, update and delete keys are allowed.'),
|
||||
'invalid_value': _(
|
||||
'The value must either be a bool, or a list containing database or table '
|
||||
"invalid_key": _("Only create, read, update and delete keys are allowed."),
|
||||
"invalid_value": _(
|
||||
"The value must either be a bool, or a list containing database or table "
|
||||
'ids like [["database", 1], ["table", 1]].'
|
||||
),
|
||||
'invalid_instance_type': _('The instance type can only be a database or table.')
|
||||
"invalid_instance_type": _(
|
||||
"The instance type can only be a database or table."
|
||||
),
|
||||
}
|
||||
valid_types = ['create', 'read', 'update', 'delete']
|
||||
valid_types = ["create", "read", "update", "delete"]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs['source'] = '*'
|
||||
kwargs["source"] = "*"
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def to_internal_value(self, data):
|
||||
|
@ -59,39 +61,39 @@ class TokenPermissionsField(serializers.Field):
|
|||
databases = {}
|
||||
|
||||
if not isinstance(data, dict) or len(data) != len(self.valid_types):
|
||||
self.fail('invalid_key')
|
||||
self.fail("invalid_key")
|
||||
|
||||
for key, value in data.items():
|
||||
if key not in self.valid_types:
|
||||
self.fail('invalid_key')
|
||||
self.fail("invalid_key")
|
||||
|
||||
if not isinstance(value, bool) and not isinstance(value, list):
|
||||
self.fail('invalid_value')
|
||||
self.fail("invalid_value")
|
||||
|
||||
if isinstance(value, list):
|
||||
for instance in value:
|
||||
if (
|
||||
not isinstance(instance, list) or
|
||||
not len(instance) == 2 or
|
||||
not isinstance(instance[0], str) or
|
||||
not isinstance(instance[1], int)
|
||||
not isinstance(instance, list)
|
||||
or not len(instance) == 2
|
||||
or not isinstance(instance[0], str)
|
||||
or not isinstance(instance[1], int)
|
||||
):
|
||||
self.fail('invalid_value')
|
||||
self.fail("invalid_value")
|
||||
|
||||
instance_type, instance_id = instance
|
||||
if instance_type == 'database':
|
||||
if instance_type == "database":
|
||||
databases[instance_id] = None
|
||||
elif instance_type == 'table':
|
||||
elif instance_type == "table":
|
||||
tables[instance_id] = None
|
||||
else:
|
||||
self.fail('invalid_instance_type')
|
||||
self.fail("invalid_instance_type")
|
||||
|
||||
if len(tables) > 0:
|
||||
tables = {
|
||||
table.id: table
|
||||
for table in Table.objects.filter(
|
||||
id__in=tables.keys()
|
||||
).select_related('database')
|
||||
for table in Table.objects.filter(id__in=tables.keys()).select_related(
|
||||
"database"
|
||||
)
|
||||
}
|
||||
|
||||
if len(databases) > 0:
|
||||
|
@ -103,16 +105,16 @@ class TokenPermissionsField(serializers.Field):
|
|||
for key, value in data.items():
|
||||
if isinstance(value, list):
|
||||
for index, (instance_type, instance_id) in enumerate(value):
|
||||
if instance_type == 'database':
|
||||
if instance_type == "database":
|
||||
data[key][index] = databases[instance_id]
|
||||
elif instance_type == 'table':
|
||||
elif instance_type == "table":
|
||||
data[key][index] = tables[instance_id]
|
||||
|
||||
return {
|
||||
'create': data['create'],
|
||||
'read': data['read'],
|
||||
'update': data['update'],
|
||||
'delete': data['delete']
|
||||
"create": data["create"],
|
||||
"read": data["read"],
|
||||
"update": data["update"],
|
||||
"delete": data["delete"],
|
||||
}
|
||||
|
||||
def to_representation(self, value):
|
||||
|
@ -132,10 +134,10 @@ class TokenPermissionsField(serializers.Field):
|
|||
|
||||
if isinstance(value, Token):
|
||||
permissions = {
|
||||
'create': False,
|
||||
'read': False,
|
||||
'update': False,
|
||||
'delete': False
|
||||
"create": False,
|
||||
"read": False,
|
||||
"update": False,
|
||||
"delete": False,
|
||||
}
|
||||
|
||||
for permission in value.tokenpermission_set.all():
|
||||
|
@ -149,11 +151,11 @@ class TokenPermissionsField(serializers.Field):
|
|||
permissions[permission.type] = []
|
||||
if permission.database_id is not None:
|
||||
permissions[permission.type].append(
|
||||
('database', permission.database_id)
|
||||
("database", permission.database_id)
|
||||
)
|
||||
elif permission.table_id is not None:
|
||||
permissions[permission.type].append(
|
||||
('table', permission.table_id)
|
||||
("table", permission.table_id)
|
||||
)
|
||||
|
||||
return permissions
|
||||
|
@ -168,8 +170,7 @@ class TokenPermissionsField(serializers.Field):
|
|||
|
||||
class TokenPermissionsFieldFix(OpenApiSerializerFieldExtension):
|
||||
target_class = (
|
||||
'baserow.contrib.database.api.tokens.serializers.'
|
||||
'TokenPermissionsField'
|
||||
"baserow.contrib.database.api.tokens.serializers." "TokenPermissionsField"
|
||||
)
|
||||
|
||||
def map_serializer_field(self, auto_schema, direction):
|
||||
|
@ -181,18 +182,25 @@ class TokenSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Token
|
||||
fields = ('id', 'name', 'group', 'key', 'permissions',)
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"group",
|
||||
"key",
|
||||
"permissions",
|
||||
)
|
||||
extra_kwargs = {
|
||||
'id': {
|
||||
'read_only': True
|
||||
},
|
||||
"id": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class TokenCreateSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Token
|
||||
fields = ('name', 'group',)
|
||||
fields = (
|
||||
"name",
|
||||
"group",
|
||||
)
|
||||
|
||||
|
||||
class TokenUpdateSerializer(serializers.ModelSerializer):
|
||||
|
@ -200,12 +208,12 @@ class TokenUpdateSerializer(serializers.ModelSerializer):
|
|||
rotate_key = serializers.BooleanField(
|
||||
default=False,
|
||||
required=False,
|
||||
help_text='Indicates if a new key must be generated.'
|
||||
help_text="Indicates if a new key must be generated.",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Token
|
||||
fields = ('name', 'permissions', 'rotate_key')
|
||||
fields = ("name", "permissions", "rotate_key")
|
||||
extra_kwargs = {
|
||||
'name': {'required': False},
|
||||
"name": {"required": False},
|
||||
}
|
||||
|
|
|
@@ -3,9 +3,9 @@ from django.conf.urls import url
from .views import TokensView, TokenView


app_name = 'baserow.contrib.database.api.tokens'
app_name = "baserow.contrib.database.api.tokens"

urlpatterns = [
url(r'(?P<token_id>[0-9]+)/$', TokenView.as_view(), name='item'),
url(r'$', TokensView.as_view(), name='list'),
url(r"(?P<token_id>[0-9]+)/$", TokenView.as_view(), name="item"),
url(r"$", TokensView.as_view(), name="list"),
]
@ -12,11 +12,9 @@ from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
|
|||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.exceptions import UserNotInGroup
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.contrib.database.api.errors import (
|
||||
ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP
|
||||
)
|
||||
from baserow.contrib.database.api.errors import ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP
|
||||
from baserow.contrib.database.api.tables.errors import (
|
||||
ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP
|
||||
ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP,
|
||||
)
|
||||
from baserow.contrib.database.exceptions import DatabaseDoesNotBelongToGroup
|
||||
from baserow.contrib.database.table.exceptions import TableDoesNotBelongToGroup
|
||||
|
@ -32,51 +30,49 @@ class TokensView(APIView):
|
|||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Database tokens'],
|
||||
operation_id='list_database_tokens',
|
||||
tags=["Database tokens"],
|
||||
operation_id="list_database_tokens",
|
||||
description=(
|
||||
'Lists all the API tokens that belong to the authorized user. An API token '
|
||||
'can be used to create, read, update and delete rows in the tables of the '
|
||||
'token\'s group. It only works on the tables if the token has the correct '
|
||||
'permissions. The **Database table rows** endpoints can be used for these '
|
||||
'operations.'
|
||||
"Lists all the API tokens that belong to the authorized user. An API token "
|
||||
"can be used to create, read, update and delete rows in the tables of the "
|
||||
"token's group. It only works on the tables if the token has the correct "
|
||||
"permissions. The **Database table rows** endpoints can be used for these "
|
||||
"operations."
|
||||
),
|
||||
responses={
|
||||
200: TokenSerializer(many=True),
|
||||
}
|
||||
},
|
||||
)
|
||||
def get(self, request):
|
||||
"""Lists all the tokens of a user."""
|
||||
|
||||
tokens = Token.objects.filter(user=request.user).prefetch_related(
|
||||
'tokenpermission_set'
|
||||
"tokenpermission_set"
|
||||
)
|
||||
serializer = TokenSerializer(tokens, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@extend_schema(
|
||||
tags=['Database tokens'],
|
||||
operation_id='create_database_token',
|
||||
tags=["Database tokens"],
|
||||
operation_id="create_database_token",
|
||||
description=(
|
||||
'Creates a new API token for a given group and for the authorized user.'
|
||||
"Creates a new API token for a given group and for the authorized user."
|
||||
),
|
||||
request=TokenCreateSerializer,
|
||||
responses={
|
||||
200: TokenSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
])
|
||||
}
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions({UserNotInGroup: ERROR_USER_NOT_IN_GROUP})
|
||||
@validate_body(TokenCreateSerializer)
|
||||
def post(self, request, data):
|
||||
"""Creates a new token for the authorized user."""
|
||||
|
||||
data['group'] = CoreHandler().get_group(data.pop('group'))
|
||||
data["group"] = CoreHandler().get_group(data.pop("group"))
|
||||
token = TokenHandler().create_token(request.user, **data)
|
||||
serializer = TokenSerializer(token)
|
||||
return Response(serializer.data)
|
||||
|
@ -88,28 +84,30 @@ class TokenView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='token_id',
|
||||
name="token_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Returns the token related to the provided value.'
|
||||
description="Returns the token related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tokens'],
|
||||
operation_id='get_database_token',
|
||||
tags=["Database tokens"],
|
||||
operation_id="get_database_token",
|
||||
description=(
|
||||
'Returns the requested token if it is owned by the authorized user and'
|
||||
'if the user has access to the related group.'
|
||||
"Returns the requested token if it is owned by the authorized user and"
|
||||
"if the user has access to the related group."
|
||||
),
|
||||
responses={
|
||||
200: TokenSerializer,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_TOKEN_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_TOKEN_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
def get(self, request, token_id):
|
||||
"""Responds with a serialized token instance."""
|
||||
|
||||
|
@ -120,47 +118,50 @@ class TokenView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='token_id',
|
||||
name="token_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Updates the token related to the provided value.'
|
||||
description="Updates the token related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tokens'],
|
||||
operation_id='update_database_token',
|
||||
tags=["Database tokens"],
|
||||
operation_id="update_database_token",
|
||||
description=(
|
||||
'Updates the existing token if it is owned by the authorized user and if'
|
||||
'the user has access to the related group.'
|
||||
"Updates the existing token if it is owned by the authorized user and if"
|
||||
"the user has access to the related group."
|
||||
),
|
||||
request=TokenUpdateSerializer,
|
||||
responses={
|
||||
200: TokenSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION',
|
||||
'ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP',
|
||||
'ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_TOKEN_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP",
|
||||
"ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_TOKEN_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
DatabaseDoesNotBelongToGroup: ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP,
|
||||
TableDoesNotBelongToGroup: ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
DatabaseDoesNotBelongToGroup: ERROR_DATABASE_DOES_NOT_BELONG_TO_GROUP,
|
||||
TableDoesNotBelongToGroup: ERROR_TABLE_DOES_NOT_BELONG_TO_GROUP,
|
||||
}
|
||||
)
|
||||
@validate_body(TokenUpdateSerializer)
|
||||
def patch(self, request, data, token_id):
|
||||
"""Updates the values of a token."""
|
||||
|
||||
token = TokenHandler().get_token(
|
||||
request.user,
|
||||
token_id,
|
||||
base_queryset=Token.objects.select_for_update()
|
||||
request.user, token_id, base_queryset=Token.objects.select_for_update()
|
||||
)
|
||||
permissions = data.pop('permissions', None)
|
||||
rotate_key = data.pop('rotate_key', False)
|
||||
permissions = data.pop("permissions", None)
|
||||
rotate_key = data.pop("rotate_key", False)
|
||||
|
||||
if len(data) > 0:
|
||||
token = TokenHandler().update_token(request.user, token, **data)
|
||||
|
@ -177,29 +178,31 @@ class TokenView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='token_id',
|
||||
name="token_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description='Deletes the token related to the provided value.'
|
||||
description="Deletes the token related to the provided value.",
|
||||
)
|
||||
],
|
||||
tags=['Database tokens'],
|
||||
operation_id='delete_database_token',
|
||||
tags=["Database tokens"],
|
||||
operation_id="delete_database_token",
|
||||
description=(
|
||||
'Deletes the existing token if it is owned by the authorized user and if'
|
||||
'the user has access to the related group.'
|
||||
"Deletes the existing token if it is owned by the authorized user and if"
|
||||
"the user has access to the related group."
|
||||
),
|
||||
responses={
|
||||
204: None,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_TOKEN_DOES_NOT_EXIST'])
|
||||
}
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_TOKEN_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions({
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP
|
||||
})
|
||||
@map_exceptions(
|
||||
{
|
||||
TokenDoesNotExist: ERROR_TOKEN_DOES_NOT_EXIST,
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
}
|
||||
)
|
||||
def delete(self, request, token_id):
|
||||
"""Deletes an existing token."""
|
||||
|
||||
|
|
|
@@ -6,12 +6,12 @@ from .fields import urls as field_urls
from .rows import urls as row_urls
from .tokens import urls as token_urls

app_name = 'baserow.contrib.database.api'
app_name = "baserow.contrib.database.api"

urlpatterns = [
path('tables/', include(table_urls, namespace='tables')),
path('views/', include(view_urls, namespace='views')),
path('fields/', include(field_urls, namespace='fields')),
path('rows/', include(row_urls, namespace='rows')),
path('tokens/', include(token_urls, namespace='tokens'))
path("tables/", include(table_urls, namespace="tables")),
path("views/", include(view_urls, namespace="views")),
path("fields/", include(field_urls, namespace="fields")),
path("rows/", include(row_urls, namespace="rows")),
path("tokens/", include(token_urls, namespace="tokens")),
]
@@ -2,47 +2,47 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
|
||||
|
||||
ERROR_VIEW_DOES_NOT_EXIST = (
|
||||
'ERROR_VIEW_DOES_NOT_EXIST',
|
||||
"ERROR_VIEW_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The requested view does not exist.'
|
||||
"The requested view does not exist.",
|
||||
)
|
||||
ERROR_VIEW_FILTER_DOES_NOT_EXIST = (
|
||||
'ERROR_VIEW_FILTER_DOES_NOT_EXIST',
|
||||
"ERROR_VIEW_FILTER_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The view filter does not exist.'
|
||||
"The view filter does not exist.",
|
||||
)
|
||||
ERROR_VIEW_FILTER_NOT_SUPPORTED = (
|
||||
'ERROR_VIEW_FILTER_NOT_SUPPORTED',
|
||||
"ERROR_VIEW_FILTER_NOT_SUPPORTED",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'Filtering is not supported for the view type.'
|
||||
"Filtering is not supported for the view type.",
|
||||
)
|
||||
ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST = (
|
||||
'ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST',
|
||||
"ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The view filter type {e.type_name} doesn\'t exist.'
|
||||
"The view filter type {e.type_name} doesn't exist.",
|
||||
)
|
||||
ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD = (
|
||||
'ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD',
|
||||
"ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The filter {e.filter_type} is not compatible with field type {e.field_type}.'
|
||||
"The filter {e.filter_type} is not compatible with field type {e.field_type}.",
|
||||
)
|
||||
ERROR_VIEW_SORT_DOES_NOT_EXIST = (
|
||||
'ERROR_VIEW_SORT_DOES_NOT_EXIST',
|
||||
"ERROR_VIEW_SORT_DOES_NOT_EXIST",
|
||||
HTTP_404_NOT_FOUND,
|
||||
'The view sort does not exist.'
|
||||
"The view sort does not exist.",
|
||||
)
|
||||
ERROR_VIEW_SORT_NOT_SUPPORTED = (
|
||||
'ERROR_VIEW_SORT_NOT_SUPPORTED',
|
||||
"ERROR_VIEW_SORT_NOT_SUPPORTED",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'Sorting is not supported for the view type.'
|
||||
"Sorting is not supported for the view type.",
|
||||
)
|
||||
ERROR_VIEW_SORT_FIELD_ALREADY_EXISTS = (
|
||||
'ERROR_VIEW_SORT_FIELD_ALREADY_EXISTS',
|
||||
"ERROR_VIEW_SORT_FIELD_ALREADY_EXISTS",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'A sort with the field already exists in the view.'
|
||||
"A sort with the field already exists in the view.",
|
||||
)
|
||||
ERROR_VIEW_SORT_FIELD_NOT_SUPPORTED = (
|
||||
'ERROR_VIEW_SORT_FIELD_NOT_SUPPORTED',
|
||||
"ERROR_VIEW_SORT_FIELD_NOT_SUPPORTED",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
'The field does not support view sorting.'
|
||||
"The field does not support view sorting.",
|
||||
)
|
||||
|
|
|
@@ -2,13 +2,13 @@ from rest_framework.status import HTTP_404_NOT_FOUND, HTTP_400_BAD_REQUEST


ERROR_GRID_DOES_NOT_EXIST = (
'ERROR_GRID_DOES_NOT_EXIST',
"ERROR_GRID_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
'The requested grid view does not exist.'
"The requested grid view does not exist.",
)

ERROR_UNRELATED_FIELD = (
'ERROR_UNRELATED_FIELD',
"ERROR_UNRELATED_FIELD",
HTTP_400_BAD_REQUEST,
'The field is not related to the provided grid view.'
"The field is not related to the provided grid view.",
)
@@ -1,32 +1,32 @@
grid_view_field_options_schema = {
|
||||
'type': 'object',
|
||||
'description': 'An object containing the field id as key and the '
|
||||
'properties related to view as value.',
|
||||
'properties': {
|
||||
'1': {
|
||||
'type': 'object',
|
||||
'description': 'Properties of field with id 1 of the related view.',
|
||||
'properties': {
|
||||
'width': {
|
||||
'type': 'integer',
|
||||
'example': 200,
|
||||
'description': 'The width of the table field in the related view.'
|
||||
"type": "object",
|
||||
"description": "An object containing the field id as key and the "
|
||||
"properties related to view as value.",
|
||||
"properties": {
|
||||
"1": {
|
||||
"type": "object",
|
||||
"description": "Properties of field with id 1 of the related view.",
|
||||
"properties": {
|
||||
"width": {
|
||||
"type": "integer",
|
||||
"example": 200,
|
||||
"description": "The width of the table field in the related view.",
|
||||
},
|
||||
'hidden': {
|
||||
'type': 'boolean',
|
||||
'example': True,
|
||||
'description': 'Whether or not the field should be hidden in the '
|
||||
'current view.'
|
||||
"hidden": {
|
||||
"type": "boolean",
|
||||
"example": True,
|
||||
"description": "Whether or not the field should be hidden in the "
|
||||
"current view.",
|
||||
},
|
||||
'order': {
|
||||
'type': 'integer',
|
||||
'example': 0,
|
||||
'description': 'The position that the field has within the view, '
|
||||
'lowest first. If there is another field with the '
|
||||
'same order value then the field with the lowest '
|
||||
'id must be shown first.'
|
||||
}
|
||||
}
|
||||
"order": {
|
||||
"type": "integer",
|
||||
"example": 0,
|
||||
"description": "The position that the field has within the view, "
|
||||
"lowest first. If there is another field with the "
|
||||
"same order value then the field with the lowest "
|
||||
"id must be shown first.",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
|
|
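A small illustrative value matching `grid_view_field_options_schema` above; the field ids and option values are invented for the example.

```python
# The keys are field ids and each entry carries the per-view settings for that
# field, as described by the schema above. Ids and values are placeholders.
field_options = {
    "1": {"width": 200, "hidden": False, "order": 0},
    "2": {"width": 120, "hidden": True, "order": 1},
}
```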
@ -10,13 +10,13 @@ from .schemas import grid_view_field_options_schema
|
|||
|
||||
class GridViewFieldOptionsField(serializers.Field):
|
||||
default_error_messages = {
|
||||
'invalid_key': _('Field option key must be numeric.'),
|
||||
'invalid_value': _('Must be valid field options.')
|
||||
"invalid_key": _("Field option key must be numeric."),
|
||||
"invalid_value": _("Must be valid field options."),
|
||||
}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs['source'] = '*'
|
||||
kwargs['read_only'] = False
|
||||
kwargs["source"] = "*"
|
||||
kwargs["read_only"] = False
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def to_internal_value(self, data):
|
||||
|
@ -39,13 +39,11 @@ class GridViewFieldOptionsField(serializers.Field):
|
|||
|
||||
internal = {}
|
||||
for key, value in data.items():
|
||||
if not (
|
||||
isinstance(key, int) or (isinstance(key, str) and key.isnumeric())
|
||||
):
|
||||
self.fail('invalid_key')
|
||||
if not (isinstance(key, int) or (isinstance(key, str) and key.isnumeric())):
|
||||
self.fail("invalid_key")
|
||||
serializer = GridViewFieldOptionsSerializer(data=value)
|
||||
if not serializer.is_valid():
|
||||
self.fail('invalid_value')
|
||||
self.fail("invalid_value")
|
||||
internal[int(key)] = serializer.data
|
||||
return internal
|
||||
|
||||
|
@ -69,10 +67,11 @@ class GridViewFieldOptionsField(serializers.Field):
|
|||
# If the fields are in the context we can pass them into the
|
||||
# `get_field_options` call so that they don't have to be fetched from the
|
||||
# database again.
|
||||
fields = self.context.get('fields')
|
||||
fields = self.context.get("fields")
|
||||
return {
|
||||
field_options.field_id:
|
||||
GridViewFieldOptionsSerializer(field_options).data
|
||||
field_options.field_id: GridViewFieldOptionsSerializer(
|
||||
field_options
|
||||
).data
|
||||
for field_options in value.get_field_options(True, fields)
|
||||
}
|
||||
else:
|
||||
|
@ -81,8 +80,8 @@ class GridViewFieldOptionsField(serializers.Field):
|
|||
|
||||
class GridViewFieldOptionsFieldFix(OpenApiSerializerFieldExtension):
|
||||
target_class = (
|
||||
'baserow.contrib.database.api.views.grid.serializers.'
|
||||
'GridViewFieldOptionsField'
|
||||
"baserow.contrib.database.api.views.grid.serializers."
|
||||
"GridViewFieldOptionsField"
|
||||
)
|
||||
|
||||
def map_serializer_field(self, auto_schema, direction):
|
||||
|
@ -95,13 +94,13 @@ class GridViewSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = GridView
|
||||
fields = ('field_options', 'filters_disabled')
|
||||
fields = ("field_options", "filters_disabled")
|
||||
|
||||
|
||||
class GridViewFieldOptionsSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GridViewFieldOptions
|
||||
fields = ('width', 'hidden', 'order')
|
||||
fields = ("width", "hidden", "order")
|
||||
|
||||
|
||||
class GridViewFilterSerializer(serializers.Serializer):
|
||||
|
@ -110,11 +109,11 @@ class GridViewFilterSerializer(serializers.Serializer):
|
|||
required=False,
|
||||
default=None,
|
||||
child=serializers.IntegerField(),
|
||||
help_text='Only the fields related to the provided ids are added to the '
|
||||
'response. If None are provided all fields will be returned.'
|
||||
help_text="Only the fields related to the provided ids are added to the "
|
||||
"response. If None are provided all fields will be returned.",
|
||||
)
|
||||
row_ids = serializers.ListField(
|
||||
allow_empty=False,
|
||||
child=serializers.IntegerField(),
|
||||
help_text='Only rows related to the provided ids are added to the response.'
|
||||
help_text="Only rows related to the provided ids are added to the response.",
|
||||
)
|
||||
|
|
|
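For reference, a minimal request body accepted by `GridViewFilterSerializer` above; the ids are placeholders.

```python
# Only the listed rows and fields are returned by the filter endpoint that
# uses this serializer. Ids are illustrative.
payload = {
    "field_ids": [1, 2],  # optional; omit (or send null) to include all fields
    "row_ids": [10, 11],  # required and may not be empty
}
```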
@@ -3,8 +3,8 @@ from django.conf.urls import url
from .views import GridViewView


app_name = 'baserow.contrib.database.api.views.grid'
app_name = "baserow.contrib.database.api.views.grid"

urlpatterns = [
url(r'(?P<view_id>[0-9]+)/$', GridViewView.as_view(), name='list'),
url(r"(?P<view_id>[0-9]+)/$", GridViewView.as_view(), name="list"),
]
@ -12,15 +12,17 @@ from baserow.api.pagination import PageNumberPagination
|
|||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.exceptions import UserNotInGroup
|
||||
from baserow.contrib.database.api.rows.serializers import (
|
||||
get_row_serializer_class, RowSerializer,
|
||||
example_pagination_row_serializer_class_with_field_options
|
||||
get_row_serializer_class,
|
||||
RowSerializer,
|
||||
example_pagination_row_serializer_class_with_field_options,
|
||||
)
|
||||
from baserow.contrib.database.api.rows.serializers import (
|
||||
get_example_row_serializer_class
|
||||
get_example_row_serializer_class,
|
||||
)
|
||||
from baserow.contrib.database.api.views.grid.serializers import GridViewSerializer
|
||||
from baserow.contrib.database.views.exceptions import (
|
||||
ViewDoesNotExist, UnrelatedFieldError
|
||||
ViewDoesNotExist,
|
||||
UnrelatedFieldError,
|
||||
)
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.contrib.database.views.models import GridView
|
||||
|
@@ -33,7 +35,7 @@ class GridViewView(APIView):
permission_classes = (IsAuthenticated,)

def get_permissions(self):
if self.request.method == 'GET':
if self.request.method == "GET":
return [AllowAny()]

return super().get_permissions()
@ -41,81 +43,97 @@ class GridViewView(APIView):
|
|||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='view_id', location=OpenApiParameter.PATH, type=OpenApiTypes.INT,
|
||||
description='Returns only rows that belong to the related view\'s '
|
||||
'table.'
|
||||
name="view_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Returns only rows that belong to the related view's "
|
||||
"table.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='count', location=OpenApiParameter.PATH, type=OpenApiTypes.NONE,
|
||||
description='If provided only the count will be returned.'
|
||||
name="count",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.NONE,
|
||||
description="If provided only the count will be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='include', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR,
|
||||
description=(
|
||||
'Can contain `field_options` which will add an object with the '
|
||||
'same name to the response if included. That object contains '
|
||||
'user defined view settings for each field. For example the '
|
||||
'field\'s width is included in here.'
|
||||
)
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='limit', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
|
||||
description='Defines how many rows should be returned.'
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='offset', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
|
||||
description='Can only be used in combination with the `limit` '
|
||||
'parameter and defines from which offset the rows should '
|
||||
'be returned.'
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='page', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
|
||||
description='Defines which page of rows should be returned. Either '
|
||||
'the `page` or `limit` can be provided, not both.'
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='size', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT,
|
||||
description='Can only be used in combination with the `page` parameter '
|
||||
'and defines how many rows should be returned.'
|
||||
),
|
||||
OpenApiParameter(
|
||||
name='search',
|
||||
name="include",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description='If provided only rows with data that matches the search '
|
||||
'query are going to be returned.'
|
||||
description=(
|
||||
"Can contain `field_options` which will add an object with the "
|
||||
"same name to the response if included. That object contains "
|
||||
"user defined view settings for each field. For example the "
|
||||
"field's width is included in here."
|
||||
),
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="limit",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Defines how many rows should be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="offset",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Can only be used in combination with the `limit` "
|
||||
"parameter and defines from which offset the rows should "
|
||||
"be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="page",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Defines which page of rows should be returned. Either "
|
||||
"the `page` or `limit` can be provided, not both.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="size",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
description="Can only be used in combination with the `page` parameter "
|
||||
"and defines how many rows should be returned.",
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="search",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description="If provided only rows with data that matches the search "
|
||||
"query are going to be returned.",
|
||||
),
|
||||
],
|
||||
tags=['Database table grid view'],
|
||||
operation_id='list_database_table_grid_view_rows',
|
||||
tags=["Database table grid view"],
|
||||
operation_id="list_database_table_grid_view_rows",
|
||||
description=(
|
||||
'Lists the requested rows of the view\'s table related to the provided '
|
||||
'`view_id` if the authorized user has access to the database\'s group. '
|
||||
'The response is paginated either by a limit/offset or page/size style. '
|
||||
'The style depends on the provided GET parameters. The properties of the '
|
||||
'returned rows depends on which fields the table has. For a complete '
|
||||
'overview of fields use the **list_database_table_fields** endpoint to '
|
||||
'list them all. In the example all field types are listed, but normally '
|
||||
'the number in field_{id} key is going to be the id of the field. '
|
||||
'The value is what the user has provided and the format of it depends on '
|
||||
'the fields type.\n'
|
||||
'\n'
|
||||
'The filters and sortings are automatically applied. To get a full '
|
||||
'overview of the applied filters and sortings you can use the '
|
||||
'`list_database_table_view_filters` and '
|
||||
'`list_database_table_view_sortings` endpoints.'
|
||||
"Lists the requested rows of the view's table related to the provided "
|
||||
"`view_id` if the authorized user has access to the database's group. "
|
||||
"The response is paginated either by a limit/offset or page/size style. "
|
||||
"The style depends on the provided GET parameters. The properties of the "
|
||||
"returned rows depends on which fields the table has. For a complete "
|
||||
"overview of fields use the **list_database_table_fields** endpoint to "
|
||||
"list them all. In the example all field types are listed, but normally "
|
||||
"the number in field_{id} key is going to be the id of the field. "
|
||||
"The value is what the user has provided and the format of it depends on "
|
||||
"the fields type.\n"
|
||||
"\n"
|
||||
"The filters and sortings are automatically applied. To get a full "
|
||||
"overview of the applied filters and sortings you can use the "
|
||||
"`list_database_table_view_filters` and "
|
||||
"`list_database_table_view_sortings` endpoints."
|
||||
),
|
||||
responses={
|
||||
200: example_pagination_row_serializer_class_with_field_options,
|
||||
400: get_error_schema(['ERROR_USER_NOT_IN_GROUP']),
|
||||
404: get_error_schema(['ERROR_GRID_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
|
||||
404: get_error_schema(["ERROR_GRID_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST
|
||||
})
|
||||
@allowed_includes('field_options')
|
||||
@allowed_includes("field_options")
|
||||
def get(self, request, view_id, field_options):
|
||||
"""
|
||||
Lists all the rows of a grid view, paginated either by a page or offset/limit.
|
||||
|
@@ -126,12 +144,13 @@ class GridViewView(APIView):
`field_options` are provided in the include GET parameter.
"""

search = request.GET.get('search')
search = request.GET.get("search")

view_handler = ViewHandler()
view = view_handler.get_view(view_id, GridView)
view.table.database.group.has_user(request.user, raise_error=True,
allow_if_template=True)
view.table.database.group.has_user(
request.user, raise_error=True, allow_if_template=True
)

model = view.table.get_model()
queryset = model.objects.all().enhance_by_fields()
@@ -142,8 +161,8 @@ class GridViewView(APIView):
if search:
queryset = queryset.search_all_fields(search)

if 'count' in request.GET:
return Response({'count': queryset.count()})
if "count" in request.GET:
return Response({"count": queryset.count()})

if LimitOffsetPagination.limit_query_param in request.GET:
paginator = LimitOffsetPagination()
@@ -151,8 +170,9 @@ class GridViewView(APIView):
paginator = PageNumberPagination()

page = paginator.paginate_queryset(queryset, request, self)
serializer_class = get_row_serializer_class(model, RowSerializer,
is_response=True)
serializer_class = get_row_serializer_class(
model, RowSerializer, is_response=True
)
serializer = serializer_class(page, many=True)

response = paginator.get_paginated_response(serializer.data)
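To make the branching above concrete, these query strings sketch the three ways the grid view listing can be called; the URL prefix and view id are assumptions, only the parameter names come from the code.

```python
# `limit`/`offset` selects LimitOffsetPagination, `page`/`size` selects
# PageNumberPagination, and a bare `count` returns only the row count.
list_by_offset = "/api/database/views/grid/1/?limit=50&offset=100"  # assumed path
list_by_page = "/api/database/views/grid/1/?page=3&size=50"
count_only = "/api/database/views/grid/1/?count"
```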
@ -162,50 +182,52 @@ class GridViewView(APIView):
|
|||
# field options from the database and creates them if they don't exist,
|
||||
# but when added to the context the fields don't have to be fetched from
|
||||
# the database again when checking if they exist.
|
||||
context = {'fields': [o['field'] for o in model._field_objects.values()]}
|
||||
context = {"fields": [o["field"] for o in model._field_objects.values()]}
|
||||
serialized_view = GridViewSerializer(view, context=context).data
|
||||
response.data['field_options'] = serialized_view['field_options']
|
||||
response.data["field_options"] = serialized_view["field_options"]
|
||||
|
||||
return response
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='view_id',
|
||||
name="view_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
required=False,
|
||||
description='Returns only rows that belong to the related view\'s '
|
||||
'table.'
|
||||
description="Returns only rows that belong to the related view's "
|
||||
"table.",
|
||||
)
|
||||
],
|
||||
tags=['Database table grid view'],
|
||||
operation_id='filter_database_table_grid_view_rows',
|
||||
tags=["Database table grid view"],
|
||||
operation_id="filter_database_table_grid_view_rows",
|
||||
description=(
|
||||
'Lists only the rows and fields that match the request. Only the rows '
|
||||
'with the ids that are in the `row_ids` list are going to be returned. '
|
||||
'Same goes for the fields, only the fields with the ids in the '
|
||||
'`field_ids` are going to be returned. This endpoint could be used to '
|
||||
'refresh data after changes something. For example in the web frontend '
|
||||
'after changing a field type, the data of the related cells will be '
|
||||
'refreshed using this endpoint. In the example all field types are listed, '
|
||||
'but normally the number in field_{id} key is going to be the id of the '
|
||||
'field. The value is what the user has provided and the format of it '
|
||||
'depends on the fields type.'
|
||||
"Lists only the rows and fields that match the request. Only the rows "
|
||||
"with the ids that are in the `row_ids` list are going to be returned. "
|
||||
"Same goes for the fields, only the fields with the ids in the "
|
||||
"`field_ids` are going to be returned. This endpoint could be used to "
|
||||
"refresh data after changes something. For example in the web frontend "
|
||||
"after changing a field type, the data of the related cells will be "
|
||||
"refreshed using this endpoint. In the example all field types are listed, "
|
||||
"but normally the number in field_{id} key is going to be the id of the "
|
||||
"field. The value is what the user has provided and the format of it "
|
||||
"depends on the fields type."
|
||||
),
|
||||
request=GridViewFilterSerializer,
|
||||
responses={
|
||||
200: get_example_row_serializer_class(True)(many=True),
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP', 'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GRID_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
["ERROR_USER_NOT_IN_GROUP", "ERROR_REQUEST_BODY_VALIDATION"]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GRID_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST
|
||||
})
|
||||
@validate_body(GridViewFilterSerializer)
|
||||
def post(self, request, view_id, data):
|
||||
"""
|
||||
|
@ -216,48 +238,53 @@ class GridViewView(APIView):
|
|||
view = ViewHandler().get_view(view_id, GridView)
|
||||
view.table.database.group.has_user(request.user, raise_error=True)
|
||||
|
||||
model = view.table.get_model(field_ids=data['field_ids'])
|
||||
results = model.objects.filter(pk__in=data['row_ids'])
|
||||
model = view.table.get_model(field_ids=data["field_ids"])
|
||||
results = model.objects.filter(pk__in=data["row_ids"])
|
||||
|
||||
serializer_class = get_row_serializer_class(model, RowSerializer,
|
||||
is_response=True)
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
serializer = serializer_class(results, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='view_id',
|
||||
name="view_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.INT,
|
||||
required=False,
|
||||
description='Updates the field related to the provided `view_id` '
|
||||
'parameter.'
|
||||
description="Updates the field related to the provided `view_id` "
|
||||
"parameter.",
|
||||
)
|
||||
],
|
||||
tags=['Database table grid view'],
|
||||
operation_id='update_database_table_grid_view_field_options',
|
||||
tags=["Database table grid view"],
|
||||
operation_id="update_database_table_grid_view_field_options",
|
||||
description=(
|
||||
'Updates the field options of a `grid` view. The field options are unique '
|
||||
'options per field for a view. This could for example be used to update '
|
||||
'the field width if the user changes it.'
|
||||
"Updates the field options of a `grid` view. The field options are unique "
|
||||
"options per field for a view. This could for example be used to update "
|
||||
"the field width if the user changes it."
|
||||
),
|
||||
request=GridViewSerializer,
|
||||
responses={
|
||||
200: GridViewSerializer,
|
||||
400: get_error_schema([
|
||||
'ERROR_USER_NOT_IN_GROUP',
|
||||
'ERROR_UNRELATED_FIELD',
|
||||
'ERROR_REQUEST_BODY_VALIDATION'
|
||||
]),
|
||||
404: get_error_schema(['ERROR_GRID_DOES_NOT_EXIST'])
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_USER_NOT_IN_GROUP",
|
||||
"ERROR_UNRELATED_FIELD",
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_GRID_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST,
|
||||
UnrelatedFieldError: ERROR_UNRELATED_FIELD,
|
||||
}
|
||||
)
|
||||
@map_exceptions({
|
||||
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_GRID_DOES_NOT_EXIST,
|
||||
UnrelatedFieldError: ERROR_UNRELATED_FIELD
|
||||
})
|
||||
@validate_body(GridViewSerializer)
|
||||
def patch(self, request, view_id, data):
|
||||
"""
|
||||
|
@ -275,8 +302,6 @@ class GridViewView(APIView):
|
|||
handler = ViewHandler()
|
||||
view = handler.get_view(view_id, GridView)
|
||||
handler.update_grid_view_field_options(
|
||||
request.user,
|
||||
view,
|
||||
data['field_options']
|
||||
request.user, view, data["field_options"]
|
||||
)
|
||||
return Response(GridViewSerializer(view).data)
|
||||
|
|
|
@ -7,7 +7,8 @@ from rest_framework import serializers
|
|||
|
||||
from baserow.contrib.database.api.serializers import TableSerializer
|
||||
from baserow.contrib.database.views.registries import (
|
||||
view_type_registry, view_filter_type_registry
|
||||
view_type_registry,
|
||||
view_filter_type_registry,
|
||||
)
|
||||
from baserow.contrib.database.views.models import View, ViewFilter, ViewSort
|
||||
|
||||
|
@ -15,89 +16,84 @@ from baserow.contrib.database.views.models import View, ViewFilter, ViewSort
|
|||
class ViewFilterSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = ViewFilter
|
||||
fields = ('id', 'view', 'field', 'type', 'value')
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True}
|
||||
}
|
||||
fields = ("id", "view", "field", "type", "value")
|
||||
extra_kwargs = {"id": {"read_only": True}}
|
||||
|
||||
|
||||
class CreateViewFilterSerializer(serializers.ModelSerializer):
|
||||
type = serializers.ChoiceField(
|
||||
choices=lazy(view_filter_type_registry.get_types, list)(),
|
||||
help_text=ViewFilter._meta.get_field('type').help_text
|
||||
help_text=ViewFilter._meta.get_field("type").help_text,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = ViewFilter
|
||||
fields = ('field', 'type', 'value')
|
||||
extra_kwargs = {
|
||||
'value': {'default': ''}
|
||||
}
|
||||
fields = ("field", "type", "value")
|
||||
extra_kwargs = {"value": {"default": ""}}
|
||||
|
||||
|
||||
class UpdateViewFilterSerializer(serializers.ModelSerializer):
|
||||
type = serializers.ChoiceField(
|
||||
choices=lazy(view_filter_type_registry.get_types, list)(),
|
||||
required=False,
|
||||
help_text=ViewFilter._meta.get_field('type').help_text
|
||||
help_text=ViewFilter._meta.get_field("type").help_text,
|
||||
)
|
||||
|
||||
class Meta(CreateViewFilterSerializer.Meta):
|
||||
model = ViewFilter
|
||||
fields = ('field', 'type', 'value')
|
||||
extra_kwargs = {
|
||||
'field': {'required': False},
|
||||
'value': {'required': False}
|
||||
}
|
||||
fields = ("field", "type", "value")
|
||||
extra_kwargs = {"field": {"required": False}, "value": {"required": False}}
|
||||
|
||||
|
||||
class ViewSortSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = ViewSort
|
||||
fields = ('id', 'view', 'field', 'order')
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True}
|
||||
}
|
||||
fields = ("id", "view", "field", "order")
|
||||
extra_kwargs = {"id": {"read_only": True}}
|
||||
|
||||
|
||||
class CreateViewSortSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = ViewSort
|
||||
fields = ('field', 'order')
|
||||
fields = ("field", "order")
|
||||
extra_kwargs = {
|
||||
'order': {'default': ViewSort._meta.get_field('order').default},
|
||||
"order": {"default": ViewSort._meta.get_field("order").default},
|
||||
}
|
||||
|
||||
|
||||
class UpdateViewSortSerializer(serializers.ModelSerializer):
|
||||
class Meta(CreateViewFilterSerializer.Meta):
|
||||
model = ViewSort
|
||||
fields = ('field', 'order')
|
||||
extra_kwargs = {
|
||||
'field': {'required': False},
|
||||
'order': {'required': False}
|
||||
}
|
||||
fields = ("field", "order")
|
||||
extra_kwargs = {"field": {"required": False}, "order": {"required": False}}
|
||||
|
||||
|
||||
class ViewSerializer(serializers.ModelSerializer):
|
||||
type = serializers.SerializerMethodField()
|
||||
table = TableSerializer()
|
||||
filters = ViewFilterSerializer(many=True, source='viewfilter_set', required=False)
|
||||
sortings = ViewSortSerializer(many=True, source='viewsort_set', required=False)
|
||||
filters = ViewFilterSerializer(many=True, source="viewfilter_set", required=False)
|
||||
sortings = ViewSortSerializer(many=True, source="viewsort_set", required=False)
|
||||
|
||||
class Meta:
|
||||
model = View
|
||||
fields = ('id', 'table_id', 'name', 'order', 'type', 'table', 'filter_type',
|
||||
'filters', 'sortings', 'filters_disabled')
|
||||
extra_kwargs = {
|
||||
'id': {'read_only': True},
|
||||
'table_id': {'read_only': True}
|
||||
}
|
||||
fields = (
|
||||
"id",
|
||||
"table_id",
|
||||
"name",
|
||||
"order",
|
||||
"type",
|
||||
"table",
|
||||
"filter_type",
|
||||
"filters",
|
||||
"sortings",
|
||||
"filters_disabled",
|
||||
)
|
||||
extra_kwargs = {"id": {"read_only": True}, "table_id": {"read_only": True}}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
context = kwargs.setdefault("context", {})
|
||||
context['include_filters'] = kwargs.pop('filters', False)
|
||||
context['include_sortings'] = kwargs.pop('sortings', False)
|
||||
context["include_filters"] = kwargs.pop("filters", False)
|
||||
context["include_sortings"] = kwargs.pop("sortings", False)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def to_representation(self, instance):
|
||||
|
@ -105,11 +101,11 @@ class ViewSerializer(serializers.ModelSerializer):
|
|||
# drf-spectacular will not know that filters and sortings exist as optional
|
||||
# return fields. This way the fields are still dynamic and also show up in the
|
||||
# OpenAPI specification.
|
||||
if not self.context['include_filters']:
|
||||
self.fields.pop('filters', None)
|
||||
if not self.context["include_filters"]:
|
||||
self.fields.pop("filters", None)
|
||||
|
||||
if not self.context['include_sortings']:
|
||||
self.fields.pop('sortings', None)
|
||||
if not self.context["include_sortings"]:
|
||||
self.fields.pop("sortings", None)
|
||||
|
||||
return super().to_representation(instance)
|
||||
|
||||
|
@ -117,7 +113,7 @@ class ViewSerializer(serializers.ModelSerializer):
|
|||
def get_type(self, instance):
|
||||
# It could be that the view related to the instance is already in the context
|
||||
# else we can call the specific_class property to find it.
|
||||
view = self.context.get('instance_type')
|
||||
view = self.context.get("instance_type")
|
||||
if not view:
|
||||
view = view_type_registry.get_by_model(instance.specific_class)
|
||||
|
||||
|
@ -125,21 +121,19 @@ class ViewSerializer(serializers.ModelSerializer):
|
|||
|
||||
|
||||
class CreateViewSerializer(serializers.ModelSerializer):
|
||||
type = serializers.ChoiceField(
|
||||
choices=lazy(view_type_registry.get_types, list)()
|
||||
)
|
||||
type = serializers.ChoiceField(choices=lazy(view_type_registry.get_types, list)())
|
||||
|
||||
class Meta:
|
||||
model = View
|
||||
fields = ('name', 'type', 'filter_type', 'filters_disabled')
|
||||
fields = ("name", "type", "filter_type", "filters_disabled")
|
||||
|
||||
|
||||
class UpdateViewSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = View
|
||||
fields = ('name', 'filter_type', 'filters_disabled')
|
||||
fields = ("name", "filter_type", "filters_disabled")
|
||||
extra_kwargs = {
|
||||
'name': {'required': False},
|
||||
'filter_type': {'required': False},
|
||||
'filters_disabled': {'required': False},
|
||||
"name": {"required": False},
|
||||
"filter_type": {"required": False},
|
||||
"filters_disabled": {"required": False},
|
||||
}
|
||||
|
|
|
@ -3,34 +3,32 @@ from django.conf.urls import url
|
|||
from baserow.contrib.database.views.registries import view_type_registry
|
||||
|
||||
from .views import (
|
||||
ViewsView, ViewView, ViewFiltersView, ViewFilterView, ViewSortingsView,
|
||||
ViewSortView
|
||||
ViewsView,
|
||||
ViewView,
|
||||
ViewFiltersView,
|
||||
ViewFilterView,
|
||||
ViewSortingsView,
|
||||
ViewSortView,
|
||||
)
|
||||
|
||||
|
||||
app_name = 'baserow.contrib.database.api.views'
|
||||
app_name = "baserow.contrib.database.api.views"
|
||||
|
||||
urlpatterns = view_type_registry.api_urls + [
|
||||
url(r'table/(?P<table_id>[0-9]+)/$', ViewsView.as_view(), name='list'),
|
||||
url(r"table/(?P<table_id>[0-9]+)/$", ViewsView.as_view(), name="list"),
|
||||
url(
|
||||
r'filter/(?P<view_filter_id>[0-9]+)/$',
|
||||
r"filter/(?P<view_filter_id>[0-9]+)/$",
|
||||
ViewFilterView.as_view(),
|
||||
name='filter_item'
|
||||
name="filter_item",
|
||||
),
|
||||
url(r"sort/(?P<view_sort_id>[0-9]+)/$", ViewSortView.as_view(), name="sort_item"),
|
||||
url(r"(?P<view_id>[0-9]+)/$", ViewView.as_view(), name="item"),
|
||||
url(
|
||||
r"(?P<view_id>[0-9]+)/filters/$", ViewFiltersView.as_view(), name="list_filters"
|
||||
),
|
||||
url(
|
||||
r'sort/(?P<view_sort_id>[0-9]+)/$',
|
||||
ViewSortView.as_view(),
|
||||
name='sort_item'
|
||||
),
|
||||
url(r'(?P<view_id>[0-9]+)/$', ViewView.as_view(), name='item'),
|
||||
url(
|
||||
r'(?P<view_id>[0-9]+)/filters/$',
|
||||
ViewFiltersView.as_view(),
|
||||
name='list_filters'
|
||||
),
|
||||
url(
|
||||
r'(?P<view_id>[0-9]+)/sortings/$',
|
||||
r"(?P<view_id>[0-9]+)/sortings/$",
|
||||
ViewSortingsView.as_view(),
|
||||
name='list_sortings'
|
||||
name="list_sortings",
|
||||
),
|
||||
]
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -13,7 +13,7 @@ from .api.serializers import DatabaseSerializer
|
|||
|
||||
|
||||
class DatabaseApplicationType(ApplicationType):
|
||||
type = 'database'
|
||||
type = "database"
|
||||
model_class = Database
|
||||
instance_serializer_class = DatabaseSerializer
|
||||
|
||||
|
@ -23,7 +23,7 @@ class DatabaseApplicationType(ApplicationType):
|
|||
handler.
|
||||
"""
|
||||
|
||||
database_tables = database.table_set.all().select_related('database__group')
|
||||
database_tables = database.table_set.all().select_related("database__group")
|
||||
table_handler = TableHandler()
|
||||
|
||||
for table in database_tables:
|
||||
|
@ -33,7 +33,7 @@ class DatabaseApplicationType(ApplicationType):
|
|||
from .api import urls as api_urls
|
||||
|
||||
return [
|
||||
path('database/', include(api_urls, namespace=self.type)),
|
||||
path("database/", include(api_urls, namespace=self.type)),
|
||||
]
|
||||
|
||||
def export_serialized(self, database):
|
||||
|
@ -43,10 +43,10 @@ class DatabaseApplicationType(ApplicationType):
|
|||
"""
|
||||
|
||||
tables = database.table_set.all().prefetch_related(
|
||||
'field_set',
|
||||
'view_set',
|
||||
'view_set__viewfilter_set',
|
||||
'view_set__viewsort_set'
|
||||
"field_set",
|
||||
"view_set",
|
||||
"view_set__viewfilter_set",
|
||||
"view_set__viewsort_set",
|
||||
)
|
||||
serialized_tables = []
|
||||
for table in tables:
|
||||
|
@ -67,31 +67,28 @@ class DatabaseApplicationType(ApplicationType):
|
|||
serialized_rows = []
|
||||
table_cache = {}
|
||||
for row in model.objects.all():
|
||||
serialized_row = {
|
||||
'id': row.id,
|
||||
'order': str(row.order)
|
||||
}
|
||||
serialized_row = {"id": row.id, "order": str(row.order)}
|
||||
for field_object in model._field_objects.values():
|
||||
field_name = field_object['name']
|
||||
field_type = field_object['type']
|
||||
field_name = field_object["name"]
|
||||
field_type = field_object["type"]
|
||||
serialized_row[field_name] = field_type.get_export_serialized_value(
|
||||
row,
|
||||
field_name,
|
||||
table_cache
|
||||
row, field_name, table_cache
|
||||
)
|
||||
serialized_rows.append(serialized_row)
|
||||
|
||||
serialized_tables.append({
|
||||
'id': table.id,
|
||||
'name': table.name,
|
||||
'order': table.order,
|
||||
'fields': serialized_fields,
|
||||
'views': serialized_views,
|
||||
'rows': serialized_rows,
|
||||
})
|
||||
serialized_tables.append(
|
||||
{
|
||||
"id": table.id,
|
||||
"name": table.name,
|
||||
"order": table.order,
|
||||
"fields": serialized_fields,
|
||||
"views": serialized_views,
|
||||
"rows": serialized_rows,
|
||||
}
|
||||
)
|
||||
|
||||
serialized = super().export_serialized(database)
|
||||
serialized['tables'] = serialized_tables
|
||||
serialized["tables"] = serialized_tables
|
||||
return serialized
|
||||
|
||||
def import_serialized(self, group, serialized_values, id_mapping):
|
||||
|
@ -99,10 +96,10 @@ class DatabaseApplicationType(ApplicationType):
|
|||
Imports a database application exported by the `export_serialized` method.
|
||||
"""
|
||||
|
||||
if 'database_tables' not in id_mapping:
|
||||
id_mapping['database_tables'] = {}
|
||||
if "database_tables" not in id_mapping:
|
||||
id_mapping["database_tables"] = {}
|
||||
|
||||
tables = serialized_values.pop('tables')
|
||||
tables = serialized_values.pop("tables")
|
||||
database = super().import_serialized(group, serialized_values, id_mapping)
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
|
||||
|
@ -111,58 +108,54 @@ class DatabaseApplicationType(ApplicationType):
|
|||
for table in tables:
|
||||
table_object = Table.objects.create(
|
||||
database=database,
|
||||
name=table['name'],
|
||||
order=table['order'],
|
||||
name=table["name"],
|
||||
order=table["order"],
|
||||
)
|
||||
id_mapping['database_tables'][table['id']] = table_object.id
|
||||
table['_object'] = table_object
|
||||
table['_field_objects'] = []
|
||||
id_mapping["database_tables"][table["id"]] = table_object.id
|
||||
table["_object"] = table_object
|
||||
table["_field_objects"] = []
|
||||
|
||||
# Because view properties might depend on fields, we first want to create all
|
||||
# the fields.
|
||||
for table in tables:
|
||||
for field in table['fields']:
|
||||
field_type = field_type_registry.get(field['type'])
|
||||
for field in table["fields"]:
|
||||
field_type = field_type_registry.get(field["type"])
|
||||
field_object = field_type.import_serialized(
|
||||
table['_object'],
|
||||
field,
|
||||
id_mapping
|
||||
table["_object"], field, id_mapping
|
||||
)
|
||||
|
||||
if field_object:
|
||||
table['_field_objects'].append(field_object)
|
||||
table["_field_objects"].append(field_object)
|
||||
|
||||
# Now that the all tables and fields exist, we can create the views and create
|
||||
# the table schema in the database.
|
||||
for table in tables:
|
||||
for view in table['views']:
|
||||
view_type = view_type_registry.get(view['type'])
|
||||
view_type.import_serialized(table['_object'], view, id_mapping)
|
||||
for view in table["views"]:
|
||||
view_type = view_type_registry.get(view["type"])
|
||||
view_type.import_serialized(table["_object"], view, id_mapping)
|
||||
|
||||
# We don't need to create all the fields individually because the schema
|
||||
# editor can handle the creation of the table schema in one go.
|
||||
with connection.schema_editor() as schema_editor:
|
||||
model = table['_object'].get_model(
|
||||
fields=table['_field_objects'],
|
||||
field_ids=[]
|
||||
model = table["_object"].get_model(
|
||||
fields=table["_field_objects"], field_ids=[]
|
||||
)
|
||||
schema_editor.create_model(model)
|
||||
|
||||
# Now that everything is in place we can start filling the table with the rows
|
||||
# in an efficient matter by using the bulk_create functionality.
|
||||
for table in tables:
|
||||
model = table['_object'].get_model(
|
||||
fields=table['_field_objects'],
|
||||
field_ids=[]
|
||||
model = table["_object"].get_model(
|
||||
fields=table["_field_objects"], field_ids=[]
|
||||
)
|
||||
field_ids = [field_object.id for field_object in table['_field_objects']]
|
||||
field_ids = [field_object.id for field_object in table["_field_objects"]]
|
||||
rows_to_be_inserted = []
|
||||
|
||||
for row in table['rows']:
|
||||
row_object = model(id=row['id'], order=row['order'])
|
||||
for row in table["rows"]:
|
||||
row_object = model(id=row["id"], order=row["order"])
|
||||
|
||||
for field in table['fields']:
|
||||
field_type = field_type_registry.get(field['type'])
|
||||
for field in table["fields"]:
|
||||
field_type = field_type_registry.get(field["type"])
|
||||
new_field_id = id_mapping["database_fields"][field["id"]]
|
||||
|
||||
# If the new field id is not present in the field_ids then we don't
|
||||
|
@ -176,7 +169,7 @@ class DatabaseApplicationType(ApplicationType):
|
|||
row_object,
|
||||
f'field_{id_mapping["database_fields"][field["id"]]}',
|
||||
row[f'field_{field["id"]}'],
|
||||
id_mapping
|
||||
id_mapping,
|
||||
)
|
||||
|
||||
rows_to_be_inserted.append(row_object)
|
||||
|
|
|
@ -5,7 +5,7 @@ from baserow.ws.registries import page_registry
|
|||
|
||||
|
||||
class DatabaseConfig(AppConfig):
|
||||
name = 'baserow.contrib.database'
|
||||
name = "baserow.contrib.database"
|
||||
|
||||
def prevent_generated_model_for_registering(self):
|
||||
"""
|
||||
|
@ -27,9 +27,8 @@ class DatabaseConfig(AppConfig):
|
|||
original = self.apps.register_model
|
||||
|
||||
def register_model(app_label, model):
|
||||
if (
|
||||
not hasattr(model, '_generated_table_model') and
|
||||
not hasattr(model._meta.auto_created, '_generated_table_model')
|
||||
if not hasattr(model, "_generated_table_model") and not hasattr(
|
||||
model._meta.auto_created, "_generated_table_model"
|
||||
):
|
||||
return original(app_label, model)
|
||||
|
||||
|
@ -42,13 +41,23 @@ class DatabaseConfig(AppConfig):
|
|||
from .fields.registries import field_type_registry, field_converter_registry
|
||||
|
||||
from .plugins import DatabasePlugin
|
||||
|
||||
plugin_registry.register(DatabasePlugin())
|
||||
|
||||
from .fields.field_types import (
|
||||
TextFieldType, LongTextFieldType, URLFieldType, NumberFieldType,
|
||||
BooleanFieldType, DateFieldType, LinkRowFieldType, EmailFieldType,
|
||||
FileFieldType, SingleSelectFieldType, PhoneNumberFieldType
|
||||
TextFieldType,
|
||||
LongTextFieldType,
|
||||
URLFieldType,
|
||||
NumberFieldType,
|
||||
BooleanFieldType,
|
||||
DateFieldType,
|
||||
LinkRowFieldType,
|
||||
EmailFieldType,
|
||||
FileFieldType,
|
||||
SingleSelectFieldType,
|
||||
PhoneNumberFieldType,
|
||||
)
|
||||
|
||||
field_type_registry.register(TextFieldType())
|
||||
field_type_registry.register(LongTextFieldType())
|
||||
field_type_registry.register(URLFieldType())
|
||||
|
@ -62,20 +71,31 @@ class DatabaseConfig(AppConfig):
|
|||
field_type_registry.register(PhoneNumberFieldType())
|
||||
|
||||
from .fields.field_converters import LinkRowFieldConverter, FileFieldConverter
|
||||
|
||||
field_converter_registry.register(LinkRowFieldConverter())
|
||||
field_converter_registry.register(FileFieldConverter())
|
||||
|
||||
from .views.view_types import GridViewType
|
||||
|
||||
view_type_registry.register(GridViewType())
|
||||
|
||||
from .views.view_filters import (
|
||||
EqualViewFilterType, NotEqualViewFilterType, EmptyViewFilterType,
|
||||
NotEmptyViewFilterType, DateEqualViewFilterType, DateNotEqualViewFilterType,
|
||||
HigherThanViewFilterType, LowerThanViewFilterType, ContainsViewFilterType,
|
||||
FilenameContainsViewFilterType, ContainsNotViewFilterType,
|
||||
BooleanViewFilterType, SingleSelectEqualViewFilterType,
|
||||
SingleSelectNotEqualViewFilterType
|
||||
EqualViewFilterType,
|
||||
NotEqualViewFilterType,
|
||||
EmptyViewFilterType,
|
||||
NotEmptyViewFilterType,
|
||||
DateEqualViewFilterType,
|
||||
DateNotEqualViewFilterType,
|
||||
HigherThanViewFilterType,
|
||||
LowerThanViewFilterType,
|
||||
ContainsViewFilterType,
|
||||
FilenameContainsViewFilterType,
|
||||
ContainsNotViewFilterType,
|
||||
BooleanViewFilterType,
|
||||
SingleSelectEqualViewFilterType,
|
||||
SingleSelectNotEqualViewFilterType,
|
||||
)
|
||||
|
||||
view_filter_type_registry.register(EqualViewFilterType())
|
||||
view_filter_type_registry.register(NotEqualViewFilterType())
|
||||
view_filter_type_registry.register(FilenameContainsViewFilterType())
|
||||
|
@ -92,9 +112,11 @@ class DatabaseConfig(AppConfig):
|
|||
view_filter_type_registry.register(NotEmptyViewFilterType())
|
||||
|
||||
from .application_types import DatabaseApplicationType
|
||||
|
||||
application_type_registry.register(DatabaseApplicationType())
|
||||
|
||||
from .ws.pages import TablePageType
|
||||
|
||||
page_registry.register(TablePageType())
|
||||
|
||||
# The signals must always be imported last because they use the registries
|
||||
|
|
|
@ -14,12 +14,9 @@ class TablesDatabaseRouter(object):
|
|||
# USER_TABLE_DATABASE because it could be that the user data does not live in
|
||||
# the default database. This is also the case when the model is automatically
|
||||
# created by a generated table model.
|
||||
if (
|
||||
hasattr(model, '_generated_table_model') or
|
||||
(
|
||||
model._meta.auto_created and
|
||||
hasattr(model._meta.auto_created, '_generated_table_model')
|
||||
)
|
||||
if hasattr(model, "_generated_table_model") or (
|
||||
model._meta.auto_created
|
||||
and hasattr(model._meta.auto_created, "_generated_table_model")
|
||||
):
|
||||
return settings.USER_TABLE_DATABASE
|
||||
|
||||
|
@ -35,7 +32,7 @@ class TablesDatabaseRouter(object):
|
|||
database table can make references to for example a select option.
|
||||
"""
|
||||
|
||||
allowed = ('default', settings.USER_TABLE_DATABASE)
|
||||
allowed = ("default", settings.USER_TABLE_DATABASE)
|
||||
if obj1._state.db in allowed and obj2._state.db in allowed:
|
||||
return True
|
||||
return None
|
||||
|
|
|
@ -10,9 +10,11 @@ class PostgresqlLenientDatabaseSchemaEditor:
|
|||
format. If the casting still fails the value will be set to null.
|
||||
"""
|
||||
|
||||
sql_alter_column_type = 'ALTER COLUMN %(column)s TYPE %(type)s ' \
|
||||
'USING pg_temp.try_cast(%(column)s::text)'
|
||||
sql_drop_try_cast = 'DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)'
|
||||
sql_alter_column_type = (
|
||||
"ALTER COLUMN %(column)s TYPE %(type)s "
|
||||
"USING pg_temp.try_cast(%(column)s::text)"
|
||||
)
|
||||
sql_drop_try_cast = "DROP FUNCTION IF EXISTS pg_temp.try_cast(text, int)"
|
||||
sql_create_try_cast = """
|
||||
create or replace function pg_temp.try_cast(
|
||||
p_in text,
|
||||
|
@ -34,18 +36,31 @@ class PostgresqlLenientDatabaseSchemaEditor:
|
|||
language plpgsql;
|
||||
"""
|
||||
|
||||
def __init__(self, *args, alter_column_prepare_old_value='',
|
||||
alter_column_prepare_new_value='',
|
||||
force_alter_column=False):
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
alter_column_prepare_old_value="",
|
||||
alter_column_prepare_new_value="",
|
||||
force_alter_column=False,
|
||||
):
|
||||
self.alter_column_prepare_old_value = alter_column_prepare_old_value
|
||||
self.alter_column_prepare_new_value = alter_column_prepare_new_value
|
||||
self.force_alter_column = force_alter_column
|
||||
super().__init__(*args)
|
||||
|
||||
def _alter_field(self, model, old_field, new_field, old_type, new_type,
|
||||
old_db_params, new_db_params, strict=False):
|
||||
def _alter_field(
|
||||
self,
|
||||
model,
|
||||
old_field,
|
||||
new_field,
|
||||
old_type,
|
||||
new_type,
|
||||
old_db_params,
|
||||
new_db_params,
|
||||
strict=False,
|
||||
):
|
||||
if self.force_alter_column:
|
||||
old_type = f'{old_type}_forced'
|
||||
old_type = f"{old_type}_forced"
|
||||
|
||||
if old_type != new_type:
|
||||
variables = {}
|
||||
|
@ -63,21 +78,36 @@ class PostgresqlLenientDatabaseSchemaEditor:
|
|||
|
||||
quoted_column_name = self.quote_name(new_field.column)
|
||||
self.execute(self.sql_drop_try_cast)
|
||||
self.execute(self.sql_create_try_cast % {
|
||||
'column': quoted_column_name,
|
||||
'type': new_type,
|
||||
'alter_column_prepare_old_value': alter_column_prepare_old_value,
|
||||
'alter_column_prepare_new_value': alter_column_prepare_new_value
|
||||
}, variables)
|
||||
self.execute(
|
||||
self.sql_create_try_cast
|
||||
% {
|
||||
"column": quoted_column_name,
|
||||
"type": new_type,
|
||||
"alter_column_prepare_old_value": alter_column_prepare_old_value,
|
||||
"alter_column_prepare_new_value": alter_column_prepare_new_value,
|
||||
},
|
||||
variables,
|
||||
)
|
||||
|
||||
return super()._alter_field(model, old_field, new_field, old_type, new_type,
|
||||
old_db_params, new_db_params, strict)
|
||||
return super()._alter_field(
|
||||
model,
|
||||
old_field,
|
||||
new_field,
|
||||
old_type,
|
||||
new_type,
|
||||
old_db_params,
|
||||
new_db_params,
|
||||
strict,
|
||||
)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
|
||||
alter_column_prepare_new_value=None,
|
||||
force_alter_column=False):
|
||||
def lenient_schema_editor(
|
||||
connection,
|
||||
alter_column_prepare_old_value=None,
|
||||
alter_column_prepare_new_value=None,
|
||||
force_alter_column=False,
|
||||
):
|
||||
"""
|
||||
A contextual function that yields a modified version of the connection's schema
|
||||
editor. This temporary version is more lenient then the regular editor. Normally
|
||||
|
@ -102,31 +132,29 @@ def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
|
|||
`postgresql` is supported.
|
||||
"""
|
||||
|
||||
vendor_schema_editor_mapping = {'postgresql': PostgresqlLenientDatabaseSchemaEditor}
|
||||
vendor_schema_editor_mapping = {"postgresql": PostgresqlLenientDatabaseSchemaEditor}
|
||||
schema_editor_class = vendor_schema_editor_mapping.get(connection.vendor)
|
||||
|
||||
if not schema_editor_class:
|
||||
raise ValueError(f'The provided connection vendor is not supported. We only '
|
||||
f'support {", ".join(vendor_schema_editor_mapping.keys())}.')
|
||||
raise ValueError(
|
||||
f"The provided connection vendor is not supported. We only "
|
||||
f'support {", ".join(vendor_schema_editor_mapping.keys())}.'
|
||||
)
|
||||
|
||||
regular_schema_editor = connection.SchemaEditorClass
|
||||
schema_editor_class = type(
|
||||
'LenientDatabaseSchemaEditor',
|
||||
(schema_editor_class, regular_schema_editor),
|
||||
{}
|
||||
"LenientDatabaseSchemaEditor", (schema_editor_class, regular_schema_editor), {}
|
||||
)
|
||||
|
||||
connection.SchemaEditorClass = schema_editor_class
|
||||
|
||||
kwargs = {
|
||||
'force_alter_column': force_alter_column
|
||||
}
|
||||
kwargs = {"force_alter_column": force_alter_column}
|
||||
|
||||
if alter_column_prepare_old_value:
|
||||
kwargs['alter_column_prepare_old_value'] = alter_column_prepare_old_value
|
||||
kwargs["alter_column_prepare_old_value"] = alter_column_prepare_old_value
|
||||
|
||||
if alter_column_prepare_new_value:
|
||||
kwargs['alter_column_prepare_new_value'] = alter_column_prepare_new_value
|
||||
kwargs["alter_column_prepare_new_value"] = alter_column_prepare_new_value
|
||||
|
||||
try:
|
||||
with connection.schema_editor(**kwargs) as schema_editor:
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from baserow.core.exceptions import (
|
||||
InstanceTypeDoesNotExist, InstanceTypeAlreadyRegistered
|
||||
InstanceTypeDoesNotExist,
|
||||
InstanceTypeAlreadyRegistered,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -3,8 +3,17 @@ from .models import LinkRowField, FileField
|
|||
|
||||
|
||||
class RecreateFieldConverter(FieldConverter):
|
||||
def alter_field(self, from_field, to_field, from_model, to_model,
|
||||
from_model_field, to_model_field, user, connection):
|
||||
def alter_field(
|
||||
self,
|
||||
from_field,
|
||||
to_field,
|
||||
from_model,
|
||||
to_model,
|
||||
from_model_field,
|
||||
to_model_field,
|
||||
user,
|
||||
connection,
|
||||
):
|
||||
"""
|
||||
Does the field alteration by removing the old field and creating the new field.
|
||||
The success rate of this converter is very high, but the downside is that the
|
||||
|
@ -17,36 +26,32 @@ class RecreateFieldConverter(FieldConverter):
|
|||
|
||||
|
||||
class LinkRowFieldConverter(RecreateFieldConverter):
|
||||
type = 'link_row'
|
||||
type = "link_row"
|
||||
|
||||
def is_applicable(self, from_model, from_field, to_field):
|
||||
return (
|
||||
(
|
||||
isinstance(from_field, LinkRowField) and
|
||||
not isinstance(to_field, LinkRowField)
|
||||
) or (
|
||||
not isinstance(from_field, LinkRowField) and
|
||||
isinstance(to_field, LinkRowField)
|
||||
) or (
|
||||
isinstance(from_field, LinkRowField)
|
||||
and not isinstance(to_field, LinkRowField)
|
||||
)
|
||||
or (
|
||||
not isinstance(from_field, LinkRowField)
|
||||
and isinstance(to_field, LinkRowField)
|
||||
)
|
||||
or (
|
||||
# If both fields are LinkRowFields and neither the linked table nor the
|
||||
# multiple setting has changed.
|
||||
isinstance(from_field, LinkRowField) and
|
||||
isinstance(to_field, LinkRowField) and
|
||||
from_field.link_row_table_id != to_field.link_row_table_id
|
||||
isinstance(from_field, LinkRowField)
|
||||
and isinstance(to_field, LinkRowField)
|
||||
and from_field.link_row_table_id != to_field.link_row_table_id
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class FileFieldConverter(RecreateFieldConverter):
|
||||
type = 'file'
|
||||
type = "file"
|
||||
|
||||
def is_applicable(self, from_model, from_field, to_field):
|
||||
return (
|
||||
(
|
||||
isinstance(from_field, FileField) and
|
||||
not isinstance(to_field, FileField)
|
||||
) or (
|
||||
not isinstance(from_field, FileField) and
|
||||
isinstance(to_field, FileField)
|
||||
)
|
||||
)
|
||||
isinstance(from_field, FileField) and not isinstance(to_field, FileField)
|
||||
) or (not isinstance(from_field, FileField) and isinstance(to_field, FileField))
|
||||
|
|
|
@ -3,8 +3,8 @@ from typing import Dict, Any, Union
|
|||
from django.db.models import Q, BooleanField
|
||||
from django.db.models.expressions import RawSQL
|
||||
|
||||
FILTER_TYPE_AND = 'AND'
|
||||
FILTER_TYPE_OR = 'OR'
|
||||
FILTER_TYPE_AND = "AND"
|
||||
FILTER_TYPE_OR = "OR"
|
||||
|
||||
|
||||
class AnnotatedQ:
|
||||
|
@ -54,13 +54,13 @@ class FilterBuilder:
|
|||
"""
|
||||
|
||||
if filter_type not in [FILTER_TYPE_AND, FILTER_TYPE_OR]:
|
||||
raise ValueError(f'Unknown filter type {filter_type}.')
|
||||
raise ValueError(f"Unknown filter type {filter_type}.")
|
||||
|
||||
self._annotation = {}
|
||||
self._q_filters = Q()
|
||||
self._filter_type = filter_type
|
||||
|
||||
def filter(self, q: OptionallyAnnotatedQ) -> 'FilterBuilder':
|
||||
def filter(self, q: OptionallyAnnotatedQ) -> "FilterBuilder":
|
||||
"""
|
||||
Adds a Q or AnnotatedQ filter into this builder to be joined together with
|
||||
existing filters based on the builders `filter_type`.
|
||||
|
@ -92,27 +92,27 @@ class FilterBuilder:
|
|||
|
||||
return queryset.annotate(**self._annotation).filter(self._q_filters)
|
||||
|
||||
def _annotate(self, annotation_dict: Dict[str, Any]) -> 'FilterBuilder':
|
||||
def _annotate(self, annotation_dict: Dict[str, Any]) -> "FilterBuilder":
|
||||
self._annotation = {**self._annotation, **annotation_dict}
|
||||
|
||||
def _filter(self, q_filter: Q) -> 'FilterBuilder':
|
||||
def _filter(self, q_filter: Q) -> "FilterBuilder":
|
||||
if self._filter_type == FILTER_TYPE_AND:
|
||||
self._q_filters &= q_filter
|
||||
elif self._filter_type == FILTER_TYPE_OR:
|
||||
self._q_filters |= q_filter
|
||||
else:
|
||||
raise ValueError(f'Unknown filter type {self._filter_type}.')
|
||||
raise ValueError(f"Unknown filter type {self._filter_type}.")
|
||||
|
||||
|
||||
def contains_filter(field_name, value, model_field, _) -> OptionallyAnnotatedQ:
|
||||
value = value.strip()
|
||||
# If an empty value has been provided we do not want to filter at all.
|
||||
if value == '':
|
||||
if value == "":
|
||||
return Q()
|
||||
# Check if the model_field accepts the value.
|
||||
try:
|
||||
model_field.get_prep_value(value)
|
||||
return Q(**{f'{field_name}__icontains': value})
|
||||
return Q(**{f"{field_name}__icontains": value})
|
||||
except Exception:
|
||||
pass
|
||||
return Q()
|
||||
|
@ -121,15 +121,14 @@ def contains_filter(field_name, value, model_field, _) -> OptionallyAnnotatedQ:
|
|||
def filename_contains_filter(field_name, value, _, field) -> OptionallyAnnotatedQ:
|
||||
value = value.strip()
|
||||
# If an empty value has been provided we do not want to filter at all.
|
||||
if value == '':
|
||||
if value == "":
|
||||
return Q()
|
||||
# Check if the model_field has a file which matches the provided filter value.
|
||||
annotation_query = _build_filename_contains_raw_query(field, value)
|
||||
return AnnotatedQ(annotation={
|
||||
f'{field_name}_matches_visible_names': annotation_query
|
||||
}, q={
|
||||
f'{field_name}_matches_visible_names': True
|
||||
})
|
||||
return AnnotatedQ(
|
||||
annotation={f"{field_name}_matches_visible_names": annotation_query},
|
||||
q={f"{field_name}_matches_visible_names": True},
|
||||
)
|
||||
|
||||
|
||||
def _build_filename_contains_raw_query(field, value):
|
||||
|
@ -151,5 +150,8 @@ def _build_filename_contains_raw_query(field, value):
|
|||
WHERE UPPER(attached_files ->> 'visible_name') LIKE UPPER(%s)
|
||||
)
|
||||
"""
|
||||
return RawSQL(num_files_with_name_like_value, params=[f"%{value}%"],
|
||||
output_field=BooleanField())
|
||||
return RawSQL(
|
||||
num_files_with_name_like_value,
|
||||
params=[f"%{value}%"],
|
||||
output_field=BooleanField(),
|
||||
)
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -9,8 +9,11 @@ from baserow.contrib.database.db.schema import lenient_schema_editor
|
|||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.core.utils import extract_allowed, set_allowed_attrs
|
||||
from .exceptions import (
|
||||
PrimaryFieldAlreadyExists, CannotDeletePrimaryField, CannotChangeFieldType,
|
||||
FieldDoesNotExist, IncompatiblePrimaryFieldTypeError
|
||||
PrimaryFieldAlreadyExists,
|
||||
CannotDeletePrimaryField,
|
||||
CannotChangeFieldType,
|
||||
FieldDoesNotExist,
|
||||
IncompatiblePrimaryFieldTypeError,
|
||||
)
|
||||
from .models import Field, SelectOption
|
||||
from .registries import field_type_registry, field_converter_registry
|
||||
|
@ -46,16 +49,17 @@ class FieldHandler:
|
|||
base_queryset = field_model.objects
|
||||
|
||||
try:
|
||||
field = base_queryset.select_related('table__database__group').get(
|
||||
field = base_queryset.select_related("table__database__group").get(
|
||||
id=field_id
|
||||
)
|
||||
except Field.DoesNotExist:
|
||||
raise FieldDoesNotExist(f'The field with id {field_id} does not exist.')
|
||||
raise FieldDoesNotExist(f"The field with id {field_id} does not exist.")
|
||||
|
||||
return field
|
||||
|
||||
def create_field(self, user, table, type_name, primary=False,
|
||||
do_schema_change=True, **kwargs):
|
||||
def create_field(
|
||||
self, user, table, type_name, primary=False, do_schema_change=True, **kwargs
|
||||
):
|
||||
"""
|
||||
Creates a new field with the given type for a table.
|
||||
|
||||
|
@ -86,23 +90,26 @@ class FieldHandler:
|
|||
# Because only one primary field per table can exist and we have to check if one
|
||||
# already exists. If so the field cannot be created and an exception is raised.
|
||||
if primary and Field.objects.filter(table=table, primary=True).exists():
|
||||
raise PrimaryFieldAlreadyExists(f'A primary field already exists for the '
|
||||
f'table {table}.')
|
||||
raise PrimaryFieldAlreadyExists(
|
||||
f"A primary field already exists for the " f"table {table}."
|
||||
)
|
||||
|
||||
# Figure out which model to use and which field types are allowed for the given
|
||||
# field type.
|
||||
field_type = field_type_registry.get(type_name)
|
||||
model_class = field_type.model_class
|
||||
allowed_fields = ['name'] + field_type.allowed_fields
|
||||
allowed_fields = ["name"] + field_type.allowed_fields
|
||||
field_values = extract_allowed(kwargs, allowed_fields)
|
||||
last_order = model_class.get_last_order(table)
|
||||
|
||||
field_values = field_type.prepare_values(field_values, user)
|
||||
before = field_type.before_create(table, primary, field_values, last_order,
|
||||
user)
|
||||
before = field_type.before_create(
|
||||
table, primary, field_values, last_order, user
|
||||
)
|
||||
|
||||
instance = model_class.objects.create(table=table, order=last_order,
|
||||
primary=primary, **field_values)
|
||||
instance = model_class.objects.create(
|
||||
table=table, order=last_order, primary=primary, **field_values
|
||||
)
|
||||
|
||||
# Add the field to the table schema.
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
|
@ -115,8 +122,7 @@ class FieldHandler:
|
|||
|
||||
field_type.after_create(instance, to_model, user, connection, before)
|
||||
|
||||
field_created.send(self, field=instance, user=user,
|
||||
type_name=type_name)
|
||||
field_created.send(self, field=instance, user=user, type_name=type_name)
|
||||
|
||||
return instance
|
||||
|
||||
|
@ -143,7 +149,7 @@ class FieldHandler:
|
|||
"""
|
||||
|
||||
if not isinstance(field, Field):
|
||||
raise ValueError('The field is not an instance of Field.')
|
||||
raise ValueError("The field is not an instance of Field.")
|
||||
|
||||
group = field.table.database.group
|
||||
group.has_user(user, raise_error=True)
|
||||
|
@ -171,7 +177,7 @@ class FieldHandler:
|
|||
# like filters or sortings need to be changed.
|
||||
ViewHandler().field_type_changed(field)
|
||||
|
||||
allowed_fields = ['name'] + field_type.allowed_fields
|
||||
allowed_fields = ["name"] + field_type.allowed_fields
|
||||
field_values = extract_allowed(kwargs, allowed_fields)
|
||||
|
||||
field_values = field_type.prepare_values(field_values, user)
|
||||
|
@ -192,14 +198,19 @@ class FieldHandler:
|
|||
# Before a field is updated we are going to call the before_schema_change
|
||||
# method of the old field because some cleanup of related instances might
|
||||
# need to happen.
|
||||
old_field_type.before_schema_change(old_field, field, from_model, to_model,
|
||||
from_model_field, to_model_field, user)
|
||||
old_field_type.before_schema_change(
|
||||
old_field,
|
||||
field,
|
||||
from_model,
|
||||
to_model,
|
||||
from_model_field,
|
||||
to_model_field,
|
||||
user,
|
||||
)
|
||||
|
||||
# Try to find a data converter that can be applied.
|
||||
converter = field_converter_registry.find_applicable_converter(
|
||||
from_model,
|
||||
old_field,
|
||||
field
|
||||
from_model, old_field, field
|
||||
)
|
||||
|
||||
if converter:
|
||||
|
@ -213,7 +224,7 @@ class FieldHandler:
|
|||
from_model_field,
|
||||
to_model_field,
|
||||
user,
|
||||
connection
|
||||
connection,
|
||||
)
|
||||
else:
|
||||
if baserow_field_type_changed:
|
||||
|
@ -223,8 +234,7 @@ class FieldHandler:
|
|||
force_alter_column = True
|
||||
else:
|
||||
force_alter_column = field_type.force_same_type_alter_column(
|
||||
old_field,
|
||||
field
|
||||
old_field, field
|
||||
)
|
||||
|
||||
# If no field converter is found we are going to alter the field using the
|
||||
|
@ -232,39 +242,51 @@ class FieldHandler:
|
|||
with lenient_schema_editor(
|
||||
connection,
|
||||
old_field_type.get_alter_column_prepare_old_value(
|
||||
connection, old_field, field),
|
||||
connection, old_field, field
|
||||
),
|
||||
field_type.get_alter_column_prepare_new_value(
|
||||
connection, old_field, field
|
||||
),
|
||||
force_alter_column
|
||||
force_alter_column,
|
||||
) as schema_editor:
|
||||
try:
|
||||
schema_editor.alter_field(from_model, from_model_field,
|
||||
to_model_field)
|
||||
schema_editor.alter_field(
|
||||
from_model, from_model_field, to_model_field
|
||||
)
|
||||
except (ProgrammingError, DataError) as e:
|
||||
# If something is going wrong while changing the schema we will
|
||||
# just raise a specific exception. In the future we want to have
|
||||
# some sort of converter abstraction where the values of certain
|
||||
# types can be converted to another value.
|
||||
logger.error(str(e))
|
||||
message = f'Could not alter field when changing field type ' \
|
||||
f'{from_field_type} to {new_type_name}.'
|
||||
message = (
|
||||
f"Could not alter field when changing field type "
|
||||
f"{from_field_type} to {new_type_name}."
|
||||
)
|
||||
raise CannotChangeFieldType(message)
|
||||
|
||||
from_model_field_type = from_model_field.db_parameters(connection)['type']
|
||||
to_model_field_type = to_model_field.db_parameters(connection)['type']
|
||||
from_model_field_type = from_model_field.db_parameters(connection)["type"]
|
||||
to_model_field_type = to_model_field.db_parameters(connection)["type"]
|
||||
altered_column = from_model_field_type != to_model_field_type
|
||||
|
||||
# If the new field doesn't support select options we can delete those
|
||||
# relations.
|
||||
if (
|
||||
old_field_type.can_have_select_options and
|
||||
not field_type.can_have_select_options
|
||||
old_field_type.can_have_select_options
|
||||
and not field_type.can_have_select_options
|
||||
):
|
||||
old_field.select_options.all().delete()
|
||||
|
||||
field_type.after_update(old_field, field, from_model, to_model, user,
|
||||
connection, altered_column, before)
|
||||
field_type.after_update(
|
||||
old_field,
|
||||
field,
|
||||
from_model,
|
||||
to_model,
|
||||
user,
|
||||
connection,
|
||||
altered_column,
|
||||
before,
|
||||
)
|
||||
|
||||
field_updated.send(self, field=field, user=user)
|
||||
|
||||
|
@ -284,14 +306,15 @@ class FieldHandler:
|
|||
"""
|
||||
|
||||
if not isinstance(field, Field):
|
||||
raise ValueError('The field is not an instance of Field')
|
||||
raise ValueError("The field is not an instance of Field")
|
||||
|
||||
group = field.table.database.group
|
||||
group.has_user(user, raise_error=True)
|
||||
|
||||
if field.primary:
|
||||
raise CannotDeletePrimaryField('Cannot delete the primary field of a '
|
||||
'table.')
|
||||
raise CannotDeletePrimaryField(
|
||||
"Cannot delete the primary field of a " "table."
|
||||
)
|
||||
|
||||
field = field.specific
|
||||
field_type = field_type_registry.get_by_model(field)
|
||||
|
@ -340,11 +363,8 @@ class FieldHandler:
|
|||
to_delete = [
|
||||
existing.id
|
||||
for existing in existing_select_options
|
||||
if existing.id not in [
|
||||
desired['id']
|
||||
for desired in select_options
|
||||
if 'id' in desired
|
||||
]
|
||||
if existing.id
|
||||
not in [desired["id"] for desired in select_options if "id" in desired]
|
||||
]
|
||||
|
||||
if len(to_delete) > 0:
|
||||
|
@ -352,30 +372,34 @@ class FieldHandler:
|
|||
|
||||
# Checks which existing instances must be fetched using a single query.
|
||||
to_select = [
|
||||
select_option['id']
|
||||
select_option["id"]
|
||||
for select_option in select_options
|
||||
if 'id' in select_option
|
||||
if "id" in select_option
|
||||
]
|
||||
|
||||
if len(to_select) > 0:
|
||||
for existing in field.select_options.filter(id__in=to_select):
|
||||
for select_option in select_options:
|
||||
if select_option.get('id') == existing.id:
|
||||
select_option['instance'] = existing
|
||||
if select_option.get("id") == existing.id:
|
||||
select_option["instance"] = existing
|
||||
|
||||
to_create = []
|
||||
|
||||
for order, select_option in enumerate(select_options):
|
||||
if 'instance' in select_option:
|
||||
instance = select_option['instance']
|
||||
if "instance" in select_option:
|
||||
instance = select_option["instance"]
|
||||
instance.order = order
|
||||
instance.value = select_option['value']
|
||||
instance.color = select_option['color']
|
||||
instance.value = select_option["value"]
|
||||
instance.color = select_option["color"]
|
||||
instance.save()
|
||||
else:
|
||||
to_create.append(SelectOption(
|
||||
field=field, order=order, value=select_option['value'],
|
||||
color=select_option['color'])
|
||||
to_create.append(
|
||||
SelectOption(
|
||||
field=field,
|
||||
order=order,
|
||||
value=select_option["value"],
|
||||
color=select_option["color"],
|
||||
)
|
||||
)
|
||||
|
||||
if len(to_create) > 0:
|
||||
|
|
|
@ -3,87 +3,73 @@ from django.contrib.contenttypes.models import ContentType
|
|||
|
||||
from baserow.core.utils import to_snake_case, remove_special_characters
|
||||
from baserow.core.mixins import (
|
||||
OrderableMixin, PolymorphicContentTypeMixin, CreatedAndUpdatedOnMixin
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
)
|
||||
|
||||
NUMBER_TYPE_INTEGER = 'INTEGER'
|
||||
NUMBER_TYPE_DECIMAL = 'DECIMAL'
|
||||
NUMBER_TYPE_INTEGER = "INTEGER"
|
||||
NUMBER_TYPE_DECIMAL = "DECIMAL"
|
||||
NUMBER_TYPE_CHOICES = (
|
||||
('INTEGER', 'Integer'),
|
||||
('DECIMAL', 'Decimal'),
|
||||
("INTEGER", "Integer"),
|
||||
("DECIMAL", "Decimal"),
|
||||
)
|
||||
|
||||
NUMBER_DECIMAL_PLACES_CHOICES = (
|
||||
(1, '1.0'),
|
||||
(2, '1.00'),
|
||||
(3, '1.000'),
|
||||
(4, '1.0000'),
|
||||
(5, '1.00000')
|
||||
(1, "1.0"),
|
||||
(2, "1.00"),
|
||||
(3, "1.000"),
|
||||
(4, "1.0000"),
|
||||
(5, "1.00000"),
|
||||
)
|
||||
|
||||
DATE_FORMAT = {
|
||||
'EU': {
|
||||
'name': 'European (D/M/Y)',
|
||||
'format': '%d/%m/%Y',
|
||||
'sql': 'DD/MM/YYYY'
|
||||
},
|
||||
'US': {
|
||||
'name': 'US (M/D/Y)',
|
||||
'format': '%m/%d/%Y',
|
||||
'sql': 'MM/DD/YYYY'
|
||||
},
|
||||
'ISO': {
|
||||
'name': 'ISO (Y-M-D)',
|
||||
'format': '%Y-%m-%d',
|
||||
'sql': 'YYYY-MM-DD'
|
||||
},
|
||||
"EU": {"name": "European (D/M/Y)", "format": "%d/%m/%Y", "sql": "DD/MM/YYYY"},
|
||||
"US": {"name": "US (M/D/Y)", "format": "%m/%d/%Y", "sql": "MM/DD/YYYY"},
|
||||
"ISO": {"name": "ISO (Y-M-D)", "format": "%Y-%m-%d", "sql": "YYYY-MM-DD"},
|
||||
}
|
||||
DATE_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_FORMAT.items()]
|
||||
DATE_FORMAT_CHOICES = [(k, v["name"]) for k, v in DATE_FORMAT.items()]
|
||||
|
||||
DATE_TIME_FORMAT = {
|
||||
'24': {
|
||||
'name': '24 hour',
|
||||
'format': '%H:%M',
|
||||
'sql': 'HH24:MI'
|
||||
},
|
||||
'12': {
|
||||
'name': '12 hour',
|
||||
'format': '%I:%M %p',
|
||||
'sql': 'HH12:MIAM'
|
||||
}
|
||||
"24": {"name": "24 hour", "format": "%H:%M", "sql": "HH24:MI"},
|
||||
"12": {"name": "12 hour", "format": "%I:%M %p", "sql": "HH12:MIAM"},
|
||||
}
|
||||
DATE_TIME_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_TIME_FORMAT.items()]
|
||||
DATE_TIME_FORMAT_CHOICES = [(k, v["name"]) for k, v in DATE_TIME_FORMAT.items()]
|
||||
|
||||
|
||||
def get_default_field_content_type():
|
||||
return ContentType.objects.get_for_model(Field)
|
||||
|
||||
|
||||
class Field(CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixin,
|
||||
models.Model):
|
||||
class Field(
|
||||
CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixin, models.Model
|
||||
):
|
||||
"""
|
||||
Because each field type can have custom settings, for example precision for a number
|
||||
field, values for an option field or checkbox style for a boolean field we need a
|
||||
polymorphic content type to store these settings in another table.
|
||||
"""
|
||||
|
||||
table = models.ForeignKey('database.Table', on_delete=models.CASCADE)
|
||||
order = models.PositiveIntegerField(help_text='Lowest first.')
|
||||
table = models.ForeignKey("database.Table", on_delete=models.CASCADE)
|
||||
order = models.PositiveIntegerField(help_text="Lowest first.")
|
||||
name = models.CharField(max_length=255)
|
||||
primary = models.BooleanField(
|
||||
default=False,
|
||||
help_text='Indicates if the field is a primary field. If `true` the field '
|
||||
'cannot be deleted and the value should represent the whole row.'
|
||||
help_text="Indicates if the field is a primary field. If `true` the field "
|
||||
"cannot be deleted and the value should represent the whole row.",
|
||||
)
|
||||
content_type = models.ForeignKey(
|
||||
ContentType,
|
||||
verbose_name='content type',
|
||||
related_name='database_fields',
|
||||
on_delete=models.SET(get_default_field_content_type)
|
||||
verbose_name="content type",
|
||||
related_name="database_fields",
|
||||
on_delete=models.SET(get_default_field_content_type),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ('-primary', 'order',)
|
||||
ordering = (
|
||||
"-primary",
|
||||
"order",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_last_order(cls, table):
|
||||
|
@ -92,7 +78,7 @@ class Field(CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixi
|
|||
|
||||
@property
|
||||
def db_column(self):
|
||||
return f'field_{self.id}'
|
||||
return f"field_{self.id}"
|
||||
|
||||
@property
|
||||
def model_attribute_name(self):
|
||||
|
@ -107,7 +93,7 @@ class Field(CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixi
|
|||
name = to_snake_case(name)
|
||||
|
||||
if name[0].isnumeric():
|
||||
name = f'field_{name}'
|
||||
name = f"field_{name}"
|
||||
|
||||
return name
|
||||
|
||||
|
@ -116,11 +102,15 @@ class SelectOption(models.Model):
|
|||
value = models.CharField(max_length=255, blank=True)
|
||||
color = models.CharField(max_length=255, blank=True)
|
||||
order = models.PositiveIntegerField()
|
||||
field = models.ForeignKey(Field, on_delete=models.CASCADE,
|
||||
related_name='select_options')
|
||||
field = models.ForeignKey(
|
||||
Field, on_delete=models.CASCADE, related_name="select_options"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ('order', 'id',)
|
||||
ordering = (
|
||||
"order",
|
||||
"id",
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
@ -130,9 +120,9 @@ class TextField(Field):
|
|||
text_default = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='If set, this value is going to be added every time a new row '
|
||||
'created.'
|
||||
default="",
|
||||
help_text="If set, this value is going to be added every time a new row "
|
||||
"created.",
|
||||
)
|
||||
|
||||
|
||||
|
@ -146,30 +136,27 @@ class URLField(Field):
|
|||
|
||||
class NumberField(Field):
|
||||
number_type = models.CharField(
|
||||
max_length=32,
|
||||
choices=NUMBER_TYPE_CHOICES,
|
||||
default=NUMBER_TYPE_INTEGER
|
||||
max_length=32, choices=NUMBER_TYPE_CHOICES, default=NUMBER_TYPE_INTEGER
|
||||
)
|
||||
number_decimal_places = models.IntegerField(
|
||||
choices=NUMBER_DECIMAL_PLACES_CHOICES,
|
||||
default=1,
|
||||
help_text='The amount of digits allowed after the point.'
|
||||
help_text="The amount of digits allowed after the point.",
|
||||
)
|
||||
number_negative = models.BooleanField(
|
||||
default=False,
|
||||
help_text='Indicates if negative values are allowed.'
|
||||
default=False, help_text="Indicates if negative values are allowed."
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Check if the number_type and number_decimal_places has a valid choice."""
|
||||
|
||||
if not any(self.number_type in _tuple for _tuple in NUMBER_TYPE_CHOICES):
|
||||
raise ValueError(f'{self.number_type} is not a valid choice.')
|
||||
raise ValueError(f"{self.number_type} is not a valid choice.")
|
||||
if not any(
|
||||
self.number_decimal_places in _tuple
|
||||
for _tuple in NUMBER_DECIMAL_PLACES_CHOICES
|
||||
):
|
||||
raise ValueError(f'{self.number_decimal_places} is not a valid choice.')
|
||||
raise ValueError(f"{self.number_decimal_places} is not a valid choice.")
|
||||
super(NumberField, self).save(*args, **kwargs)
|
||||
|
||||
|
||||
|
@ -182,17 +169,16 @@ class DateField(Field):
|
|||
choices=DATE_FORMAT_CHOICES,
|
||||
default=DATE_FORMAT_CHOICES[0][0],
|
||||
max_length=32,
|
||||
help_text='EU (20/02/2020), US (02/20/2020) or ISO (2020-02-20)'
|
||||
help_text="EU (20/02/2020), US (02/20/2020) or ISO (2020-02-20)",
|
||||
)
|
||||
date_include_time = models.BooleanField(
|
||||
default=False,
|
||||
help_text='Indicates if the field also includes a time.'
|
||||
default=False, help_text="Indicates if the field also includes a time."
|
||||
)
|
||||
date_time_format = models.CharField(
|
||||
choices=DATE_TIME_FORMAT_CHOICES,
|
||||
default=DATE_TIME_FORMAT_CHOICES[0][0],
|
||||
max_length=32,
|
||||
help_text='24 (14:30) or 12 (02:30 PM)'
|
||||
help_text="24 (14:30) or 12 (02:30 PM)",
|
||||
)
|
||||
|
||||
def get_python_format(self):
|
||||
|
@ -204,7 +190,7 @@ class DateField(Field):
|
|||
:rtype: str
|
||||
"""
|
||||
|
||||
return self._get_format('format')
|
||||
return self._get_format("format")
|
||||
|
||||
def get_psql_format(self):
|
||||
"""
|
||||
|
@ -215,7 +201,7 @@ class DateField(Field):
|
|||
:rtype: str
|
||||
"""
|
||||
|
||||
return self._get_format('sql')
|
||||
return self._get_format("sql")
|
||||
|
||||
def get_psql_type(self):
|
||||
"""
|
||||
|
@ -226,7 +212,7 @@ class DateField(Field):
|
|||
:rtype: str
|
||||
"""
|
||||
|
||||
return 'timestamp' if self.date_include_time else 'date'
|
||||
return "timestamp" if self.date_include_time else "date"
|
||||
|
||||
def get_psql_type_convert_function(self):
|
||||
"""
|
||||
|
@ -238,30 +224,30 @@ class DateField(Field):
|
|||
:rtype: str
|
||||
"""
|
||||
|
||||
return 'TO_TIMESTAMP' if self.date_include_time else 'TO_DATE'
|
||||
return "TO_TIMESTAMP" if self.date_include_time else "TO_DATE"
|
||||
|
||||
def _get_format(self, format_type):
|
||||
date_format = DATE_FORMAT[self.date_format][format_type]
|
||||
time_format = DATE_TIME_FORMAT[self.date_time_format][format_type]
|
||||
if self.date_include_time:
|
||||
return f'{date_format} {time_format}'
|
||||
return f"{date_format} {time_format}"
|
||||
else:
|
||||
return date_format
|
||||
|
||||
|
||||
class LinkRowField(Field):
|
||||
link_row_table = models.ForeignKey(
|
||||
'database.Table',
|
||||
"database.Table",
|
||||
on_delete=models.CASCADE,
|
||||
help_text='The table that the field has a relation with.',
|
||||
blank=True
|
||||
help_text="The table that the field has a relation with.",
|
||||
blank=True,
|
||||
)
|
||||
link_row_related_field = models.ForeignKey(
|
||||
'self',
|
||||
"self",
|
||||
on_delete=models.SET_NULL,
|
||||
help_text='The relation field in the other table.',
|
||||
help_text="The relation field in the other table.",
|
||||
null=True,
|
||||
blank=True
|
||||
blank=True,
|
||||
)
|
||||
link_row_relation_id = models.IntegerField(null=True, blank=True)
|
||||
|
||||
|
@ -286,15 +272,18 @@ class LinkRowField(Field):
|
|||
"""
|
||||
|
||||
if not self.link_row_relation_id:
|
||||
raise ValueError('The link row field does not yet have a relation id.')
|
||||
raise ValueError("The link row field does not yet have a relation id.")
|
||||
|
||||
return f'database_relation_{self.link_row_relation_id}'
|
||||
return f"database_relation_{self.link_row_relation_id}"
|
||||
|
||||
@staticmethod
|
||||
def get_new_relation_id():
|
||||
last_id = LinkRowField.objects.all().aggregate(
|
||||
largest=models.Max('link_row_relation_id')
|
||||
)['largest'] or 0
|
||||
last_id = (
|
||||
LinkRowField.objects.all().aggregate(
|
||||
largest=models.Max("link_row_relation_id")
|
||||
)["largest"]
|
||||
or 0
|
||||
)
|
||||
return last_id + 1
|
||||
|
||||
|
||||
|
|
|
@ -1,18 +1,30 @@
|
|||
from django.db.models import Q
|
||||
|
||||
from baserow.core.registry import (
|
||||
Instance, Registry, ModelInstanceMixin, ModelRegistryMixin,
|
||||
CustomFieldsInstanceMixin, CustomFieldsRegistryMixin, MapAPIExceptionsInstanceMixin,
|
||||
APIUrlsRegistryMixin, APIUrlsInstanceMixin, ImportExportMixin
|
||||
Instance,
|
||||
Registry,
|
||||
ModelInstanceMixin,
|
||||
ModelRegistryMixin,
|
||||
CustomFieldsInstanceMixin,
|
||||
CustomFieldsRegistryMixin,
|
||||
MapAPIExceptionsInstanceMixin,
|
||||
APIUrlsRegistryMixin,
|
||||
APIUrlsInstanceMixin,
|
||||
ImportExportMixin,
|
||||
)
|
||||
|
||||
from .exceptions import FieldTypeAlreadyRegistered, FieldTypeDoesNotExist
|
||||
from .models import SelectOption
|
||||
|
||||
|
||||
class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
|
||||
CustomFieldsInstanceMixin, ModelInstanceMixin, ImportExportMixin,
|
||||
Instance):
|
||||
class FieldType(
|
||||
MapAPIExceptionsInstanceMixin,
|
||||
APIUrlsInstanceMixin,
|
||||
CustomFieldsInstanceMixin,
|
||||
ModelInstanceMixin,
|
||||
ImportExportMixin,
|
||||
Instance,
|
||||
):
|
||||
"""
|
||||
This abstract class represents a custom field type that can be added to the
|
||||
field type registry. It must be extended so customisation can be done. Each field
|
||||
|
@ -126,7 +138,7 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :rtype: serializer.Field
        """

        raise NotImplementedError('Each must have his own get_serializer_field method.')
        raise NotImplementedError("Each must have his own get_serializer_field method.")

    def get_response_serializer_field(self, instance, **kwargs):
        """

@ -158,7 +170,7 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :rtype: str
        """

        return ''
        return ""

    def get_model_field(self, instance, **kwargs):
        """

@ -174,7 +186,7 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :rtype: model.Field
        """

        raise NotImplementedError('Each must have his own get_model_field method.')
        raise NotImplementedError("Each must have his own get_model_field method.")

    def after_model_generation(self, instance, model, field_name, manytomany_models):
        """

@ -329,8 +341,16 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :type user: User
        """

    def before_schema_change(self, from_field, to_field, from_model, to_model,
                             from_model_field, to_model_field, user):
    def before_schema_change(
        self,
        from_field,
        to_field,
        from_model,
        to_model,
        from_model_field,
        to_model_field,
        user,
    ):
        """
        This hook is called just before the database's schema change. In some cases
        some additional cleanup or creation of related instances has to happen if the

@ -354,8 +374,17 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :type user: User
        """

    def after_update(self, from_field, to_field, from_model, to_model, user, connection,
                     altered_column, before):
    def after_update(
        self,
        from_field,
        to_field,
        from_model,
        to_model,
        user,
        connection,
        altered_column,
        before,
    ):
        """
        This hook is called right after a field has been updated. In some cases data
        mutation still has to be done in order to maintain data integrity. For example
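As a sketch of what such a hook can be used for, an override of `after_update` on a hypothetical field type might backfill existing rows after the column has been altered. The placeholder value and the filter are purely illustrative, and the `field_<id>` attribute naming follows the generated-model convention used elsewhere in this diff:

    def after_update(
        self,
        from_field,
        to_field,
        from_model,
        to_model,
        user,
        connection,
        altered_column,
        before,
    ):
        # Illustrative only: give rows that lost their value during the schema
        # change a placeholder, via the generated model's field_<id> attribute.
        if altered_column:
            field_name = f"field_{to_field.id}"
            to_model.objects.filter(**{f"{field_name}__isnull": True}).update(
                **{field_name: "example placeholder"}
            )
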
@ -457,11 +486,11 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        """

        serialized = {
            'id': field.id,
            'type': self.type,
            'name': field.name,
            'order': field.order,
            'primary': field.primary
            "id": field.id,
            "type": self.type,
            "name": field.name,
            "order": field.order,
            "primary": field.primary,
        }

        if include_allowed_fields:

@ -469,12 +498,12 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
                serialized[field_name] = getattr(field, field_name)

        if self.can_have_select_options:
            serialized['select_options'] = [
            serialized["select_options"] = [
                {
                    'id': select_option.id,
                    'value': select_option.value,
                    'color': select_option.color,
                    'order': select_option.order,
                    "id": select_option.id,
                    "value": select_option.value,
                    "color": select_option.color,
                    "order": select_option.order,
                }
                for select_option in field.select_options.all()
            ]
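For a concrete sense of the shape of the `serialized` dictionary built above, the output for a hypothetical single select field could look roughly like this (all IDs and values are made up; `select_options` only appears for types where `can_have_select_options` is true, and any extra allowed fields are merged into the same flat dictionary):

    {
        "id": 42,
        "type": "single_select",
        "name": "Status",
        "order": 1,
        "primary": False,
        "select_options": [
            {"id": 7, "value": "Todo", "color": "blue", "order": 0},
            {"id": 8, "value": "Done", "color": "green", "order": 1},
        ],
    }
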
@ -498,33 +527,32 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        :rtype: Field
        """

        if 'database_fields' not in id_mapping:
            id_mapping['database_fields'] = {}
            id_mapping['database_field_select_options'] = {}
        if "database_fields" not in id_mapping:
            id_mapping["database_fields"] = {}
            id_mapping["database_field_select_options"] = {}

        serialized_copy = serialized_values.copy()
        field_id = serialized_copy.pop('id')
        serialized_copy.pop('type')
        field_id = serialized_copy.pop("id")
        serialized_copy.pop("type")
        select_options = (
            serialized_copy.pop('select_options')
            if self.can_have_select_options else
            []
            serialized_copy.pop("select_options")
            if self.can_have_select_options
            else []
        )
        field = self.model_class.objects.create(table=table, **serialized_copy)

        id_mapping['database_fields'][field_id] = field.id
        id_mapping["database_fields"][field_id] = field.id

        if self.can_have_select_options:
            for select_option in select_options:
                select_option_copy = select_option.copy()
                select_option_id = select_option_copy.pop('id')
                select_option_id = select_option_copy.pop("id")
                select_option_object = SelectOption.objects.create(
                    field=field,
                    **select_option_copy
                )
                id_mapping['database_field_select_options'][select_option_id] = (
                    select_option_object.id
                    field=field, **select_option_copy
                )
                id_mapping["database_field_select_options"][
                    select_option_id
                ] = select_option_object.id

        return field

@ -565,15 +593,16 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
        setattr(row, field_name, value)


class FieldTypeRegistry(APIUrlsRegistryMixin, CustomFieldsRegistryMixin,
                        ModelRegistryMixin, Registry):
class FieldTypeRegistry(
    APIUrlsRegistryMixin, CustomFieldsRegistryMixin, ModelRegistryMixin, Registry
):
    """
    With the field type registry it is possible to register new field types. A field
    type is an abstraction made specifically for Baserow. If added to the registry a
    user can create new fields based on this type.
    """

    name = 'field'
    name = "field"
    does_not_exist_exception_class = FieldTypeDoesNotExist
    already_registered_exception_class = FieldTypeAlreadyRegistered

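Continuing the hypothetical field type sketched earlier, making it available would presumably come down to registering it on this registry, assuming the module exposes the usual module-level `field_type_registry = FieldTypeRegistry()` instance and that registration happens somewhere like an app config's `ready()` hook:

from baserow.contrib.database.fields.registries import field_type_registry

field_type_registry.register(ExampleTextFieldType())
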
@ -639,11 +668,21 @@ class FieldConverter(Instance):
        :rtype: bool
        """

        raise NotImplementedError('Each field converter must have an is_applicable '
                                  'method.')
        raise NotImplementedError(
            "Each field converter must have an is_applicable " "method."
        )

    def alter_field(self, from_field, to_field, from_model, to_model,
                    from_model_field, to_model_field, user, connection):
    def alter_field(
        self,
        from_field,
        to_field,
        from_model,
        to_model,
        from_model_field,
        to_model_field,
        user,
        connection,
    ):
        """
        Should perform the schema change and changes related to the field change. It
        must bring the field's schema into the desired state.

@ -668,8 +707,9 @@ class FieldConverter(Instance):
        :type connection: DatabaseWrapper
        """

        raise NotImplementedError('Each field converter must have an alter_field '
                                  'method.')
        raise NotImplementedError(
            "Each field converter must have an alter_field " "method."
        )


class FieldConverterRegistry(Registry):

@ -680,7 +720,7 @@ class FieldConverterRegistry(Registry):
    default lenient schema editor does not work.
    """

    name = 'field_converter'
    name = "field_converter"

    def find_applicable_converter(self, *args, **kwargs):
        """
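A converter that plugs into this registry is a small class extending the `FieldConverter` shown above; the sketch below is purely illustrative (`ExampleConverter` and its `type` string are made up, the `is_applicable` signature is an assumption since only `alter_field` is spelled out in this diff, and the body is left to the implementer):

class ExampleConverter(FieldConverter):
    type = "example-converter"

    def is_applicable(self, connection, from_field, to_field):
        # Assumed signature: return True when this converter should handle the
        # change from from_field to to_field instead of the lenient editor.
        return False

    def alter_field(
        self,
        from_field,
        to_field,
        from_model,
        to_model,
        from_model_field,
        to_model_field,
        user,
        connection,
    ):
        # Bring the column into the desired state, for example by removing and
        # re-creating it on the generated models or running raw SQL on the
        # given connection.
        ...
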
@ -12,17 +12,19 @@ from baserow.contrib.database.rows.handler import RowHandler


class Command(BaseCommand):
    help = 'Fills a table with random data.'
    help = "Fills a table with random data."

    def add_arguments(self, parser):
        parser.add_argument('table_id', type=int, help='The table that needs to be '
                                                       'filled.')
        parser.add_argument('limit', type=int, help='Amount of rows that need to be '
                                                    'inserted.')
        parser.add_argument(
            "table_id", type=int, help="The table that needs to be " "filled."
        )
        parser.add_argument(
            "limit", type=int, help="Amount of rows that need to be " "inserted."
        )

    def handle(self, *args, **options):
        table_id = options['table_id']
        limit = options['limit']
        table_id = options["table_id"]
        limit = options["limit"]
        fake = Faker()
        row_handler = RowHandler()
        cache = {}

@ -30,25 +32,24 @@ class Command(BaseCommand):
        try:
            table = Table.objects.get(pk=table_id)
        except Table.DoesNotExist:
            self.stdout.write(self.style.ERROR(f"The table with id {table_id} was not "
                                               f"found."))
            self.stdout.write(
                self.style.ERROR(f"The table with id {table_id} was not " f"found.")
            )
            sys.exit(1)

        model = table.get_model()

        # Find out what the highest order is because we want to append the new rows.
        order = ceil(
            model.objects.aggregate(max=Max('order')).get('max') or Decimal('0')
            model.objects.aggregate(max=Max("order")).get("max") or Decimal("0")
        )

        for i in range(0, limit):
            # Based on the random_value function we have for each type we can
            # build a dict with a random value for each field.
            values = {
                f'field_{field_id}': field_object['type'].random_value(
                    field_object['field'],
                    fake,
                    cache
                f"field_{field_id}": field_object["type"].random_value(
                    field_object["field"], fake, cache
                )
                for field_id, field_object in model._field_objects.items()
            }

@ -56,8 +57,8 @@ class Command(BaseCommand):
            values, manytomany_values = row_handler.extract_manytomany_values(
                values, model
            )
            order += Decimal('1')
            values['order'] = order
            order += Decimal("1")
            values["order"] = order

            # Insert the row with the randomly created values.
            instance = model.objects.create(**values)

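In practice this command takes the two positional arguments registered above, so filling table 10 with 100 random rows would presumably be a matter of running something like `fill_table 10 100` through the backend's Django management entry point (the exact invocation depends on how the backend is installed). Because each generated row gets an `order` just past the current maximum, the random data is appended after any existing rows.
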
@ -3,18 +3,38 @@ from baserow.core.models import Application
from .table.models import Table
from .views.models import View, GridView, GridViewFieldOptions, ViewFilter
from .fields.models import (
    Field, TextField, NumberField, LongTextField, BooleanField, DateField, LinkRowField,
    URLField, EmailField, PhoneNumberField
    Field,
    TextField,
    NumberField,
    LongTextField,
    BooleanField,
    DateField,
    LinkRowField,
    URLField,
    EmailField,
    PhoneNumberField,
)
from .tokens.models import Token, TokenPermission

__all__ = [
    'Database',
    'Table',
    'View', 'GridView', 'GridViewFieldOptions', 'ViewFilter',
    'Field', 'TextField', 'NumberField', 'LongTextField', 'BooleanField', 'DateField',
    'LinkRowField', 'URLField', 'EmailField', 'PhoneNumberField',
    'Token', 'TokenPermission'
    "Database",
    "Table",
    "View",
    "GridView",
    "GridViewFieldOptions",
    "ViewFilter",
    "Field",
    "TextField",
    "NumberField",
    "LongTextField",
    "BooleanField",
    "DateField",
    "LinkRowField",
    "URLField",
    "EmailField",
    "PhoneNumberField",
    "Token",
    "TokenPermission",
]