Mirror of https://gitlab.com/bramw/baserow.git, synced 2025-04-17 10:22:36 +00:00

Resolve "Trash (never delete anything)"

This commit is contained in: commit 6e5ce3846a (parent eecac39d35)
119 changed files with 6266 additions and 655 deletions
backend
  requirements
  src/baserow
    api
    config/settings
    contrib/database
      api/rows
      application_types.py, config.py, fields
      management/commands
      migrations
      mixins.py, rows
      table
      tokens
      trash
      views
      ws/fields
    core
    ws
  tests/baserow
    api
      applications
      groups
      trash
    contrib/database
      api
      export
      field
      rows
      table
      trash
      view
      ws
    core
    performance
    ws
docs/getting-started
premium/backend
web-frontend/modules/core
  assets/scss
    components
      all.scss, infinite_scroll.scss, modal.scss, modal_sidebar.scss, notifications.scss, trash.scss, trash_entry.scss
    helpers.scss
  components
  module.js
  pages
  plugins
@@ -10,3 +10,4 @@ watchdog==1.0.2
argh==0.26.2
black==20.8b1
django-capture-on-commit-callbacks==1.4.0
pyinstrument==3.4.2

@@ -71,7 +71,7 @@ class AllApplicationsView(APIView):
        applications = Application.objects.select_related(
            "content_type", "group"
        ).filter(group__users__in=[request.user])
        ).filter(group__users__in=[request.user], group__trashed=False)

        data = [
            get_application_serializer(application).data for application in applications
0 backend/src/baserow/api/trash/__init__.py Normal file

26 backend/src/baserow/api/trash/errors.py Normal file
@@ -0,0 +1,26 @@
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND

ERROR_TRASH_ITEM_DOES_NOT_EXIST = (
    "ERROR_TRASH_ITEM_DOES_NOT_EXIST",
    HTTP_404_NOT_FOUND,
    "The requested trash item does not exist.",
)

ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD = (
    "ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD",
    HTTP_400_BAD_REQUEST,
    "Cannot restore a trashed item if its parent is also trashed, please restore the "
    "parent first.",
)

ERROR_PARENT_ID_MUST_BE_PROVIDED = (
    "ERROR_PARENT_ID_MUST_BE_PROVIDED",
    HTTP_400_BAD_REQUEST,
    "A parent id must be provided when using this trashable item type.",
)

ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED = (
    "ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED",
    HTTP_400_BAD_REQUEST,
    "A parent id must NOT be provided when using this trashable item type.",
)
64 backend/src/baserow/api/trash/serializers.py Normal file
@@ -0,0 +1,64 @@
from django.utils.functional import lazy
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers, fields

from baserow.api.mixins import UnknownFieldRaisesExceptionSerializerMixin
from baserow.core.models import TrashEntry, Application
from baserow.core.trash.registries import trash_item_type_registry


class TrashEntryRequestSerializer(
    UnknownFieldRaisesExceptionSerializerMixin, serializers.Serializer
):
    trash_item_id = serializers.IntegerField(min_value=0)
    parent_trash_item_id = serializers.IntegerField(
        min_value=0, required=False, allow_null=True
    )
    trash_item_type = fields.ChoiceField(
        choices=lazy(trash_item_type_registry.get_types, list)(),
    )


class TrashStructureApplicationSerializer(serializers.ModelSerializer):
    class Meta:
        model = Application
        fields = ("id", "name", "trashed")


class TrashStructureGroupSerializer(serializers.Serializer):
    id = serializers.IntegerField(min_value=0)
    trashed = serializers.BooleanField()
    name = serializers.CharField()
    applications = TrashStructureApplicationSerializer(many=True)


class TrashStructureSerializer(serializers.Serializer):
    groups = TrashStructureGroupSerializer(many=True)


class TrashContentsSerializer(serializers.ModelSerializer):
    user_who_trashed = serializers.SerializerMethodField()

    @extend_schema_field(OpenApiTypes.STR)
    def get_user_who_trashed(self, instance):
        if instance.user_who_trashed is not None:
            return instance.user_who_trashed.first_name
        else:
            return None

    class Meta:
        model = TrashEntry
        fields = (
            "id",
            "user_who_trashed",
            "trash_item_type",
            "trash_item_id",
            "parent_trash_item_id",
            "trashed_at",
            "application",
            "group",
            "name",
            "parent_name",
            "extra_description",
        )
19 backend/src/baserow/api/trash/urls.py Normal file
@@ -0,0 +1,19 @@
from django.conf.urls import url

from .views import TrashContentsView, TrashStructureView, TrashItemView

app_name = "baserow.api.trash"

urlpatterns = [
    url(r"^$", TrashStructureView.as_view(), name="list"),
    url(
        r"^group/(?P<group_id>[0-9]+)/$",
        TrashContentsView.as_view(),
        name="contents",
    ),
    url(
        r"^restore/$",
        TrashItemView.as_view(),
        name="restore",
    ),
]
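For reference, restoring something from the trash is a PATCH to the new restore endpoint with the fields defined by TrashEntryRequestSerializer. A minimal client-side sketch, assuming the API is reachable at http://localhost:8000/api/ and that TOKEN is a valid JWT for a user in the item's group (host, token and ids are hypothetical):

import requests

# Hypothetical host, token and ids; the payload mirrors TrashEntryRequestSerializer.
response = requests.patch(
    "http://localhost:8000/api/trash/restore/",
    json={"trash_item_type": "table", "trash_item_id": 42},
    headers={"Authorization": "JWT TOKEN"},
)
assert response.status_code == 204  # the view returns no body on success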
227 backend/src/baserow/api/trash/views.py Normal file
@@ -0,0 +1,227 @@
from django.conf import settings
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from baserow.api.applications.errors import (
    ERROR_APPLICATION_DOES_NOT_EXIST,
    ERROR_APPLICATION_NOT_IN_GROUP,
)
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.errors import (
    ERROR_GROUP_DOES_NOT_EXIST,
    ERROR_USER_NOT_IN_GROUP,
)
from baserow.api.pagination import PageNumberPagination
from baserow.api.schemas import get_error_schema
from baserow.api.serializers import get_example_pagination_serializer_class
from baserow.core.exceptions import (
    UserNotInGroup,
    ApplicationNotInGroup,
    GroupDoesNotExist,
    ApplicationDoesNotExist,
    TrashItemDoesNotExist,
)
from baserow.core.trash.handler import TrashHandler
from .errors import (
    ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD,
    ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED,
    ERROR_PARENT_ID_MUST_BE_PROVIDED,
    ERROR_TRASH_ITEM_DOES_NOT_EXIST,
)
from .serializers import (
    TrashContentsSerializer,
    TrashStructureSerializer,
    TrashEntryRequestSerializer,
)
from ...core.trash.exceptions import (
    CannotRestoreChildBeforeParent,
    ParentIdMustNotBeProvidedException,
    ParentIdMustBeProvidedException,
)


class TrashItemView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        tags=["Trash"],
        operation_id="restore",
        description="Restores the specified trashed item back into baserow.",
        request=TrashEntryRequestSerializer,
        responses={
            204: None,
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_TRASH_ITEM_DOES_NOT_EXIST",
                    "ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD",
                    "ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED",
                    "ERROR_PARENT_ID_MUST_BE_PROVIDED",
                ]
            ),
        },
    )
    @validate_body(TrashEntryRequestSerializer)
    @map_exceptions(
        {
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
            TrashItemDoesNotExist: ERROR_TRASH_ITEM_DOES_NOT_EXIST,
            CannotRestoreChildBeforeParent: ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD,
            ParentIdMustNotBeProvidedException: ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED,
            ParentIdMustBeProvidedException: ERROR_PARENT_ID_MUST_BE_PROVIDED,
        }
    )
    def patch(self, request, data):
        """
        Restores the specified trashable item if it is in the trash and the user is
        in the item's group.
        """

        TrashHandler.restore_item(
            request.user,
            data["trash_item_type"],
            data["trash_item_id"],
            parent_trash_item_id=data.get("parent_trash_item_id", None),
        )
        return Response(status=204)


class TrashContentsView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="group_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Returns the trash for the group with this id.",
            ),
            OpenApiParameter(
                name="application_id",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.INT,
                description="Optionally filters down the trash to only items for "
                "this application in the group.",
            ),
            OpenApiParameter(
                name="page",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.INT,
                description="Selects which page of trash contents should be returned.",
            ),
        ],
        tags=["Trash"],
        operation_id="get_contents",
        description="Responds with trash contents for a group optionally "
        "filtered to a specific application.",
        responses={
            200: get_example_pagination_serializer_class(TrashContentsSerializer),
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_APPLICATION_NOT_IN_GROUP",
                    "ERROR_GROUP_DOES_NOT_EXIST",
                    "ERROR_APPLICATION_DOES_NOT_EXIST",
                ]
            ),
        },
    )
    @map_exceptions(
        {
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
            ApplicationNotInGroup: ERROR_APPLICATION_NOT_IN_GROUP,
            GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
            ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
        }
    )
    def get(self, request, group_id):
        """
        Responds with any trashed items in the group or application, including an
        entry for the group/app if they themselves are trashed.
        """

        application_id = request.GET.get("application_id", None)
        trash_contents = TrashHandler.get_trash_contents(
            request.user, group_id, application_id
        )
        paginator = PageNumberPagination(limit_page_size=settings.TRASH_PAGE_SIZE_LIMIT)
        page = paginator.paginate_queryset(trash_contents, request, self)
        serializer = TrashContentsSerializer(page, many=True)

        return paginator.get_paginated_response(serializer.data)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="group_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="The group whose trash contents to empty, including the "
                "group itself if it is also trashed.",
            ),
            OpenApiParameter(
                name="application_id",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.INT,
                description="Optionally filters down the trash to delete to only items "
                "for this application in the group.",
            ),
        ],
        tags=["Trash"],
        operation_id="empty_contents",
        description="Empties the specified group and/or application of trash, including"
        " the group and application themselves if they are trashed also.",
        responses={
            204: None,
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_APPLICATION_NOT_IN_GROUP",
                    "ERROR_GROUP_DOES_NOT_EXIST",
                    "ERROR_APPLICATION_DOES_NOT_EXIST",
                ]
            ),
        },
    )
    @map_exceptions(
        {
            UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
            ApplicationNotInGroup: ERROR_APPLICATION_NOT_IN_GROUP,
            GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
            ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
        }
    )
    def delete(self, request, group_id):
        """
        Empties the group and/or application of trash permanently deleting any trashed
        contents, including the group and application if they are also trashed.
        """

        application_id = request.GET.get("application_id", None)
        TrashHandler.empty(request.user, group_id, application_id)
        return Response(status=204)


class TrashStructureView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        tags=["Trash"],
        operation_id="get_trash_structure",
        description="Responds with the groups and applications available for the "
        "requesting user to inspect the trash contents of.",
        responses={
            200: TrashStructureSerializer,
        },
    )
    def get(self, request):
        """
        Responds with the structure of the trash for the user.
        """

        structure = TrashHandler.get_trash_structure(request.user)
        return Response(TrashStructureSerializer(structure).data)
@@ -10,6 +10,7 @@ from .user_files import urls as user_files_urls
from .groups import urls as group_urls
from .templates import urls as templates_urls
from .applications import urls as application_urls
from .trash import urls as trash_urls


app_name = "baserow.api"

@@ -28,6 +29,7 @@ urlpatterns = (
        path("groups/", include(group_urls, namespace="groups")),
        path("templates/", include(templates_urls, namespace="templates")),
        path("applications/", include(application_urls, namespace="applications")),
        path("trash/", include(trash_urls, namespace="trash")),
    ]
    + application_type_registry.api_urls
    + plugin_registry.api_urls
@@ -82,6 +82,10 @@ CELERY_BROKER_URL = REDIS_URL
CELERY_TASK_ROUTES = {
    "baserow.contrib.database.export.tasks.run_export_job": {"queue": "export"},
    "baserow.contrib.database.export.tasks.clean_up_old_jobs": {"queue": "export"},
    "baserow.core.trash.tasks.mark_old_trash_for_permanent_deletion": {
        "queue": "export"
    },
    "baserow.core.trash.tasks.permanently_delete_marked_trash": {"queue": "export"},
}
CELERY_SOFT_TIME_LIMIT = 60 * 5
CELERY_TIME_LIMIT = CELERY_SOFT_TIME_LIMIT + 60

@@ -221,6 +225,7 @@ SPECTACULAR_SETTINGS = {
        {"name": "Groups"},
        {"name": "Group invitations"},
        {"name": "Templates"},
        {"name": "Trash"},
        {"name": "Applications"},
        {"name": "Database tables"},
        {"name": "Database table fields"},

@@ -260,7 +265,8 @@ if PRIVATE_BACKEND_HOSTNAME:

FROM_EMAIL = os.getenv("FROM_EMAIL", "no-reply@localhost")
RESET_PASSWORD_TOKEN_MAX_AGE = 60 * 60 * 48  # 48 hours
ROW_PAGE_SIZE_LIMIT = 200  # Indicates how many rows can be requested at once.
ROW_PAGE_SIZE_LIMIT = 200  # How many rows can be requested at once.
TRASH_PAGE_SIZE_LIMIT = 200  # How many trash entries can be requested at once.

# The amount of rows that can be imported when creating a table.
INITIAL_TABLE_DATA_LIMIT = None

@@ -309,3 +315,10 @@ APPLICATION_TEMPLATES_DIR = os.path.join(BASE_DIR, "../../../templates")
DEFAULT_APPLICATION_TEMPLATE = "project-management"

MAX_FIELD_LIMIT = 1500

# If you change this default please also update the default for the web-frontend found
# in web-frontend/modules/core/module.js:55
HOURS_UNTIL_TRASH_PERMANENTLY_DELETED = os.getenv(
    "HOURS_UNTIL_TRASH_PERMANENTLY_DELETED", 24 * 3
)
OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES = 5
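The two new trash tasks are only routed to the export queue here; how they get scheduled is not part of this hunk. The following is an illustrative sketch of the kind of Celery beat entry that could drive them from OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES, not necessarily Baserow's actual wiring:

from datetime import timedelta

# Illustrative beat schedule; the task paths are the ones routed above, the
# schedule itself is an assumption.
CELERY_BEAT_SCHEDULE = {
    "mark-old-trash-for-permanent-deletion": {
        "task": "baserow.core.trash.tasks.mark_old_trash_for_permanent_deletion",
        "schedule": timedelta(minutes=OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES),
    },
    "permanently-delete-marked-trash": {
        "task": "baserow.core.trash.tasks.permanently_delete_marked_trash",
        "schedule": timedelta(minutes=OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES),
    },
}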
@@ -1,6 +1,5 @@
from rest_framework.status import HTTP_404_NOT_FOUND


ERROR_ROW_DOES_NOT_EXIST = (
    "ERROR_ROW_DOES_NOT_EXIST",
    HTTP_404_NOT_FOUND,
@ -1,4 +1,5 @@
|
|||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import transaction
|
||||
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema
|
||||
|
@ -8,6 +9,7 @@ from rest_framework.views import APIView
|
|||
|
||||
from baserow.api.decorators import map_exceptions
|
||||
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
|
||||
from baserow.api.exceptions import RequestBodyValidationException
|
||||
from baserow.api.pagination import PageNumberPagination
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.api.user_files.errors import ERROR_USER_FILE_DOES_NOT_EXIST
|
||||
|
@ -315,7 +317,13 @@ class RowsView(APIView):
|
|||
else None
|
||||
)
|
||||
|
||||
row = RowHandler().create_row(request.user, table, data, model, before=before)
|
||||
try:
|
||||
row = RowHandler().create_row(
|
||||
request.user, table, data, model, before=before
|
||||
)
|
||||
except ValidationError as e:
|
||||
raise RequestBodyValidationException(detail=e.message)
|
||||
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
)
|
||||
|
@ -458,7 +466,10 @@ class RowView(APIView):
|
|||
validation_serializer = get_row_serializer_class(model, field_ids=field_ids)
|
||||
data = validate_data(validation_serializer, request.data)
|
||||
|
||||
row = RowHandler().update_row(request.user, table, row_id, data, model)
|
||||
try:
|
||||
row = RowHandler().update_row(request.user, table, row_id, data, model)
|
||||
except ValidationError as e:
|
||||
raise RequestBodyValidationException(detail=e.message)
|
||||
|
||||
serializer_class = get_row_serializer_class(
|
||||
model, RowSerializer, is_response=True
|
||||
|
|
|
@ -1,15 +1,14 @@
|
|||
from django.core.management.color import no_style
|
||||
from django.urls import path, include
|
||||
from django.db import connections
|
||||
from django.conf import settings
|
||||
from django.core.management.color import no_style
|
||||
from django.db import connections
|
||||
from django.urls import path, include
|
||||
|
||||
from baserow.core.registries import ApplicationType
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
from baserow.contrib.database.views.registries import view_type_registry
|
||||
|
||||
from .models import Database, Table
|
||||
from .table.handler import TableHandler
|
||||
from baserow.core.registries import ApplicationType
|
||||
from .api.serializers import DatabaseSerializer
|
||||
from .models import Database, Table
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
class DatabaseApplicationType(ApplicationType):
|
||||
|
@ -23,11 +22,14 @@ class DatabaseApplicationType(ApplicationType):
|
|||
handler.
|
||||
"""
|
||||
|
||||
database_tables = database.table_set.all().select_related("database__group")
|
||||
table_handler = TableHandler()
|
||||
database_tables = (
|
||||
database.table_set(manager="objects_and_trash")
|
||||
.all()
|
||||
.select_related("database__group")
|
||||
)
|
||||
|
||||
for table in database_tables:
|
||||
table_handler._delete_table(table)
|
||||
TrashHandler.permanently_delete(table)
|
||||
|
||||
def get_api_urls(self):
|
||||
from .api import urls as api_urls
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
from django.apps import AppConfig
|
||||
|
||||
from baserow.core.registries import plugin_registry, application_type_registry
|
||||
from baserow.core.registries import (
|
||||
plugin_registry,
|
||||
application_type_registry,
|
||||
)
|
||||
from baserow.core.trash.registries import trash_item_type_registry
|
||||
from baserow.ws.registries import page_registry
|
||||
|
||||
|
||||
|
@ -141,6 +145,16 @@ class DatabaseConfig(AppConfig):
|
|||
|
||||
table_exporter_registry.register(CsvTableExporter())
|
||||
|
||||
from .trash.trash_types import (
|
||||
TableTrashableItemType,
|
||||
RowTrashableItemType,
|
||||
FieldTrashableItemType,
|
||||
)
|
||||
|
||||
trash_item_type_registry.register(TableTrashableItemType())
|
||||
trash_item_type_registry.register(FieldTrashableItemType())
|
||||
trash_item_type_registry.register(RowTrashableItemType())
|
||||
|
||||
# The signals must always be imported last because they use the registries
|
||||
# which need to be filled first.
|
||||
import baserow.contrib.database.ws.signals # noqa: F403, F401
|
||||
|
|
|
@ -2,6 +2,7 @@ from collections import defaultdict
|
|||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from random import randrange, randint
|
||||
from typing import Any, Callable, Dict, List
|
||||
|
||||
from dateutil import parser
|
||||
from dateutil.parser import ParserError
|
||||
|
@ -497,8 +498,47 @@ class LinkRowFieldType(FieldType):
|
|||
)
|
||||
|
||||
def get_export_value(self, value, field_object):
|
||||
instance = field_object["field"]
|
||||
def map_to_export_value(inner_value, inner_field_object):
|
||||
return inner_field_object["type"].get_export_value(
|
||||
inner_value, inner_field_object
|
||||
)
|
||||
|
||||
return self._get_and_map_pk_values(field_object, value, map_to_export_value)
|
||||
|
||||
def get_human_readable_value(self, value, field_object):
|
||||
def map_to_human_readable_value(inner_value, inner_field_object):
|
||||
return inner_field_object["type"].get_human_readable_value(
|
||||
inner_value, inner_field_object
|
||||
)
|
||||
|
||||
return ",".join(
|
||||
self._get_and_map_pk_values(
|
||||
field_object, value, map_to_human_readable_value
|
||||
)
|
||||
)
|
||||
|
||||
def _get_and_map_pk_values(
|
||||
self, field_object, value, map_func: Callable[[Any, Dict[str, Any]], Any]
|
||||
):
|
||||
"""
|
||||
Helper function which given a linked row field pointing at another model,
|
||||
constructs a list of the related row's primary key values which are mapped by
|
||||
the provided map_func function.
|
||||
|
||||
For example, Table A has Field 1 which links to Table B. Table B has a text
|
||||
primary key column. This function takes the value for a single row of
|
||||
Field 1, which is a number of related rows in Table B. It then gets
|
||||
the primary key column values for those related rows in Table B and applies
|
||||
map_func to each individual value. Finally it returns those mapped values as a
|
||||
list.
|
||||
|
||||
:param value: The value of the link field in a specific row.
|
||||
:param field_object: The field object for the link field.
|
||||
:param map_func: A function to apply to each linked primary key value.
|
||||
:return: A list of mapped linked primary key values.
|
||||
"""
|
||||
|
||||
instance = field_object["field"]
|
||||
if hasattr(instance, "_related_model"):
|
||||
related_model = instance._related_model
|
||||
primary_field = next(
|
||||
|
@ -508,28 +548,27 @@ class LinkRowFieldType(FieldType):
|
|||
)
|
||||
if primary_field:
|
||||
primary_field_name = primary_field["name"]
|
||||
primary_field_type = primary_field["type"]
|
||||
primary_field_values = []
|
||||
for sub in value.all():
|
||||
# Ensure we also convert the value from the other table to its
|
||||
# export form as it could be an odd field type!
|
||||
# appropriate form as it could be an odd field type!
|
||||
linked_value = getattr(sub, primary_field_name)
|
||||
if self._is_unnamed_primary_field_value(linked_value):
|
||||
export_linked_value = f"unnamed row {sub.id}"
|
||||
linked_pk_value = f"unnamed row {sub.id}"
|
||||
else:
|
||||
export_linked_value = primary_field_type.get_export_value(
|
||||
linked_pk_value = map_func(
|
||||
getattr(sub, primary_field_name), primary_field
|
||||
)
|
||||
primary_field_values.append(export_linked_value)
|
||||
primary_field_values.append(linked_pk_value)
|
||||
return primary_field_values
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def _is_unnamed_primary_field_value(primary_field_value):
|
||||
"""
|
||||
Checks if the value for a linked primary field is considered "empty".
|
||||
Checks if the value for a linked primary field is considered "unnamed".
|
||||
:param primary_field_value: The value of a primary field row in a linked table.
|
||||
:return: If this value is considered an empty primary field value.
|
||||
:return: If this value is considered an unnamed primary field value.
|
||||
"""
|
||||
|
||||
if isinstance(primary_field_value, list):
|
||||
|
@ -780,7 +819,7 @@ class LinkRowFieldType(FieldType):
|
|||
)
|
||||
to_field.save()
|
||||
|
||||
def after_delete(self, field, model, user, connection):
|
||||
def after_delete(self, field, model, connection):
|
||||
"""
|
||||
After the field has been deleted we also need to delete the related field.
|
||||
"""
|
||||
|
@ -883,6 +922,9 @@ class LinkRowFieldType(FieldType):
|
|||
):
|
||||
getattr(row, field_name).set(value)
|
||||
|
||||
def get_related_items_to_trash(self, field) -> List[Any]:
|
||||
return [field.link_row_related_field]
|
||||
|
||||
|
||||
class EmailFieldType(FieldType):
|
||||
type = "email"
|
||||
|
@ -1004,6 +1046,15 @@ class FileFieldType(FieldType):
|
|||
|
||||
return files
|
||||
|
||||
def get_human_readable_value(self, value, field_object):
|
||||
file_names = []
|
||||
for file in value:
|
||||
file_names.append(
|
||||
file["visible_name"],
|
||||
)
|
||||
|
||||
return ",".join(file_names)
|
||||
|
||||
def get_response_serializer_field(self, instance, **kwargs):
|
||||
return FileFieldResponseSerializer(many=True, required=False, **kwargs)
|
||||
|
||||
|
@ -1015,7 +1066,7 @@ class FileFieldType(FieldType):
|
|||
)
|
||||
|
||||
def get_model_field(self, instance, **kwargs):
|
||||
return JSONField(default=[], **kwargs)
|
||||
return JSONField(default=list, **kwargs)
|
||||
|
||||
def random_value(self, instance, fake, cache):
|
||||
"""
|
||||
|
|
|
@ -7,6 +7,7 @@ from django.db.utils import ProgrammingError, DataError
|
|||
|
||||
from baserow.contrib.database.db.schema import lenient_schema_editor
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.utils import extract_allowed, set_allowed_attrs
|
||||
from .exceptions import (
|
||||
PrimaryFieldAlreadyExists,
|
||||
|
@ -56,6 +57,9 @@ class FieldHandler:
|
|||
except Field.DoesNotExist:
|
||||
raise FieldDoesNotExist(f"The field with id {field_id} does not exist.")
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(field.table, check_item_also=True):
|
||||
raise FieldDoesNotExist(f"The field with id {field_id} does not exist.")
|
||||
|
||||
return field
|
||||
|
||||
def create_field(
|
||||
|
@ -326,22 +330,8 @@ class FieldHandler:
|
|||
)
|
||||
|
||||
field = field.specific
|
||||
field_type = field_type_registry.get_by_model(field)
|
||||
|
||||
# Remove the field from the table schema.
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
with connection.schema_editor() as schema_editor:
|
||||
from_model = field.table.get_model(field_ids=[], fields=[field])
|
||||
model_field = from_model._meta.get_field(field.db_column)
|
||||
schema_editor.remove_field(from_model, model_field)
|
||||
|
||||
TrashHandler.trash(user, group, field.table.database, field)
|
||||
field_id = field.id
|
||||
field.delete()
|
||||
|
||||
# After the field is deleted we are going to call the after_delete method of
|
||||
# the field type because some instance cleanup might need to happen.
|
||||
field_type.after_delete(field, from_model, user, connection)
|
||||
|
||||
field_deleted.send(self, field_id=field_id, field=field, user=user)
|
||||
|
||||
def update_field_select_options(self, user, field, select_options):
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
from django.db import models
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
|
||||
from baserow.core.utils import to_snake_case, remove_special_characters
|
||||
from baserow.contrib.database.mixins import ParentFieldTrashableModelMixin
|
||||
from baserow.core.mixins import (
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
TrashableModelMixin,
|
||||
)
|
||||
from baserow.core.utils import to_snake_case, remove_special_characters
|
||||
|
||||
NUMBER_TYPE_INTEGER = "INTEGER"
|
||||
NUMBER_TYPE_DECIMAL = "DECIMAL"
|
||||
|
@ -42,7 +44,11 @@ def get_default_field_content_type():
|
|||
|
||||
|
||||
class Field(
|
||||
CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixin, models.Model
|
||||
TrashableModelMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
models.Model,
|
||||
):
|
||||
"""
|
||||
Because each field type can have custom settings, for example precision for a number
|
||||
|
@ -98,7 +104,7 @@ class Field(
|
|||
return name
|
||||
|
||||
|
||||
class SelectOption(models.Model):
|
||||
class SelectOption(ParentFieldTrashableModelMixin, models.Model):
|
||||
value = models.CharField(max_length=255, blank=True)
|
||||
color = models.CharField(max_length=255, blank=True)
|
||||
order = models.PositiveIntegerField()
|
||||
|
@ -279,7 +285,7 @@ class LinkRowField(Field):
|
|||
@staticmethod
|
||||
def get_new_relation_id():
|
||||
last_id = (
|
||||
LinkRowField.objects.all().aggregate(
|
||||
LinkRowField.objects_and_trash.all().aggregate(
|
||||
largest=models.Max("link_row_relation_id")
|
||||
)["largest"]
|
||||
or 0
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
from typing import Any, List
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
from baserow.core.registry import (
|
||||
|
@ -411,7 +413,7 @@ class FieldType(
|
|||
:type before: any
|
||||
"""
|
||||
|
||||
def after_delete(self, field, model, user, connection):
|
||||
def after_delete(self, field, model, connection):
|
||||
"""
|
||||
This hook is called right after the field has been deleted and the schema
|
||||
change has been done.
|
||||
|
@ -420,8 +422,6 @@ class FieldType(
|
|||
:type field: Field
|
||||
:param model: The Django model that contains the deleted field.
|
||||
:type model: Model
|
||||
:param user: The user on whose behalf the delete is done.
|
||||
:type user: User
|
||||
:param connection: The connection used to make the database schema change.
|
||||
:type connection: DatabaseWrapper
|
||||
"""
|
||||
|
@ -616,6 +616,43 @@ class FieldType(
|
|||
|
||||
return value
|
||||
|
||||
def get_human_readable_value(self, value: Any, field_object) -> str:
|
||||
"""
|
||||
Should convert the value of the provided field to a human readable string for
|
||||
display purposes.
|
||||
|
||||
:param value: The value of the field extracted from a row to convert to human
|
||||
readable form.
|
||||
:param field_object: The field object for the field to extract
|
||||
:type field_object: FieldObject
|
||||
:return: A human readable string.
|
||||
"""
|
||||
|
||||
human_readable_value = self.get_export_value(value, field_object)
|
||||
if human_readable_value is None:
|
||||
return ""
|
||||
else:
|
||||
return str(human_readable_value)
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def get_related_items_to_trash(self, field) -> List[Any]:
|
||||
"""
|
||||
When a field of this type is trashed/restored, or the table it is in
|
||||
trashed/restored, this method should return any other trashable items that
|
||||
should be trashed or restored in tandem.
|
||||
|
||||
For example, a link field has an opposing link field in the other table that
|
||||
should also be trashed when it is trashed. And so for link fields this method
|
||||
is overridden to return the related field so it is trashed/restored correctly.
|
||||
|
||||
:param field: The specific instance of the field that is being trashed or whose
|
||||
table is being trashed.
|
||||
:return: A list of related trashable items that should be trashed or restored
|
||||
in tandem with this field or its table.
|
||||
"""
|
||||
|
||||
return []
|
||||
|
||||
|
||||
class FieldTypeRegistry(
|
||||
APIUrlsRegistryMixin, CustomFieldsRegistryMixin, ModelRegistryMixin, Registry
|
||||
|
|
|
@@ -2,5 +2,6 @@ from django.dispatch import Signal


field_created = Signal()
field_restored = Signal()
field_updated = Signal()
field_deleted = Signal()
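The new field_restored signal is consumed by the websocket layer later in this commit (see the ws/fields hunk at the end). Any other consumer hooks it with standard Django signal wiring; a purely illustrative receiver:

from django.dispatch import receiver

from baserow.contrib.database.fields.signals import field_restored


@receiver(field_restored)
def log_restored_field(sender, field, user, **kwargs):
    # Illustrative only; the real receiver in this commit broadcasts the
    # restored field to websocket subscribers of its table.
    print(f"field {field.id} was restored")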
@ -34,9 +34,7 @@ class Command(BaseCommand):
|
|||
def handle(self, *args, **options):
|
||||
table_id = options["table_id"]
|
||||
limit = options["limit"]
|
||||
fake = Faker()
|
||||
row_handler = RowHandler()
|
||||
cache = {}
|
||||
add_columns = "add_columns" in options and options["add_columns"]
|
||||
|
||||
try:
|
||||
table = Table.objects.get(pk=table_id)
|
||||
|
@ -46,52 +44,52 @@ class Command(BaseCommand):
|
|||
)
|
||||
sys.exit(1)
|
||||
|
||||
if "add_columns" in options and options["add_columns"]:
|
||||
self.create_a_column_for_every_type(table)
|
||||
|
||||
model = table.get_model()
|
||||
|
||||
# Find out what the highest order is because we want to append the new rows.
|
||||
order = ceil(
|
||||
model.objects.aggregate(max=Max("order")).get("max") or Decimal("0")
|
||||
)
|
||||
|
||||
for i in range(0, limit):
|
||||
# Based on the random_value function we have for each type we can
|
||||
# build a dict with a random value for each field.
|
||||
values = {
|
||||
f"field_{field_id}": field_object["type"].random_value(
|
||||
field_object["field"], fake, cache
|
||||
)
|
||||
for field_id, field_object in model._field_objects.items()
|
||||
}
|
||||
|
||||
values, manytomany_values = row_handler.extract_manytomany_values(
|
||||
values, model
|
||||
)
|
||||
order += Decimal("1")
|
||||
values["order"] = order
|
||||
|
||||
# Insert the row with the randomly created values.
|
||||
instance = model.objects.create(**values)
|
||||
|
||||
# Changes the set of the manytomany values.
|
||||
for field_name, value in manytomany_values.items():
|
||||
if value and len(value) > 0:
|
||||
getattr(instance, field_name).set(value)
|
||||
fill_table(limit, table, add_columns=add_columns)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(f"{limit} rows have been inserted."))
|
||||
|
||||
@staticmethod
|
||||
def create_a_column_for_every_type(table):
|
||||
field_handler = FieldHandler()
|
||||
all_kwargs_per_type = construct_all_possible_field_kwargs(None, None, None)
|
||||
for field_type_name, all_possible_kwargs in all_kwargs_per_type.items():
|
||||
if field_type_name == "link_row":
|
||||
continue
|
||||
i = 0
|
||||
for kwargs in all_possible_kwargs:
|
||||
i = i + 1
|
||||
field_handler.create_field(
|
||||
table.database.group.users.first(), table, field_type_name, **kwargs
|
||||
)
|
||||
|
||||
def fill_table(limit, table, add_columns=False):
|
||||
fake = Faker()
|
||||
row_handler = RowHandler()
|
||||
cache = {}
|
||||
if add_columns:
|
||||
create_a_column_for_every_type(table)
|
||||
model = table.get_model()
|
||||
# Find out what the highest order is because we want to append the new rows.
|
||||
order = ceil(model.objects.aggregate(max=Max("order")).get("max") or Decimal("0"))
|
||||
for i in range(0, limit):
|
||||
# Based on the random_value function we have for each type we can
|
||||
# build a dict with a random value for each field.
|
||||
values = {
|
||||
f"field_{field_id}": field_object["type"].random_value(
|
||||
field_object["field"], fake, cache
|
||||
)
|
||||
for field_id, field_object in model._field_objects.items()
|
||||
}
|
||||
|
||||
values, manytomany_values = row_handler.extract_manytomany_values(values, model)
|
||||
order += Decimal("1")
|
||||
values["order"] = order
|
||||
|
||||
# Insert the row with the randomly created values.
|
||||
instance = model.objects.create(**values)
|
||||
|
||||
# Changes the set of the manytomany values.
|
||||
for field_name, value in manytomany_values.items():
|
||||
if value and len(value) > 0:
|
||||
getattr(instance, field_name).set(value)
|
||||
|
||||
|
||||
def create_a_column_for_every_type(table):
|
||||
field_handler = FieldHandler()
|
||||
all_kwargs_per_type = construct_all_possible_field_kwargs(None, None, None)
|
||||
for field_type_name, all_possible_kwargs in all_kwargs_per_type.items():
|
||||
if field_type_name == "link_row":
|
||||
continue
|
||||
i = 0
|
||||
for kwargs in all_possible_kwargs:
|
||||
i = i + 1
|
||||
field_handler.create_field(
|
||||
table.database.group.users.first(), table, field_type_name, **kwargs
|
||||
)
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
# Generated by Django 2.2.11 on 2021-06-14 09:08
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models, connections
|
||||
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
# noinspection PyPep8Naming
|
||||
Table = apps.get_model("database", "Table")
|
||||
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
with connection.schema_editor() as tables_schema_editor:
|
||||
# We need to stop the transaction because we might need to lock a lot of tables
|
||||
# which could result in an out of memory exception.
|
||||
tables_schema_editor.atomic.__exit__(None, None, None)
|
||||
|
||||
for table in Table.objects.all():
|
||||
table_name = f"database_table_{table.id}"
|
||||
# Make the forward migration more idempotent / resilient to partially
|
||||
# applied migrations due to the lack of a transaction by using IF NOT
|
||||
# EXISTS.
|
||||
tables_schema_editor.execute(
|
||||
f"ALTER TABLE {table_name} "
|
||||
f"ADD COLUMN IF NOT EXISTS trashed boolean not null default false"
|
||||
)
|
||||
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
# noinspection PyPep8Naming
|
||||
Table = apps.get_model("database", "Table")
|
||||
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
with connection.schema_editor() as tables_schema_editor:
|
||||
# We need to stop the transaction because we might need to lock a lot of tables
|
||||
# which could result in an out of memory exception.
|
||||
tables_schema_editor.atomic.__exit__(None, None, None)
|
||||
|
||||
# apps.get_model doesn't return a model using our custom overridden managers
|
||||
# so we can safely use .objects which will return all trashed tables also
|
||||
for table in Table.objects.all():
|
||||
table_name = f"database_table_{table.id}"
|
||||
# Make the reverse migration more idempotent / resilient to partially
|
||||
# applied migrations due to the lack of a transaction by using IF EXISTS.
|
||||
tables_schema_editor.execute(
|
||||
f"ALTER TABLE {table_name} DROP COLUMN IF EXISTS trashed"
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("database", "0031_fix_url_field_max_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="field",
|
||||
name="trashed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="table",
|
||||
name="trashed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.RunPython(forward, reverse),
|
||||
]
|
4 backend/src/baserow/contrib/database/mixins.py Normal file

@@ -0,0 +1,4 @@
from baserow.core.mixins import make_trashable_mixin

ParentFieldTrashableModelMixin = make_trashable_mixin("field")
ParentTableTrashableModelMixin = make_trashable_mixin("table")
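make_trashable_mixin lives in baserow.core.mixins and its implementation is not part of the hunks shown here. Conceptually it returns a mixin whose default manager hides rows whose named parent relation is trashed, while an objects_and_trash manager still sees everything. The sketch below only illustrates that idea and is not the actual Baserow code:

from django.db import models


def make_parent_trashable_mixin_sketch(parent: str):
    # Illustrative sketch: hide rows whose parent (e.g. "field" or "table")
    # has trashed=True, but keep an escape hatch that includes them.
    class TrashAwareManager(models.Manager):
        def get_queryset(self):
            return super().get_queryset().filter(**{f"{parent}__trashed": False})

    class ParentTrashableMixin(models.Model):
        objects = TrashAwareManager()
        objects_and_trash = models.Manager()

        class Meta:
            abstract = True

    return ParentTrashableMixin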
@ -8,6 +8,7 @@ from django.db.models.fields.related import ManyToManyField
|
|||
from django.conf import settings
|
||||
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
from .exceptions import RowDoesNotExist
|
||||
from .signals import (
|
||||
|
@ -126,7 +127,7 @@ class RowHandler:
|
|||
|
||||
:param values: The values where to extract the manytomany values from.
|
||||
:type values: dict
|
||||
:param model: The model containing the fields. They key, which is also the
|
||||
:param model: The model containing the fields. The key, which is also the
|
||||
field name, is used to check in the model if the value is a ManyToMany
|
||||
value.
|
||||
:type model: Model
|
||||
|
@ -405,7 +406,8 @@ class RowHandler:
|
|||
)
|
||||
|
||||
row_id = row.id
|
||||
row.delete()
|
||||
|
||||
TrashHandler.trash(user, group, table.database, row, parent_id=table.id)
|
||||
|
||||
row_deleted.send(
|
||||
self,
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from django.db import connections
|
||||
from django.conf import settings
|
||||
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.utils import extract_allowed, set_allowed_attrs
|
||||
from baserow.contrib.database.fields.models import TextField
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
|
@ -45,6 +46,9 @@ class TableHandler:
|
|||
except Table.DoesNotExist:
|
||||
raise TableDoesNotExist(f"The table with id {table_id} does not exist.")
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(table):
|
||||
raise TableDoesNotExist(f"The table with id {table_id} does not exist.")
|
||||
|
||||
return table
|
||||
|
||||
def create_table(
|
||||
|
@ -304,16 +308,6 @@ class TableHandler:
|
|||
table.database.group.has_user(user, raise_error=True)
|
||||
table_id = table.id
|
||||
|
||||
self._delete_table(table)
|
||||
TrashHandler.trash(user, table.database.group, table.database, table)
|
||||
|
||||
table_deleted.send(self, table_id=table_id, table=table, user=user)
|
||||
|
||||
def _delete_table(self, table):
|
||||
"""Deletes the table schema and instance."""
|
||||
|
||||
connection = connections[settings.USER_TABLE_DATABASE]
|
||||
with connection.schema_editor() as schema_editor:
|
||||
model = table.get_model()
|
||||
schema_editor.delete_model(model)
|
||||
|
||||
table.delete()
|
||||
|
|
|
@ -17,7 +17,11 @@ from baserow.contrib.database.fields.field_filters import (
|
|||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
from baserow.contrib.database.views.exceptions import ViewFilterTypeNotAllowedForField
|
||||
from baserow.contrib.database.views.registries import view_filter_type_registry
|
||||
from baserow.core.mixins import OrderableMixin, CreatedAndUpdatedOnMixin
|
||||
from baserow.core.mixins import (
|
||||
OrderableMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
TrashableModelMixin,
|
||||
)
|
||||
|
||||
deconstruct_filter_key_regex = re.compile(r"filter__field_([0-9]+)__([a-zA-Z0-9_]*)$")
|
||||
|
||||
|
@ -184,13 +188,24 @@ class TableModelQuerySet(models.QuerySet):
|
|||
|
||||
class TableModelManager(models.Manager):
|
||||
def get_queryset(self):
|
||||
return TableModelQuerySet(self.model, using=self._db)
|
||||
return TableModelQuerySet(self.model, using=self._db).filter(trashed=False)
|
||||
|
||||
|
||||
FieldObject = Dict[str, Any]
|
||||
|
||||
|
||||
class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
||||
class GeneratedTableModel:
|
||||
"""
|
||||
This class is purely used to mark Model classes which have been generated by Baserow
|
||||
for identification using isinstance(possible_baserow_model, GeneratedTableModel).
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class Table(
|
||||
TrashableModelMixin, CreatedAndUpdatedOnMixin, OrderableMixin, models.Model
|
||||
):
|
||||
database = models.ForeignKey("database.Database", on_delete=models.CASCADE)
|
||||
order = models.PositiveIntegerField()
|
||||
name = models.CharField(max_length=255)
|
||||
|
@ -203,6 +218,9 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
queryset = Table.objects.filter(database=database)
|
||||
return cls.get_highest_order_of_queryset(queryset) + 1
|
||||
|
||||
def get_database_table_name(self):
|
||||
return f"database_table_{self.id}"
|
||||
|
||||
def get_model(
|
||||
self, fields=None, field_ids=None, attribute_names=False, manytomany_models=None
|
||||
):
|
||||
|
@ -241,7 +259,7 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
(),
|
||||
{
|
||||
"managed": False,
|
||||
"db_table": f"database_table_{self.id}",
|
||||
"db_table": self.get_database_table_name(),
|
||||
"app_label": app_label,
|
||||
"ordering": ["order", "id"],
|
||||
},
|
||||
|
@ -269,8 +287,13 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
),
|
||||
}
|
||||
|
||||
# Construct a query to fetch all the fields of that table.
|
||||
fields_query = self.field_set.all()
|
||||
# Construct a query to fetch all the fields of that table. We need to include
|
||||
# any trashed fields so the created model still has them present as the column
|
||||
# is still actually there. If the model did not have the trashed field
|
||||
# attributes then model.objects.create will fail as the trashed columns will
|
||||
# be given null values by django triggering not null constraints in the
|
||||
# database.
|
||||
fields_query = self.field_set(manager="objects_and_trash").all()
|
||||
|
||||
# If the field ids are provided we must only fetch the fields of which the ids
|
||||
# are in that list.
|
||||
|
@ -291,13 +314,17 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
# We will have to add each field to with the correct field name and model field
|
||||
# to the attribute list in order for the model to work.
|
||||
for field in fields:
|
||||
trashed = field.trashed
|
||||
field = field.specific
|
||||
field_type = field_type_registry.get_by_model(field)
|
||||
field_name = field.db_column
|
||||
# If attribute_names is True we will not use 'field_{id}' as attribute name,
|
||||
# but we will rather use a name the user provided.
|
||||
|
||||
if attribute_names:
|
||||
field_name = field.model_attribute_name
|
||||
if trashed:
|
||||
field_name = f"trashed_{field_name}"
|
||||
# If the field name already exists we will append '_field_{id}' to each
|
||||
# entry that is a duplicate.
|
||||
if field_name in attrs:
|
||||
|
@ -307,13 +334,15 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
if field_name in duplicate_field_names:
|
||||
field_name = f"{field_name}_{field.db_column}"
|
||||
|
||||
# Add the generated objects and information to the dict that optionally can
|
||||
# be returned.
|
||||
attrs["_field_objects"][field.id] = {
|
||||
"field": field,
|
||||
"type": field_type,
|
||||
"name": field_name,
|
||||
}
|
||||
if not trashed:
|
||||
# Add the generated objects and information to the dict that
|
||||
# optionally can be returned. We exclude trashed fields here so they
|
||||
# are not displayed by baserow anywhere.
|
||||
attrs["_field_objects"][field.id] = {
|
||||
"field": field,
|
||||
"type": field_type,
|
||||
"name": field_name,
|
||||
}
|
||||
|
||||
# Add the field to the attribute dict that is used to generate the model.
|
||||
# All the kwargs that are passed to the `get_model_field` method are going
|
||||
|
@ -326,6 +355,8 @@ class Table(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
model = type(
|
||||
str(f"Table{self.pk}Model"),
|
||||
(
|
||||
GeneratedTableModel,
|
||||
TrashableModelMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
models.Model,
|
||||
),
|
||||
|
|
|
@@ -1,13 +1,14 @@
from django.db import models
from django.contrib.auth import get_user_model

from baserow.core.mixins import ParentGroupTrashableModelMixin
from baserow.core.models import Group


User = get_user_model()


class Token(models.Model):
class Token(ParentGroupTrashableModelMixin, models.Model):
    """
    A token can be used to authenticate a user with the row create, read, update and
    delete endpoints.
220 backend/src/baserow/contrib/database/trash/trash_types.py Normal file
@@ -0,0 +1,220 @@
from typing import Optional, Any, List

from django.conf import settings
from django.db import connections

from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.signals import field_restored
from baserow.contrib.database.rows.signals import row_created
from baserow.contrib.database.table.models import Table, GeneratedTableModel
from baserow.contrib.database.table.signals import table_created
from baserow.core.exceptions import TrashItemDoesNotExist
from baserow.core.models import Application, TrashEntry
from baserow.core.trash.registries import TrashableItemType


class TableTrashableItemType(TrashableItemType):

    type = "table"
    model_class = Table

    def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
        return trashed_item.database

    def get_name(self, trashed_item: Table) -> str:
        return trashed_item.name

    def trashed_item_restored(self, trashed_item: Table, trash_entry: TrashEntry):
        table_created.send(
            self,
            table=trashed_item,
            user=None,
        )

    def permanently_delete_item(self, trashed_item: Table):
        """Deletes the table schema and instance."""

        connection = connections[settings.USER_TABLE_DATABASE]
        # We are using a different connection, and hence any outer transactions
        # calling this method using a different connection will not roll back
        # changes made to the USER_TABLE_DATABASE. Hence it is possible that the
        # table has already been deleted if this code previously ran, but then the
        # wrapping transaction failed causing the trashed_item to be restored.
        # So we check to see if the table still exists before attempting to
        # delete it to avoid a ProgrammingError being thrown by the schema_editor.
        with connection.cursor() as cursor:
            cursor.execute(
                """SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = 'public'
                    AND table_name = %s
                );""",
                [trashed_item.get_database_table_name()],
            )
            table_exists = cursor.fetchone()[0]
        if table_exists:
            with connection.schema_editor() as schema_editor:
                model = trashed_item.get_model()
                schema_editor.delete_model(model)

        trashed_item.delete()

    # noinspection PyMethodMayBeStatic
    def get_items_to_trash(self, trashed_item: Table) -> List[Any]:
        """
        When trashing a link row field we also want to trash the related link row field.
        """

        model = trashed_item.get_model()
        things_to_trash = [trashed_item]
        for field in model._field_objects.values():
            things_to_trash += field["type"].get_related_items_to_trash(field["field"])
        return things_to_trash


class FieldTrashableItemType(TrashableItemType):

    type = "field"
    model_class = Field

    def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
        return trashed_item.table

    def get_name(self, trashed_item: Field) -> str:
        return trashed_item.name

    def trashed_item_restored(self, trashed_item: Field, trash_entry: TrashEntry):
        field_restored.send(
            self,
            field=trashed_item,
            user=None,
        )

    def permanently_delete_item(self, field: Application):
        """Deletes the table schema and instance."""

        field = field.specific
        field_type = field_type_registry.get_by_model(field)

        # Remove the field from the table schema.
        connection = connections[settings.USER_TABLE_DATABASE]
        with connection.schema_editor() as schema_editor:
            from_model = field.table.get_model(field_ids=[], fields=[field])
            model_field = from_model._meta.get_field(field.db_column)
            schema_editor.remove_field(from_model, model_field)

        field.delete()

        # After the field is deleted we are going to call the after_delete method of
        # the field type because some instance cleanup might need to happen.
        field_type.after_delete(field, from_model, connection)

    # noinspection PyMethodMayBeStatic
    def get_items_to_trash(self, trashed_item: Field) -> List[Any]:
        """
        When trashing a link row field we also want to trash the related link row field.
        """

        items_to_trash = [trashed_item]
        field_type = field_type_registry.get_by_model(trashed_item.specific)
        return items_to_trash + field_type.get_related_items_to_trash(
            trashed_item.specific
        )


class RowTrashableItemType(TrashableItemType):

    type = "row"
    model_class = GeneratedTableModel

    @property
    def requires_parent_id(self) -> bool:
        # A row is not unique just with its ID. We also need the table id (parent id)
        # to uniquely identify and lookup a specific row.
        return True

    def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
        return self._get_table(parent_id)

    @staticmethod
    def _get_table(parent_id):
        try:
            return Table.objects_and_trash.get(id=parent_id)
        except Table.DoesNotExist:
            # The parent table must have been actually deleted, in which case the
            # row itself no longer exists.
            raise TrashItemDoesNotExist()

    def get_name(self, trashed_item) -> str:
        return str(trashed_item.id)

    def trashed_item_restored(self, trashed_item, trash_entry: TrashEntry):
        table = self.get_parent(trashed_item, trash_entry.parent_trash_item_id)

        model = table.get_model()
        row_created.send(
            self,
            row=trashed_item,
            table=table,
            model=model,
            before=None,
            user=None,
        )

    def permanently_delete_item(self, row):
        row.delete()

    def lookup_trashed_item(
        self, trashed_entry: TrashEntry, trash_item_lookup_cache=None
    ):
        """
        Returns the actual instance of the trashed item. By default simply does a get
        on the model_class's trash manager.

        :param trash_item_lookup_cache: A cache dict used to store the generated models
            for a given table so if looking up many rows from the same table we only
            need to lookup the tables fields etc once.
        :param trashed_entry: The entry to get the real trashed instance for.
        :return: An instance of the model_class with trashed_item_id
        """

        # Cache the expensive table.get_model function call if we are looking up
        # many trash items at once.
        if trash_item_lookup_cache is not None:
            model_cache = trash_item_lookup_cache.setdefault(
                "row_table_model_cache", {}
            )
            try:
                model = model_cache[trashed_entry.parent_trash_item_id]
            except KeyError:
                model = model_cache.setdefault(
                    trashed_entry.parent_trash_item_id,
                    self._get_table_model(trashed_entry.parent_trash_item_id),
                )
        else:
            model = self._get_table_model(trashed_entry.parent_trash_item_id)

        try:
            return model.trash.get(id=trashed_entry.trash_item_id)
        except model.DoesNotExist:
            raise TrashItemDoesNotExist()

    def _get_table_model(self, table_id):
        table = self._get_table(table_id)
        return table.get_model()

    # noinspection PyMethodMayBeStatic
    def get_extra_description(self, trashed_item: Any, table) -> Optional[str]:

        model = table.get_model()
        for field in model._field_objects.values():
            if field["field"].primary:
                primary_value = field["type"].get_human_readable_value(
                    getattr(trashed_item, field["name"]), field
                )
                if primary_value is None or primary_value == "":
                    primary_value = f"unnamed row {trashed_item.id}"
                return primary_value

        return "unknown row"
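These three types are registered with trash_item_type_registry in contrib/database/config.py earlier in this commit. Any new trashable thing would follow the same pattern; a minimal illustrative sketch with a hypothetical Comment model that does not exist in Baserow:

from typing import Any, Optional

from baserow.core.trash.registries import TrashableItemType


class CommentTrashableItemType(TrashableItemType):
    # Hypothetical example type; "comment" is not a real trashable type here.
    type = "comment"
    model_class = None  # would point at the hypothetical Comment model

    def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
        return trashed_item.table

    def get_name(self, trashed_item) -> str:
        return str(trashed_item.id)

    def trashed_item_restored(self, trashed_item, trash_entry):
        pass  # re-send creation signals here, as the real types above do

    def permanently_delete_item(self, trashed_item):
        trashed_item.delete()


# Registration would happen in an AppConfig.ready(), mirroring config.py:
# trash_item_type_registry.register(CommentTrashableItemType())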
@ -4,6 +4,7 @@ from baserow.contrib.database.fields.exceptions import FieldNotInTable
|
|||
from baserow.contrib.database.fields.field_filters import FilterBuilder
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.utils import extract_allowed, set_allowed_attrs
|
||||
from .exceptions import (
|
||||
ViewDoesNotExist,
|
||||
|
@ -68,6 +69,9 @@ class ViewHandler:
|
|||
except View.DoesNotExist:
|
||||
raise ViewDoesNotExist(f"The view with id {view_id} does not exist.")
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(view.table, check_item_also=True):
|
||||
raise ViewDoesNotExist(f"The view with id {view_id} does not exist.")
|
||||
|
||||
return view
|
||||
|
||||
def create_view(self, user, table, type_name, **kwargs):
|
||||
|
@ -331,6 +335,13 @@ class ViewHandler:
|
|||
f"The view filter with id {view_filter_id} does not exist."
|
||||
)
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(
|
||||
view_filter.view.table, check_item_also=True
|
||||
):
|
||||
raise ViewFilterDoesNotExist(
|
||||
f"The view filter with id {view_filter_id} does not exist."
|
||||
)
|
||||
|
||||
group = view_filter.view.table.database.group
|
||||
group.has_user(user, raise_error=True)
|
||||
|
||||
|
@ -559,6 +570,13 @@ class ViewHandler:
|
|||
f"The view sort with id {view_sort_id} does not exist."
|
||||
)
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(
|
||||
view_sort.view.table, check_item_also=True
|
||||
):
|
||||
raise ViewSortDoesNotExist(
|
||||
f"The view sort with id {view_sort_id} does not exist."
|
||||
)
|
||||
|
||||
group = view_sort.view.table.database.group
|
||||
group.has_user(user, raise_error=True)
|
||||
|
||||
|
|
|
@ -6,6 +6,10 @@ from baserow.contrib.database.fields.field_filters import (
|
|||
FILTER_TYPE_OR,
|
||||
)
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.mixins import (
|
||||
ParentTableTrashableModelMixin,
|
||||
ParentFieldTrashableModelMixin,
|
||||
)
|
||||
from baserow.core.mixins import (
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
|
@ -24,7 +28,11 @@ def get_default_view_content_type():
|
|||
|
||||
|
||||
class View(
|
||||
CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixin, models.Model
|
||||
ParentTableTrashableModelMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
models.Model,
|
||||
):
|
||||
table = models.ForeignKey("database.Table", on_delete=models.CASCADE)
|
||||
order = models.PositiveIntegerField()
|
||||
|
@ -57,7 +65,7 @@ class View(
|
|||
return cls.get_highest_order_of_queryset(queryset) + 1
|
||||
|
||||
|
||||
class ViewFilter(models.Model):
|
||||
class ViewFilter(ParentFieldTrashableModelMixin, models.Model):
|
||||
view = models.ForeignKey(
|
||||
View,
|
||||
on_delete=models.CASCADE,
|
||||
|
@ -85,7 +93,7 @@ class ViewFilter(models.Model):
|
|||
ordering = ("id",)
|
||||
|
||||
|
||||
class ViewSort(models.Model):
|
||||
class ViewSort(ParentFieldTrashableModelMixin, models.Model):
|
||||
view = models.ForeignKey(
|
||||
View,
|
||||
on_delete=models.CASCADE,
|
||||
|
@ -149,7 +157,7 @@ class GridView(View):
|
|||
return field_options
|
||||
|
||||
|
||||
class GridViewFieldOptions(models.Model):
|
||||
class GridViewFieldOptions(ParentFieldTrashableModelMixin, models.Model):
|
||||
grid_view = models.ForeignKey(GridView, on_delete=models.CASCADE)
|
||||
field = models.ForeignKey(Field, on_delete=models.CASCADE)
|
||||
# The defaults should be the same as in the `fieldCreated` of the `GridViewType`
|
||||
|
|
|
@ -25,6 +25,23 @@ def field_created(sender, field, user, **kwargs):
|
|||
)
|
||||
|
||||
|
||||
@receiver(field_signals.field_restored)
|
||||
def field_restored(sender, field, user, **kwargs):
|
||||
table_page_type = page_registry.get("table")
|
||||
transaction.on_commit(
|
||||
lambda: table_page_type.broadcast(
|
||||
{
|
||||
"type": "field_restored",
|
||||
"field": field_type_registry.get_serializer(
|
||||
field, FieldSerializer
|
||||
).data,
|
||||
},
|
||||
getattr(user, "web_socket_id", None),
|
||||
table_id=field.table_id,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(field_signals.field_updated)
|
||||
def field_updated(sender, field, user, **kwargs):
|
||||
table_page_type = page_registry.get("table")
|
||||
|
|
|
@ -3,3 +3,11 @@ from django.apps import AppConfig
|
|||
|
||||
class CoreConfig(AppConfig):
|
||||
name = "baserow.core"
|
||||
|
||||
def ready(self):
|
||||
from baserow.core.trash.registries import trash_item_type_registry
|
||||
from baserow.core.trash.trash_types import GroupTrashableItemType
|
||||
from baserow.core.trash.trash_types import ApplicationTrashableItemType
|
||||
|
||||
trash_item_type_registry.register(GroupTrashableItemType())
|
||||
trash_item_type_registry.register(ApplicationTrashableItemType())
|
||||
|
|
|
@ -117,3 +117,9 @@ class TemplateFileDoesNotExist(Exception):
|
|||
Raised when the JSON template file does not exist in the
|
||||
APPLICATION_TEMPLATE_DIRS directory.
|
||||
"""
|
||||
|
||||
|
||||
class TrashItemDoesNotExist(Exception):
|
||||
"""
|
||||
Raised when the trash item does not exist in the database.
|
||||
"""
|
||||
|
|
|
@ -38,6 +38,7 @@ from .exceptions import (
|
|||
TemplateFileDoesNotExist,
|
||||
TemplateDoesNotExist,
|
||||
)
|
||||
from .trash.handler import TrashHandler
|
||||
from .utils import extract_allowed, set_allowed_attrs
|
||||
from .registries import application_type_registry
|
||||
from .signals import (
|
||||
|
@ -174,7 +175,8 @@ class CoreHandler:
|
|||
def delete_group(self, user, group):
|
||||
"""
|
||||
Deletes an existing group and related applications if the user has admin
|
||||
permissions to the group.
|
||||
permissions for the group. The group can be restored after deletion using the
|
||||
trash handler.
|
||||
|
||||
:param user: The user on whose behalf the delete is done.
|
||||
:type: user: User
|
||||
|
@ -193,23 +195,12 @@ class CoreHandler:
|
|||
group_id = group.id
|
||||
group_users = list(group.users.all())
|
||||
|
||||
self._delete_group(group)
|
||||
TrashHandler.trash(user, group, None, group)
|
||||
|
||||
group_deleted.send(
|
||||
self, group_id=group_id, group=group, group_users=group_users, user=user
|
||||
)
|
||||
|
||||
def _delete_group(self, group):
|
||||
"""Deletes the provided group."""
|
||||
|
||||
# Select all the applications so we can delete them via the handler which is
|
||||
# needed in order to call the pre_delete method for each application.
|
||||
applications = group.application_set.all().select_related("group")
|
||||
for application in applications:
|
||||
self._delete_application(application)
|
||||
|
||||
group.delete()
|
||||
|
||||
def order_groups(self, user, group_ids):
|
||||
"""
|
||||
Changes the order of groups for a user.
|
||||
|
@ -594,6 +585,11 @@ class CoreHandler:
|
|||
f"The application with id {application_id} does not exist."
|
||||
)
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(application):
|
||||
raise ApplicationDoesNotExist(
|
||||
f"The application with id {application_id} does not exist."
|
||||
)
|
||||
|
||||
return application
|
||||
|
||||
def create_application(self, user, group, type_name, **kwargs):
|
||||
|
@ -625,9 +621,7 @@ class CoreHandler:
|
|||
group=group, order=last_order, **application_values
|
||||
)
|
||||
|
||||
application_created.send(
|
||||
self, application=instance, user=user, type_name=type_name
|
||||
)
|
||||
application_created.send(self, application=instance, user=user)
|
||||
|
||||
return instance
|
||||
|
||||
|
@ -703,21 +697,12 @@ class CoreHandler:
|
|||
application.group.has_user(user, raise_error=True)
|
||||
|
||||
application_id = application.id
|
||||
application = self._delete_application(application)
|
||||
TrashHandler.trash(user, application.group, application, application)
|
||||
|
||||
application_deleted.send(
|
||||
self, application_id=application_id, application=application, user=user
|
||||
)
|
||||
|
||||
def _delete_application(self, application):
|
||||
"""Deletes an application and the related relations in the correct way."""
|
||||
|
||||
application = application.specific
|
||||
application_type = application_type_registry.get_by_model(application)
|
||||
application_type.pre_delete(application)
|
||||
application.delete()
|
||||
return application
|
||||
|
||||
def export_group_applications(self, group, files_buffer, storage=None):
|
||||
"""
|
||||
Exports the applications of a group to a list. They can later be imported via
|
||||
|
@ -871,7 +856,7 @@ class CoreHandler:
|
|||
and installed_template.group
|
||||
and installed_template.export_hash != export_hash
|
||||
):
|
||||
self._delete_group(installed_template.group)
|
||||
TrashHandler.permanently_delete(installed_template.group)
|
||||
|
||||
# If the installed template does not yet exist or if there is a export
|
||||
# hash mismatch, which means the group has already been deleted, we can
|
||||
|
@ -944,7 +929,7 @@ class CoreHandler:
|
|||
for template_file_path in templates
|
||||
]
|
||||
for template in Template.objects.filter(~Q(slug__in=slugs)):
|
||||
self._delete_group(template.group)
|
||||
TrashHandler.permanently_delete(template.group)
|
||||
template.delete()
|
||||
|
||||
# Delete all the categories that don't have any templates anymore.
|
||||
|
|
|
@ -1,6 +1,36 @@
|
|||
from django.db import models
|
||||
|
||||
|
||||
class GroupQuerySet(models.QuerySet):
|
||||
def of_user(self, user):
|
||||
return self.filter(users__exact=user).order_by("groupuser__order")
|
||||
def make_trash_manager(trashed, parent=None):
|
||||
"""
|
||||
Constructs a Django QuerySet manager which filters down its base queryset according
|
||||
to the provided parameters.
|
||||
|
||||
We need to use a factory method which constructs a class closed over its parameters
|
||||
rather than, say, passing __init__ arguments to a single base class, as Django will
|
||||
init a model's managers without providing any kwargs, breaking things horribly.
|
||||
This way Django can init the manager without any kwargs and it will still filter correctly.
|
||||
|
||||
:param trashed: If true the manager will only return trashed entries, if false then
|
||||
only returns non-trashed entries.
|
||||
:param parent: If specified will use the trashed column in a related model where
|
||||
parent is the name of the FK to the related model.
|
||||
:return: A manager with an override get_queryset filtered accordingly.
|
||||
"""
|
||||
|
||||
filter_kwargs = {}
|
||||
|
||||
if parent is None:
|
||||
filter_kwargs["trashed"] = trashed
|
||||
else:
|
||||
filter_kwargs[f"{parent}__trashed"] = trashed
|
||||
|
||||
class Manager(models.Manager):
|
||||
def get_queryset(self):
|
||||
return super().get_queryset().filter(**filter_kwargs)
|
||||
|
||||
return Manager
|
||||
|
||||
|
||||
TrashOnlyManager = make_trash_manager(trashed=True)
|
||||
NoTrashManager = make_trash_manager(trashed=False)
|
||||
|
|
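
To make the factory concrete, here is a hedged sketch of wiring the generated managers onto a model by hand; Book is a hypothetical example model, not something in Baserow:

    from django.db import models

    from baserow.core.managers import make_trash_manager


    class Book(models.Model):  # hypothetical example model
        trashed = models.BooleanField(default=False)

        # Hides trashed rows from normal queries.
        objects = make_trash_manager(trashed=False)()
        # Shows only trashed rows, e.g. for a trash listing.
        trash = make_trash_manager(trashed=True)()
        # Unfiltered access, used when restoring or permanently deleting.
        objects_and_trash = models.Manager()

        class Meta:
            app_label = "example"  # assumption: only needed outside an installed app

Passing parent="book" to the same factory would instead filter on book__trashed, which is how the Parent*TrashableModelMixin variants below work.
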
87
backend/src/baserow/core/migrations/0008_trash.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
# Generated by Django 2.2.11 on 2021-06-16 09:23
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("core", "0007_userlogentry"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="application",
|
||||
name="trashed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="group",
|
||||
name="trashed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="TrashEntry",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("trash_item_type", models.TextField()),
|
||||
(
|
||||
"parent_trash_item_id",
|
||||
models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
("trash_item_id", models.PositiveIntegerField()),
|
||||
("should_be_permanently_deleted", models.BooleanField(default=False)),
|
||||
("trashed_at", models.DateTimeField(auto_now_add=True)),
|
||||
("name", models.TextField()),
|
||||
("parent_name", models.TextField(blank=True, null=True)),
|
||||
("extra_description", models.TextField(blank=True, null=True)),
|
||||
(
|
||||
"application",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="core.Application",
|
||||
),
|
||||
),
|
||||
(
|
||||
"group",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="core.Group"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_who_trashed",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="trashentry",
|
||||
index=models.Index(
|
||||
fields=["-trashed_at", "trash_item_type", "group", "application"],
|
||||
name="core_trashe_trashed_0bf61a_idx",
|
||||
),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="trashentry",
|
||||
unique_together={
|
||||
("trash_item_type", "parent_trash_item_id", "trash_item_id")
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,9 +1,15 @@
|
|||
from django.db import models
|
||||
from django.db.models import Case, When, Value
|
||||
from django.db.models.fields import NOT_PROVIDED
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
from django.db.models import Case, When, Value, Manager
|
||||
from django.db.models.fields import NOT_PROVIDED
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from baserow.core.managers import (
|
||||
make_trash_manager,
|
||||
NoTrashManager,
|
||||
TrashOnlyManager,
|
||||
)
|
||||
|
||||
|
||||
class OrderableMixin:
|
||||
"""
|
||||
|
@ -162,3 +168,46 @@ class CreatedAndUpdatedOnMixin(models.Model):
|
|||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
def make_trashable_mixin(parent):
|
||||
"""
|
||||
Constructs a mixin class which overrides a model's managers to ensure trashed entries
|
||||
are not available via objects, but instead via the new trash manager.
|
||||
|
||||
:param parent: If specified will use the trashed column in a related model where
|
||||
parent is the name of the FK to the related model.
|
||||
:return: A mixin with overridden managers which correctly filter out trashed rows.
|
||||
"""
|
||||
|
||||
no_trash_manager = make_trash_manager(trashed=False, parent=parent)
|
||||
trash_only_manager = make_trash_manager(trashed=True, parent=parent)
|
||||
|
||||
class TrashableMixin(models.Model):
|
||||
objects = no_trash_manager()
|
||||
trash = trash_only_manager()
|
||||
objects_and_trash = Manager()
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
return TrashableMixin
|
||||
|
||||
|
||||
ParentGroupTrashableModelMixin = make_trashable_mixin("group")
|
||||
|
||||
|
||||
class TrashableModelMixin(models.Model):
|
||||
"""
|
||||
This mixin allows this model to be trashed and restored from the trash by adding
|
||||
new columns recording its trash status.
|
||||
"""
|
||||
|
||||
trashed = models.BooleanField(default=False)
|
||||
|
||||
objects = NoTrashManager()
|
||||
trash = TrashOnlyManager()
|
||||
objects_and_trash = Manager()
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
|
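
Assuming a model mixes in TrashableModelMixin, as Group and Application now do, the three managers split queries roughly like this; an illustrative sketch rather than code from the change:

    from baserow.core.models import Group

    group = Group.objects.create(name="Sales")  # hypothetical group
    group.trashed = True
    group.save()

    Group.objects.filter(id=group.id).exists()            # False, hidden once trashed
    Group.trash.filter(id=group.id).exists()              # True, trashed rows only
    Group.objects_and_trash.filter(id=group.id).exists()  # True, everything
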
|
@ -6,16 +6,28 @@ from rest_framework.exceptions import NotAuthenticated
|
|||
|
||||
from baserow.core.user_files.models import UserFile
|
||||
|
||||
from .managers import GroupQuerySet
|
||||
from .mixins import (
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
TrashableModelMixin,
|
||||
ParentGroupTrashableModelMixin,
|
||||
)
|
||||
from .exceptions import UserNotInGroup, UserInvalidGroupPermissionsError
|
||||
|
||||
|
||||
__all__ = ["UserFile"]
|
||||
__all__ = [
|
||||
"Settings",
|
||||
"Group",
|
||||
"GroupUser",
|
||||
"GroupInvitation",
|
||||
"Application",
|
||||
"TemplateCategory",
|
||||
"Template",
|
||||
"UserLogEntry",
|
||||
"TrashEntry",
|
||||
"UserFile",
|
||||
]
|
||||
|
||||
|
||||
User = get_user_model()
|
||||
|
@ -48,17 +60,26 @@ class Settings(models.Model):
|
|||
)
|
||||
|
||||
|
||||
class Group(CreatedAndUpdatedOnMixin, models.Model):
|
||||
class Group(TrashableModelMixin, CreatedAndUpdatedOnMixin):
|
||||
name = models.CharField(max_length=100)
|
||||
users = models.ManyToManyField(User, through="GroupUser")
|
||||
|
||||
objects = GroupQuerySet.as_manager()
|
||||
def application_set_including_trash(self):
|
||||
"""
|
||||
:return: The applications for this group including any trashed applications.
|
||||
"""
|
||||
return self.application_set(manager="objects_and_trash")
|
||||
|
||||
def has_template(self):
|
||||
return self.template_set.all().exists()
|
||||
|
||||
def has_user(
|
||||
self, user, permissions=None, raise_error=False, allow_if_template=False
|
||||
self,
|
||||
user,
|
||||
permissions=None,
|
||||
raise_error=False,
|
||||
allow_if_template=False,
|
||||
include_trash=False,
|
||||
):
|
||||
"""
|
||||
Checks if the provided user belongs to the group.
|
||||
|
@ -74,6 +95,9 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):
|
|||
:param allow_if_template: If true and if the group is related to a template,
|
||||
then True is always returned and no exception will be raised.
|
||||
:type allow_if_template: bool
|
||||
:param include_trash: If true then group users of a trashed group are also
|
||||
included in the check rather than being treated as if they do not exist.
|
||||
:type include_trash: bool
|
||||
:raises UserNotInGroup: If the user does not belong to the group.
|
||||
:raises UserInvalidGroupPermissionsError: If the user does belong to the group,
|
||||
but doesn't have the right permissions.
|
||||
|
@ -92,7 +116,12 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):
|
|||
else:
|
||||
return False
|
||||
|
||||
queryset = GroupUser.objects.filter(user_id=user.id, group_id=self.id)
|
||||
if include_trash:
|
||||
manager = GroupUser.objects_and_trash
|
||||
else:
|
||||
manager = GroupUser.objects
|
||||
|
||||
queryset = manager.filter(user_id=user.id, group_id=self.id)
|
||||
|
||||
if raise_error:
|
||||
try:
|
||||
|
@ -112,7 +141,12 @@ class Group(CreatedAndUpdatedOnMixin, models.Model):
|
|||
return f"<Group id={self.id}, name={self.name}>"
|
||||
|
||||
|
||||
class GroupUser(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
||||
class GroupUser(
|
||||
ParentGroupTrashableModelMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
OrderableMixin,
|
||||
models.Model,
|
||||
):
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
|
@ -143,7 +177,9 @@ class GroupUser(CreatedAndUpdatedOnMixin, OrderableMixin, models.Model):
|
|||
return cls.get_highest_order_of_queryset(queryset) + 1
|
||||
|
||||
|
||||
class GroupInvitation(CreatedAndUpdatedOnMixin, models.Model):
|
||||
class GroupInvitation(
|
||||
ParentGroupTrashableModelMixin, CreatedAndUpdatedOnMixin, models.Model
|
||||
):
|
||||
group = models.ForeignKey(
|
||||
Group,
|
||||
on_delete=models.CASCADE,
|
||||
|
@ -178,7 +214,11 @@ class GroupInvitation(CreatedAndUpdatedOnMixin, models.Model):
|
|||
|
||||
|
||||
class Application(
|
||||
CreatedAndUpdatedOnMixin, OrderableMixin, PolymorphicContentTypeMixin, models.Model
|
||||
TrashableModelMixin,
|
||||
CreatedAndUpdatedOnMixin,
|
||||
OrderableMixin,
|
||||
PolymorphicContentTypeMixin,
|
||||
models.Model,
|
||||
):
|
||||
group = models.ForeignKey(Group, on_delete=models.CASCADE)
|
||||
name = models.CharField(max_length=50)
|
||||
|
@ -250,3 +290,63 @@ class UserLogEntry(models.Model):
|
|||
class Meta:
|
||||
get_latest_by = "timestamp"
|
||||
ordering = ["-timestamp"]
|
||||
|
||||
|
||||
class TrashEntry(models.Model):
|
||||
"""
|
||||
A TrashEntry is a record indicating that another model in Baserow has a trashed
|
||||
row. When a user deletes certain things in Baserow they are not actually deleted
|
||||
from the database, but instead marked as trashed. Trashed rows can be restored
|
||||
or permanently deleted.
|
||||
|
||||
The other model must mixin the TrashableModelMixin and also have a corresponding
|
||||
TrashableItemType registered specifying exactly how to delete and restore that
|
||||
model.
|
||||
"""
|
||||
|
||||
# The TrashableItemType.type of the item that is trashed.
|
||||
trash_item_type = models.TextField()
|
||||
# We need to also store the parent id as for some trashable items the
|
||||
# trash_item_type and the trash_item_id is not unique as the items of that type
|
||||
# could be spread over multiple tables with the same id.
|
||||
parent_trash_item_id = models.PositiveIntegerField(null=True, blank=True)
|
||||
# The actual id of the item that is trashed
|
||||
trash_item_id = models.PositiveIntegerField()
|
||||
|
||||
# If the user who trashed something gets deleted we still wish to preserve this
|
||||
# trash record as it is independent of if the user exists or not.
|
||||
user_who_trashed = models.ForeignKey(
|
||||
User, on_delete=models.SET_NULL, null=True, blank=True
|
||||
)
|
||||
|
||||
# The group and application fields are used to group trash into separate "bins"
|
||||
# which can be viewed and emptied independently of each other.
|
||||
|
||||
# The group the item that is trashed is found in, if the trashed item is the
|
||||
# group itself then this should also be set to that trashed group.
|
||||
group = models.ForeignKey(Group, on_delete=models.CASCADE)
|
||||
# The application the item that is trashed is found in, if the trashed item is the
|
||||
# application itself then this should also be set to that trashed application.
|
||||
application = models.ForeignKey(
|
||||
Application, on_delete=models.CASCADE, null=True, blank=True
|
||||
)
|
||||
|
||||
# When set to true this trash entry will be picked up by a periodic job and the
|
||||
# underlying item will be actually permanently deleted along with the entry.
|
||||
should_be_permanently_deleted = models.BooleanField(default=False)
|
||||
trashed_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
# The name, name of the parent and any extra description are cached so lookups
|
||||
# of trashed items are simple and do not require joining to many different tables
|
||||
# to simply get these details.
|
||||
name = models.TextField()
|
||||
parent_name = models.TextField(null=True, blank=True)
|
||||
extra_description = models.TextField(null=True, blank=True)
|
||||
|
||||
class Meta:
|
||||
unique_together = ("trash_item_type", "parent_trash_item_id", "trash_item_id")
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["-trashed_at", "trash_item_type", "group", "application"]
|
||||
)
|
||||
]
|
||||
|
|
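
Because the (trash_item_type, parent_trash_item_id, trash_item_id) triple is unique together, an entry can always be found from those three values alone; a sketch with illustrative ids:

    from baserow.core.models import TrashEntry

    # Rows need their table id as the parent; groups and applications pass None.
    entry = TrashEntry.objects.get(
        trash_item_type="row",
        parent_trash_item_id=42,  # hypothetical table id
        trash_item_id=7,          # hypothetical row id
    )
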
|
@ -4,6 +4,7 @@ from django.dispatch import Signal
|
|||
group_created = Signal()
|
||||
group_updated = Signal()
|
||||
group_deleted = Signal()
|
||||
group_restored = Signal()
|
||||
|
||||
group_user_updated = Signal()
|
||||
group_user_deleted = Signal()
|
||||
|
|
11
backend/src/baserow/core/tasks.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
from .trash.tasks import (
|
||||
permanently_delete_marked_trash,
|
||||
mark_old_trash_for_permanent_deletion,
|
||||
setup_period_trash_tasks,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"permanently_delete_marked_trash",
|
||||
"mark_old_trash_for_permanent_deletion",
|
||||
"setup_period_trash_tasks",
|
||||
]
|
0
backend/src/baserow/core/trash/__init__.py
Normal file
18
backend/src/baserow/core/trash/exceptions.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
class CannotRestoreChildBeforeParent(Exception):
|
||||
"""
|
||||
Raised when attempting to restore a trashed item when its parent is also trashed.
|
||||
"""
|
||||
|
||||
|
||||
class ParentIdMustBeProvidedException(Exception):
|
||||
"""
|
||||
Raised when attempting to access or restore a trashed item without providing its
|
||||
parent's id.
|
||||
"""
|
||||
|
||||
|
||||
class ParentIdMustNotBeProvidedException(Exception):
|
||||
"""
|
||||
Raised when attempting to access or restore a trashed item which should not have
|
||||
its parent id provided, but it was anyway.
|
||||
"""
|
418
backend/src/baserow/core/trash/handler.py
Normal file
|
@ -0,0 +1,418 @@
|
|||
from typing import Optional, Dict, Any
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.db import transaction
|
||||
from django.db.models import QuerySet
|
||||
from django.utils import timezone
|
||||
|
||||
from baserow.core.exceptions import (
|
||||
ApplicationNotInGroup,
|
||||
GroupDoesNotExist,
|
||||
ApplicationDoesNotExist,
|
||||
TrashItemDoesNotExist,
|
||||
)
|
||||
from baserow.core.models import TrashEntry, Application, Group
|
||||
from baserow.core.trash.exceptions import (
|
||||
CannotRestoreChildBeforeParent,
|
||||
ParentIdMustBeProvidedException,
|
||||
ParentIdMustNotBeProvidedException,
|
||||
)
|
||||
from baserow.core.trash.registries import TrashableItemType, trash_item_type_registry
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class TrashHandler:
|
||||
@staticmethod
|
||||
def trash(
|
||||
requesting_user: User,
|
||||
group: Group,
|
||||
application: Optional[Application],
|
||||
trash_item,
|
||||
parent_id=None,
|
||||
) -> TrashEntry:
|
||||
"""
|
||||
Marks the provided trashable item as trashed meaning it will no longer be
|
||||
visible or usable in Baserow. However any user with access to its group can
|
||||
restore the item after it is trashed to make it visible and usable again. After
|
||||
a configurable timeout period, or when a user explicitly empties the
|
||||
trash, trashed items will be permanently deleted.
|
||||
|
||||
:param parent_id: The id of the parent object if known
|
||||
:param requesting_user: The user who is requesting that this item be trashed.
|
||||
:param group: The group the trashed item is in.
|
||||
:param application: If the item is in an application the application.
|
||||
:param trash_item: The item to be trashed.
|
||||
:return: A newly created entry in the TrashEntry table for this item.
|
||||
"""
|
||||
|
||||
# Check if the parent has a trash entry, if so link this new entry to it
|
||||
# via a cascading on delete FK to ensure if the parent entry is deleted then
|
||||
# this one is also deleted. We do this as say if a table is perm deleted,
|
||||
# we don't then want to keep orphaned trash entries around for its rows.
|
||||
with transaction.atomic():
|
||||
trash_item_type = trash_item_type_registry.get_by_model(trash_item)
|
||||
|
||||
_check_parent_id_valid(parent_id, trash_item_type)
|
||||
|
||||
items_to_trash = trash_item_type.get_items_to_trash(trash_item)
|
||||
for item in items_to_trash:
|
||||
item.trashed = True
|
||||
item.save()
|
||||
|
||||
parent = trash_item_type.get_parent(trash_item, parent_id)
|
||||
if parent is not None:
|
||||
parent_type = trash_item_type_registry.get_by_model(parent)
|
||||
parent_name = parent_type.get_name(parent)
|
||||
else:
|
||||
parent_name = None
|
||||
|
||||
return TrashEntry.objects.create(
|
||||
user_who_trashed=requesting_user,
|
||||
group=group,
|
||||
application=application,
|
||||
trash_item_type=trash_item_type.type,
|
||||
trash_item_id=trash_item.id,
|
||||
name=trash_item_type.get_name(trash_item),
|
||||
parent_name=parent_name,
|
||||
parent_trash_item_id=parent_id,
|
||||
# If we ever introduce the ability to trash many rows at once this
|
||||
# call will generate a model per row currently, instead a model cache
|
||||
# should be added so generated models can be shared.
|
||||
extra_description=trash_item_type.get_extra_description(
|
||||
trash_item, parent
|
||||
),
|
||||
)
|
||||
|
||||
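
A hedged usage sketch of the call above, trashing an application the same way delete_application now does elsewhere in this change; the user and database variables are assumed to exist already:

    from baserow.core.trash.handler import TrashHandler

    # For an application, its "bin" is its own group and itself, and this
    # trashable type does not take a parent_id.
    TrashHandler.trash(
        requesting_user=user,
        group=database.group,
        application=database,
        trash_item=database,
    )
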
@staticmethod
|
||||
def restore_item(user, trash_item_type, trash_item_id, parent_trash_item_id=None):
|
||||
"""
|
||||
Restores an item from the trash, reinstating it back in Baserow exactly as it
|
||||
was before it was trashed.
|
||||
:param user: The user requesting to restore the trashed item.
|
||||
:param trash_item_type: The trashable item type of the item to restore.
|
||||
:param parent_trash_item_id: The parent id of the item to restore.
|
||||
:param trash_item_id: The trash item id of the item to restore.
|
||||
:raises CannotRestoreChildBeforeParent: Raised if the item being restored has
|
||||
any parent, or parent of a parent etc which is trashed as that item should
|
||||
be restored first.
|
||||
"""
|
||||
|
||||
with transaction.atomic():
|
||||
trashable_item_type = trash_item_type_registry.get(trash_item_type)
|
||||
_check_parent_id_valid(parent_trash_item_id, trashable_item_type)
|
||||
|
||||
trash_entry = _get_trash_entry(
|
||||
user, trash_item_type, parent_trash_item_id, trash_item_id
|
||||
)
|
||||
|
||||
trash_item = trashable_item_type.lookup_trashed_item(trash_entry, {})
|
||||
|
||||
items_to_restore = trashable_item_type.get_items_to_trash(trash_item)
|
||||
|
||||
if TrashHandler.item_has_a_trashed_parent(
|
||||
trash_item,
|
||||
parent_id=trash_entry.parent_trash_item_id,
|
||||
):
|
||||
raise CannotRestoreChildBeforeParent()
|
||||
|
||||
trash_entry.delete()
|
||||
|
||||
# Restore everything in the database first before we run any restored
|
||||
# hooks otherwise signals etc might try to be sent when dependent items are
|
||||
# still trashed in the database.
|
||||
for item in items_to_restore:
|
||||
item.trashed = False
|
||||
item.save()
|
||||
|
||||
for item in items_to_restore:
|
||||
restore_type = trash_item_type_registry.get_by_model(item)
|
||||
restore_type.trashed_item_restored(item, trash_entry)
|
||||
|
||||
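
The matching restore, again as a sketch; a row must also pass its table id because the row type is currently the only one that requires a parent id:

    from baserow.core.trash.handler import TrashHandler

    # Restoring an application only needs its own id.
    TrashHandler.restore_item(user, "application", database.id)

    # Restoring a row also needs the table id as the parent.
    TrashHandler.restore_item(user, "row", row_id, parent_trash_item_id=table.id)
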
@staticmethod
|
||||
def get_trash_structure(user: User) -> Dict[str, Any]:
|
||||
"""
|
||||
Returns the structure of the trash available to the user. This consists of the
|
||||
groups and their applications the user has access to. Each group and application
|
||||
indicates whether it itself has been trashed.
|
||||
|
||||
:param user: The user to return the trash structure for.
|
||||
:return: An ordered list of groups and their applications which could possibly
|
||||
have trash contents.
|
||||
"""
|
||||
|
||||
structure = {"groups": []}
|
||||
groups = _get_groups_excluding_perm_deleted(user)
|
||||
for group in groups:
|
||||
applications = _get_applications_excluding_perm_deleted(group)
|
||||
structure["groups"].append(
|
||||
{
|
||||
"id": group.id,
|
||||
"trashed": group.trashed,
|
||||
"name": group.name,
|
||||
"applications": applications,
|
||||
}
|
||||
)
|
||||
|
||||
return structure
|
||||
|
||||
@staticmethod
|
||||
def mark_old_trash_for_permanent_deletion():
|
||||
"""
|
||||
Marks all trash entries which are older than the configured Django setting as
|
||||
needing permanent deletion. Does not perform the deletion itself.
|
||||
"""
|
||||
|
||||
now = timezone.now()
|
||||
cutoff = now - timezone.timedelta(
|
||||
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED
|
||||
)
|
||||
TrashEntry.objects.filter(trashed_at__lte=cutoff).update(
|
||||
should_be_permanently_deleted=True
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def empty(requesting_user: User, group_id: int, application_id: Optional[int]):
|
||||
"""
|
||||
Marks all items in the selected group (or application in the group if
|
||||
application_id is provided) for permanent deletion.
|
||||
"""
|
||||
|
||||
with transaction.atomic():
|
||||
trash_contents = TrashHandler.get_trash_contents(
|
||||
requesting_user, group_id, application_id
|
||||
)
|
||||
trash_contents.update(should_be_permanently_deleted=True)
|
||||
|
||||
@staticmethod
|
||||
def permanently_delete_marked_trash():
|
||||
"""
|
||||
Looks up every trash item marked for permanent deletion and removes them
|
||||
irreversibly from the database along with their corresponding trash entries.
|
||||
"""
|
||||
|
||||
trash_item_lookup_cache = {}
|
||||
for trash_entry in TrashEntry.objects.filter(
|
||||
should_be_permanently_deleted=True
|
||||
):
|
||||
with transaction.atomic():
|
||||
trash_item_type = trash_item_type_registry.get(
|
||||
trash_entry.trash_item_type
|
||||
)
|
||||
|
||||
try:
|
||||
to_delete = trash_item_type.lookup_trashed_item(
|
||||
trash_entry, trash_item_lookup_cache
|
||||
)
|
||||
trash_item_type.permanently_delete_item(to_delete)
|
||||
except TrashItemDoesNotExist:
|
||||
# When a parent item is deleted it should also delete all of its
|
||||
# children. Hence we expect many of these TrashEntries to no
|
||||
# longer point to an existing item. In such a situation we just want
|
||||
# to delete the entry as the item itself has been correctly deleted.
|
||||
pass
|
||||
trash_entry.delete()
|
||||
|
||||
@staticmethod
|
||||
def permanently_delete(trashable_item):
|
||||
"""
|
||||
Actually removes the provided trashable item from the database irreversibly.
|
||||
:param trashable_item: An instance of a TrashableItemType model_class to delete.
|
||||
"""
|
||||
|
||||
trash_item_type = trash_item_type_registry.get_by_model(trashable_item)
|
||||
trash_item_type.permanently_delete_item(trashable_item)
|
||||
|
||||
@staticmethod
|
||||
def get_trash_contents(
|
||||
user: User, group_id: int, application_id: Optional[int]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Looks up the trash contents for a particular group optionally filtered by
|
||||
the provided application id.
|
||||
:param user: The user who is requesting to see the trash contents.
|
||||
:param group_id: The group to lookup trash contents inside of.
|
||||
:param application_id: The optional application to filter down the trash
|
||||
contents to only this application.
|
||||
:raises GroupDoesNotExist: If the group_id is for a non-existent
|
||||
group.
|
||||
:raises ApplicationDoesNotExist: If the application_id is for a non-existent
|
||||
application.
|
||||
:raises ApplicationNotInGroup: If the application_id is for an application
|
||||
not in the requested group.
|
||||
:raises UserNotInGroup: If the user does not belong to the group.
|
||||
:return: a queryset of the trash items in the group optionally filtered by
|
||||
the provided application.
|
||||
"""
|
||||
|
||||
group = _get_group(group_id, user)
|
||||
|
||||
application = _get_application(application_id, group, user)
|
||||
|
||||
trash_contents = TrashEntry.objects.filter(
|
||||
group=group, should_be_permanently_deleted=False
|
||||
)
|
||||
if application:
|
||||
trash_contents = trash_contents.filter(application=application)
|
||||
return trash_contents.order_by("-trashed_at")
|
||||
|
||||
@staticmethod
|
||||
def item_has_a_trashed_parent(item, parent_id=None, check_item_also=False):
|
||||
"""
|
||||
Given an instance of a model which is trashable (item) checks if it has a parent
|
||||
which is trashed. Returns True if its parent, or parent's parent (and so on)
|
||||
is trashed, False if no parent is trashed.
|
||||
|
||||
:param check_item_also: If true also checks if the provided item itself is
|
||||
trashed and returns True if so.
|
||||
:param item: An instance of a trashable model to check.
|
||||
:param parent_id: If the trashable type of the provided instance requires an
|
||||
id to look up its parent, it must be provided here.
|
||||
:return: If the provided item has a trashed parent or not.
|
||||
"""
|
||||
|
||||
trash_item_type = trash_item_type_registry.get_by_model(item)
|
||||
|
||||
if check_item_also and item.trashed:
|
||||
return True
|
||||
|
||||
while True:
|
||||
_check_parent_id_valid(parent_id, trash_item_type)
|
||||
parent = trash_item_type.get_parent(item, parent_id)
|
||||
if parent is None:
|
||||
return False
|
||||
elif parent.trashed:
|
||||
return True
|
||||
else:
|
||||
item = parent
|
||||
# Right now only rows, the lowest items in the "trash hierarchy", require
|
||||
# a parent id. Hence we know that as we go up into parents we will
|
||||
# no longer need parent ids to do the lookups. However if in the future
|
||||
# there is an intermediary trashable item which also requires a
|
||||
# parent_id this method will not work and will need to be changed.
|
||||
parent_id = None
|
||||
trash_item_type = trash_item_type_registry.get_by_model(item)
|
||||
|
||||
|
||||
def _get_group(group_id, user):
|
||||
try:
|
||||
group = Group.objects_and_trash.get(id=group_id)
|
||||
except Group.DoesNotExist:
|
||||
raise GroupDoesNotExist
|
||||
# Check that the group is not marked for perm deletion, if so we don't want
|
||||
# to display its contents anymore as it should be permanently deleted soon.
|
||||
try:
|
||||
trash_entry = _get_trash_entry(user, "group", None, group.id)
|
||||
if trash_entry.should_be_permanently_deleted:
|
||||
raise GroupDoesNotExist
|
||||
except TrashItemDoesNotExist:
|
||||
pass
|
||||
group.has_user(user, raise_error=True, include_trash=True)
|
||||
return group
|
||||
|
||||
|
||||
def _get_application(application_id, group, user):
|
||||
if application_id is not None:
|
||||
try:
|
||||
application = Application.objects_and_trash.get(id=application_id)
|
||||
except Application.DoesNotExist:
|
||||
raise ApplicationDoesNotExist()
|
||||
|
||||
try:
|
||||
trash_entry = _get_trash_entry(user, "application", None, application.id)
|
||||
if trash_entry.should_be_permanently_deleted:
|
||||
raise ApplicationDoesNotExist
|
||||
except TrashItemDoesNotExist:
|
||||
pass
|
||||
|
||||
if application.group != group:
|
||||
raise ApplicationNotInGroup()
|
||||
else:
|
||||
application = None
|
||||
return application
|
||||
|
||||
|
||||
def _check_parent_id_valid(
|
||||
parent_trash_item_id: Optional[int], trashable_item_type: TrashableItemType
|
||||
):
|
||||
"""
|
||||
Raises an exception if the parent id is missing when it is required, or when the
|
||||
parent id is included when it is not required.
|
||||
|
||||
Because the parent id is stored in the database and used to lookup trash entries
|
||||
uniquely, we want to enforce it is not provided when not needed. For example, if
|
||||
the API allowed you to provide a parent id when trashing a table, that id would then
|
||||
be stored, and it would then have to be provided when restoring that table, otherwise
|
||||
the entry would not be found. Hence by being strict we ensure it's not possible to
|
||||
accidentally trash an item which is then hard to restore.
|
||||
|
||||
:param parent_trash_item_id: The parent id
|
||||
:param trashable_item_type: The type to check to see if it needs a parent id or not.
|
||||
:return:
|
||||
"""
|
||||
|
||||
if trashable_item_type.requires_parent_id and parent_trash_item_id is None:
|
||||
raise ParentIdMustBeProvidedException()
|
||||
if not trashable_item_type.requires_parent_id and parent_trash_item_id is not None:
|
||||
raise ParentIdMustNotBeProvidedException()
|
||||
|
||||
|
||||
def _get_groups_excluding_perm_deleted(user):
|
||||
groups = Group.objects_and_trash.filter(groupuser__user=user)
|
||||
perm_deleted_groups = TrashEntry.objects.filter(
|
||||
trash_item_type="group",
|
||||
should_be_permanently_deleted=True,
|
||||
trash_item_id__in=groups.values_list("id", flat=True),
|
||||
).values_list("trash_item_id", flat=True)
|
||||
groups = groups.exclude(id__in=perm_deleted_groups).order_by("groupuser__order")
|
||||
return groups
|
||||
|
||||
|
||||
def _get_applications_excluding_perm_deleted(group):
|
||||
perm_deleted_apps = TrashEntry.objects.filter(
|
||||
trash_item_type="application",
|
||||
should_be_permanently_deleted=True,
|
||||
trash_item_id__in=group.application_set_including_trash().values_list(
|
||||
"id", flat=True
|
||||
),
|
||||
).values_list("trash_item_id", flat=True)
|
||||
applications = (
|
||||
group.application_set_including_trash()
|
||||
.exclude(id__in=perm_deleted_apps)
|
||||
.order_by("order", "id")
|
||||
)
|
||||
return applications
|
||||
|
||||
|
||||
def _get_trash_entry(
|
||||
requesting_user: User,
|
||||
trash_item_type: str,
|
||||
parent_trash_item_id: Optional[int],
|
||||
trash_item_id: int,
|
||||
) -> TrashEntry:
|
||||
"""
|
||||
Gets the trash entry for a particular resource in baserow which has been
|
||||
trashed.
|
||||
:param trash_item_id: The id of the item to look for a trash entry for.
|
||||
:param parent_trash_item_id: The parent id of the item to look for a trash
|
||||
entry for.
|
||||
:param trash_item_type: The trashable type of the item.
|
||||
:param requesting_user: The user requesting to get the trashed item;
|
||||
they must be in the group of the trashed item otherwise this will raise
|
||||
UserNotInGroup.
|
||||
:returns The trash entry for the specified baserow item.
|
||||
:raises UserNotInGroup: If the requesting_user is not in the trashed items
|
||||
group.
|
||||
"""
|
||||
|
||||
try:
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
parent_trash_item_id=parent_trash_item_id,
|
||||
trash_item_id=trash_item_id,
|
||||
trash_item_type=trash_item_type,
|
||||
)
|
||||
except TrashEntry.DoesNotExist:
|
||||
raise TrashItemDoesNotExist()
|
||||
trash_entry.group.has_user(requesting_user, raise_error=True, include_trash=True)
|
||||
return trash_entry
|
124
backend/src/baserow/core/trash/registries.py
Normal file
|
@ -0,0 +1,124 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Optional, List
|
||||
|
||||
from baserow.core.exceptions import TrashItemDoesNotExist
|
||||
from baserow.core.registry import (
|
||||
ModelRegistryMixin,
|
||||
Registry,
|
||||
ModelInstanceMixin,
|
||||
Instance,
|
||||
)
|
||||
|
||||
|
||||
class TrashableItemType(ModelInstanceMixin, Instance, ABC):
|
||||
"""
|
||||
A TrashableItemType specifies a baserow model which can be trashed.
|
||||
"""
|
||||
|
||||
def lookup_trashed_item(self, trashed_entry, trash_item_lookup_cache=None):
|
||||
"""
|
||||
Returns the actual instance of the trashed item. By default simply does a get
|
||||
on the model_class's trash manager.
|
||||
|
||||
:param trash_item_lookup_cache: A dictionary which can be used to store
|
||||
expensive objects used to lookup this item which could be re-used when
|
||||
looking up other items of this type.
|
||||
:param trashed_entry: The entry to get the real trashed instance for.
|
||||
:return: An instance of the model_class with trashed_item_id
|
||||
"""
|
||||
|
||||
try:
|
||||
return self.model_class.trash.get(id=trashed_entry.trash_item_id)
|
||||
except self.model_class.DoesNotExist:
|
||||
raise TrashItemDoesNotExist()
|
||||
|
||||
@abstractmethod
|
||||
def permanently_delete_item(self, trashed_item: Any):
|
||||
"""
|
||||
Should be implemented to actually delete the specified trashed item from the
|
||||
database and do any other required clean-up.
|
||||
|
||||
:param trashed_item: The item to delete permanently.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@property
|
||||
def requires_parent_id(self) -> bool:
|
||||
"""
|
||||
:returns True if this trash type requires a parent id to lookup a specific item,
|
||||
false if only the trash_item_id is required to perform a lookup.
|
||||
"""
|
||||
return False
|
||||
|
||||
@abstractmethod
|
||||
def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
|
||||
"""
|
||||
Returns the parent for this item.
|
||||
|
||||
:param trashed_item: The item to lookup a parent for.
|
||||
:returns Either the parent item or None if this item has no parent.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def trashed_item_restored(self, trashed_item: Any, trash_entry):
|
||||
"""
|
||||
Called when a trashed item is restored, should perform any extra operations
|
||||
such as sending web socket signals which occur when an item is "created" in
|
||||
baserow.
|
||||
|
||||
:param trash_entry: The trash entry that was restored from.
|
||||
:param trashed_item: The item that has been restored.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_name(self, trashed_item: Any) -> str:
|
||||
"""
|
||||
Should return the name of this particular trashed item to display in the trash
|
||||
modal.
|
||||
|
||||
:param trashed_item: The item to be named.
|
||||
:return The name of the trashed_group
|
||||
"""
|
||||
pass
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def get_items_to_trash(self, trashed_item: Any) -> List[Any]:
|
||||
"""
|
||||
When trashing some items you might also need to mark other related items also
|
||||
as trashed. Override this method and return instances of trashable models
|
||||
which should also be marked as trashed. Each of these instances will not
|
||||
however be given their own unique trash entry, but instead be restored
|
||||
all together from a single trash entry made for trashed_item only.
|
||||
|
||||
:return An iterable of trashable model instances.
|
||||
"""
|
||||
return [trashed_item]
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def get_extra_description(
|
||||
self, trashed_item: Any, parent: Optional[Any]
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Should return an optional extra description to show along with the trash
|
||||
entry for this particular trashed item.
|
||||
|
||||
:return A short string giving extra detail on what has been trashed.
|
||||
"""
|
||||
return None
|
||||
|
||||
|
||||
class TrashableItemTypeRegistry(ModelRegistryMixin, Registry):
|
||||
"""
|
||||
The TrashableItemTypeRegistry contains models which can be "trashed" in baserow.
|
||||
When an instance of a trashable model is trashed it is removed from baserow but
|
||||
not permanently. Once trashed an item can then be restored to add it back to
|
||||
baserow just as it was when it was trashed.
|
||||
"""
|
||||
|
||||
name = "trashable"
|
||||
|
||||
|
||||
trash_item_type_registry = TrashableItemTypeRegistry()
|
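
A new trashable model would register a type here just like CoreConfig.ready does for groups and applications; the subclass below is a hypothetical sketch (Comment is not a real Baserow model) showing the abstract methods that have to be implemented:

    from typing import Any, Optional

    from baserow.core.trash.registries import (
        TrashableItemType,
        trash_item_type_registry,
    )


    class CommentTrashableItemType(TrashableItemType):  # hypothetical example
        type = "comment"
        model_class = None  # would be the Comment model using TrashableModelMixin

        def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
            return None  # a top-level item with no trashable parent

        def get_name(self, trashed_item: Any) -> str:
            return str(trashed_item.id)

        def trashed_item_restored(self, trashed_item: Any, trash_entry):
            pass  # e.g. send a "created" signal so clients show it again

        def permanently_delete_item(self, trashed_item: Any):
            trashed_item.delete()


    trash_item_type_registry.register(CommentTrashableItemType())
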
36
backend/src/baserow/core/trash/tasks.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
from datetime import timedelta
|
||||
from django.conf import settings
|
||||
from baserow.config.celery import app
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@app.task(
|
||||
bind=True,
|
||||
)
|
||||
def mark_old_trash_for_permanent_deletion(self):
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
TrashHandler.mark_old_trash_for_permanent_deletion()
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@app.task(
|
||||
bind=True,
|
||||
)
|
||||
def permanently_delete_marked_trash(self):
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@app.on_after_finalize.connect
|
||||
def setup_period_trash_tasks(sender, **kwargs):
|
||||
sender.add_periodic_task(
|
||||
timedelta(minutes=settings.OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES),
|
||||
mark_old_trash_for_permanent_deletion.s(),
|
||||
)
|
||||
sender.add_periodic_task(
|
||||
timedelta(minutes=settings.OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES),
|
||||
permanently_delete_marked_trash.s(),
|
||||
)
|
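
Both periodic tasks run on the same interval, and the age threshold they act on comes from a second setting; an illustrative example of configuring the two Django settings referenced above (the values are assumptions, not the project defaults):

    # settings.py (illustrative values)

    # How often Celery re-checks for trash that is old enough, and for trash
    # already marked for permanent deletion.
    OLD_TRASH_CLEANUP_CHECK_INTERVAL_MINUTES = 5

    # Anything trashed longer ago than this gets marked for permanent deletion.
    HOURS_UNTIL_TRASH_PERMANENTLY_DELETED = 72
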
74
backend/src/baserow/core/trash/trash_types.py
Normal file
|
@ -0,0 +1,74 @@
|
|||
from typing import Any, Optional
|
||||
|
||||
from baserow.core.models import Application, TrashEntry, Group
|
||||
from baserow.core.registries import application_type_registry
|
||||
from baserow.core.signals import application_created, group_restored
|
||||
from baserow.core.trash.registries import TrashableItemType, trash_item_type_registry
|
||||
|
||||
|
||||
class ApplicationTrashableItemType(TrashableItemType):
|
||||
|
||||
type = "application"
|
||||
model_class = Application
|
||||
|
||||
def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
|
||||
return trashed_item.group
|
||||
|
||||
def get_name(self, trashed_item: Application) -> str:
|
||||
return trashed_item.name
|
||||
|
||||
def trashed_item_restored(self, trashed_item: Application, trash_entry: TrashEntry):
|
||||
application_created.send(
|
||||
self,
|
||||
application=trashed_item,
|
||||
user=None,
|
||||
)
|
||||
|
||||
def permanently_delete_item(self, trashed_item: Application):
|
||||
"""
|
||||
Deletes an application and the related relations in the correct way.
|
||||
"""
|
||||
|
||||
application = trashed_item.specific
|
||||
application_type = application_type_registry.get_by_model(application)
|
||||
application_type.pre_delete(application)
|
||||
application.delete()
|
||||
return application
|
||||
|
||||
|
||||
class GroupTrashableItemType(TrashableItemType):
|
||||
|
||||
type = "group"
|
||||
model_class = Group
|
||||
|
||||
def get_parent(self, trashed_item: Any, parent_id: int) -> Optional[Any]:
|
||||
return None
|
||||
|
||||
def get_name(self, trashed_item: Group) -> str:
|
||||
return trashed_item.name
|
||||
|
||||
def trashed_item_restored(self, trashed_item: Group, trash_entry: TrashEntry):
|
||||
"""
|
||||
Informs any clients that the group exists again.
|
||||
"""
|
||||
|
||||
for group_user in trashed_item.groupuser_set.all():
|
||||
group_restored.send(self, group_user=group_user, user=None)
|
||||
|
||||
def permanently_delete_item(self, trashed_group: Group):
|
||||
"""
|
||||
Deletes the provided group and all of its applications permanently.
|
||||
"""
|
||||
|
||||
# Select all the applications so we can delete them via the handler which is
|
||||
# needed in order to call the pre_delete method for each application.
|
||||
applications = (
|
||||
trashed_group.application_set(manager="objects_and_trash")
|
||||
.all()
|
||||
.select_related("group")
|
||||
)
|
||||
application_trashable_type = trash_item_type_registry.get("application")
|
||||
for application in applications:
|
||||
application_trashable_type.permanently_delete_item(application)
|
||||
|
||||
trashed_group.delete()
|
|
@ -1,10 +1,11 @@
|
|||
from django.dispatch import receiver
|
||||
from django.db import transaction
|
||||
from django.dispatch import receiver
|
||||
|
||||
from baserow.api.applications.serializers import (
|
||||
get_application_serializer,
|
||||
)
|
||||
from baserow.api.groups.serializers import GroupSerializer, GroupUserGroupSerializer
|
||||
from baserow.api.applications.serializers import get_application_serializer
|
||||
from baserow.core import signals
|
||||
|
||||
from .tasks import broadcast_to_group, broadcast_to_users
|
||||
|
||||
|
||||
|
@ -60,6 +61,27 @@ def group_user_updated(sender, group_user, user, **kwargs):
|
|||
)
|
||||
|
||||
|
||||
@receiver(signals.group_restored)
|
||||
def group_restored(sender, group_user, user, **kwargs):
|
||||
transaction.on_commit(
|
||||
lambda: broadcast_to_users.delay(
|
||||
[group_user.user_id],
|
||||
{
|
||||
"type": "group_restored",
|
||||
"group_id": group_user.group_id,
|
||||
"group": GroupUserGroupSerializer(group_user).data,
|
||||
"applications": [
|
||||
get_application_serializer(application).data
|
||||
for application in group_user.group.application_set.select_related(
|
||||
"content_type", "group"
|
||||
).all()
|
||||
],
|
||||
},
|
||||
getattr(user, "web_socket_id", None),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(signals.group_user_deleted)
|
||||
def group_user_deleted(sender, group_user, user, **kwargs):
|
||||
transaction.on_commit(
|
||||
|
@ -72,7 +94,7 @@ def group_user_deleted(sender, group_user, user, **kwargs):
|
|||
|
||||
|
||||
@receiver(signals.application_created)
|
||||
def application_created(sender, application, user, type_name, **kwargs):
|
||||
def application_created(sender, application, user, **kwargs):
|
||||
transaction.on_commit(
|
||||
lambda: broadcast_to_group.delay(
|
||||
application.group_id,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import pytest
|
||||
|
||||
from django.shortcuts import reverse
|
||||
from rest_framework.status import (
|
||||
HTTP_200_OK,
|
||||
HTTP_204_NO_CONTENT,
|
||||
|
@ -8,8 +8,6 @@ from rest_framework.status import (
|
|||
HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
from django.shortcuts import reverse
|
||||
|
||||
from baserow.contrib.database.models import Database
|
||||
|
||||
|
||||
|
@ -80,6 +78,31 @@ def test_list_applications(api_client, data_fixture):
|
|||
response = api_client.get(url)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": group_1.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse("api:applications:list", kwargs={"group_id": group_1.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
|
||||
|
||||
url = reverse(
|
||||
"api:applications:list",
|
||||
)
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
for application in response.json():
|
||||
assert application["group"]["id"] != group_1.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_application(api_client, data_fixture):
|
||||
|
@ -162,6 +185,20 @@ def test_get_application(api_client, data_fixture):
|
|||
assert response_json["id"] == application.id
|
||||
assert response_json["group"]["id"] == group.id
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": application.group.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse("api:applications:item", kwargs={"application_id": application.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_application(api_client, data_fixture):
|
||||
|
|
|
@ -12,6 +12,7 @@ from django.shortcuts import reverse

from baserow.core.handler import CoreHandler
from baserow.core.models import GroupUser, GroupInvitation
from baserow.core.trash.handler import TrashHandler


@pytest.mark.django_db

@ -585,3 +586,79 @@ def test_get_group_invitation_by_token(api_client, data_fixture):
    assert response.status_code == HTTP_200_OK
    assert response_json["id"] == invitation_2.id
    assert response_json["email_exists"] is True


@pytest.mark.django_db
def test_when_group_is_trashed_so_is_invitation(data_fixture, api_client):
    user_1, token_1 = data_fixture.create_user_and_token(email="test1@test.nl")
    visible_group = data_fixture.create_group(user=user_1)
    trashed_group = data_fixture.create_group(user=user_1)
    visible_invitation = data_fixture.create_group_invitation(
        group=visible_group,
        invited_by=user_1,
        email="test4@test.nl",
        permissions="ADMIN",
        message="Test bericht 2",
    )
    trashed_invitation = data_fixture.create_group_invitation(
        group=trashed_group,
        invited_by=user_1,
        email="test4@test.nl",
        permissions="ADMIN",
        message="Test bericht 2",
    )
    # Put the trashed_group in the trash.
    CoreHandler().delete_group(user=user_1, group=trashed_group)

    # The trashed group 404's when asking for its invitations.
    response = api_client.get(
        reverse("api:groups:invitations:list", kwargs={"group_id": trashed_group.id}),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"

    # The untrashed group still works like normal.
    response = api_client.get(
        reverse("api:groups:invitations:list", kwargs={"group_id": visible_group.id}),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json) == 1
    assert response_json[0]["id"] == visible_invitation.id

    # The invitation for the trashed group can't be viewed.
    response = api_client.get(
        reverse(
            "api:groups:invitations:item",
            kwargs={"group_invitation_id": trashed_invitation.id},
        ),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_GROUP_INVITATION_DOES_NOT_EXIST"

    TrashHandler.restore_item(user_1, "group", trashed_group.id)

    # After restoring, the group is visible again.
    response = api_client.get(
        reverse("api:groups:invitations:list", kwargs={"group_id": trashed_group.id}),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json) == 1
    assert response_json[0]["id"] == trashed_invitation.id

    # After restoring, the invitation is visible again.
    response = api_client.get(
        reverse(
            "api:groups:invitations:item",
            kwargs={"group_invitation_id": trashed_invitation.id},
        ),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert response_json["id"] == trashed_invitation.id
@ -9,7 +9,9 @@ from rest_framework.status import (

from django.shortcuts import reverse

from baserow.core.handler import CoreHandler
from baserow.core.models import GroupUser
from baserow.core.trash.handler import TrashHandler


@pytest.mark.django_db

@ -163,3 +165,30 @@ def test_delete_group_user(api_client, data_fixture):
    )
    assert response.status_code == HTTP_204_NO_CONTENT
    assert GroupUser.objects.all().count() == 1


@pytest.mark.django_db
def test_if_group_trashed_then_group_user_is_trashed(api_client, data_fixture):
    user_1, token_1 = data_fixture.create_user_and_token(email="test1@test.nl")
    trashed_group = data_fixture.create_group(user=user_1)
    CoreHandler().delete_group(user=user_1, group=trashed_group)

    response = api_client.get(
        reverse("api:groups:users:list", kwargs={"group_id": trashed_group.id}),
        {"permissions": "MEMBER"},
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response_json["error"] == "ERROR_GROUP_DOES_NOT_EXIST"

    TrashHandler.restore_item(user_1, "group", trashed_group.id)

    response = api_client.get(
        reverse("api:groups:users:list", kwargs={"group_id": trashed_group.id}),
        HTTP_AUTHORIZATION=f"JWT {token_1}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert len(response_json) == 1
    assert response_json[0]["email"] == user_1.email
@ -4,6 +4,7 @@ from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NO

from django.shortcuts import reverse

from baserow.core.handler import CoreHandler
from baserow.core.models import Group, GroupUser


@ -155,3 +156,22 @@ def test_reorder_groups(api_client, data_fixture):
    group_user_3.refresh_from_db()

    assert [1, 2, 3] == [group_user_2.order, group_user_1.order, group_user_3.order]


@pytest.mark.django_db
def test_trashed_group_not_returned_by_views(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token(
        email="test@test.nl", password="password", first_name="Test1"
    )
    trashed_group = data_fixture.create_group(user=user)
    visible_group = data_fixture.create_group(user=user)

    CoreHandler().delete_group(user=user, group=trashed_group)

    response = api_client.get(
        reverse("api:groups:list"), **{"HTTP_AUTHORIZATION": f"JWT {token}"}
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json) == 1
    assert response_json[0]["id"] == visible_group.id
717
backend/tests/baserow/api/trash/test_trash_views.py
Normal file

@ -0,0 +1,717 @@
import pytest
from django.shortcuts import reverse
from django.utils import timezone
from freezegun import freeze_time
from rest_framework.status import (
    HTTP_200_OK,
    HTTP_204_NO_CONTENT,
    HTTP_404_NOT_FOUND,
    HTTP_400_BAD_REQUEST,
)

from baserow.core.models import Group, TrashEntry, Application
from baserow.core.trash.handler import TrashHandler


@pytest.mark.django_db
def test_deleting_a_group_moves_it_to_the_trash_and_hides_it(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token()
    group_to_delete = data_fixture.create_group(user=user)

    url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
    with freeze_time("2020-01-01 12:00"):
        response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
    assert response.status_code == HTTP_204_NO_CONTENT

    response = api_client.get(
        reverse("api:groups:list"),
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    assert response.json() == []

    response = api_client.get(
        reverse(
            "api:trash:contents",
            kwargs={
                "group_id": group_to_delete.id,
            },
        ),
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    assert response.status_code == HTTP_200_OK
    assert response.json() == {
        "count": 1,
        "next": None,
        "previous": None,
        "results": [
            {
                "application": None,
                "group": group_to_delete.id,
                "id": TrashEntry.objects.first().id,
                "parent_trash_item_id": None,
                "trash_item_id": group_to_delete.id,
                "trash_item_type": "group",
                "trashed_at": "2020-01-01T12:00:00Z",
                "user_who_trashed": user.first_name,
                "name": group_to_delete.name,
                "parent_name": None,
                "extra_description": None,
            }
        ],
    }
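
The trash contents endpoint above returns a standard paginated payload. A small illustrative helper (not part of the change) that only assumes the count/next/previous/results shape asserted here:

def summarise_trash_contents(payload):
    """Return (total, [(item type, item name), ...]) from a trash contents response."""
    entries = [
        (entry["trash_item_type"], entry["name"]) for entry in payload["results"]
    ]
    return payload["count"], entries

# With the response asserted above this would give:
# summarise_trash_contents(response.json()) == (1, [("group", group_to_delete.name)])
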
@pytest.mark.django_db
def test_can_restore_a_deleted_trash_item(api_client, data_fixture):
    user, token = data_fixture.create_user_and_token()
    group_to_delete = data_fixture.create_group(user=user)

    url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
    with freeze_time("2020-01-01 12:00"):
        response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
    assert response.status_code == HTTP_204_NO_CONTENT

    response = api_client.patch(
        reverse(
            "api:trash:restore",
        ),
        {
            "trash_item_type": "group",
            "trash_item_id": group_to_delete.id,
        },
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    assert response.status_code == HTTP_204_NO_CONTENT

    assert Group.objects.count() == 1
    assert Group.trash.count() == 0

    response = api_client.get(
        reverse(
            "api:trash:contents",
            kwargs={
                "group_id": group_to_delete.id,
            },
        ),
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    assert response.status_code == HTTP_200_OK
    assert response.json() == {
        "count": 0,
        "next": None,
        "previous": None,
        "results": [],
    }
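
The restore endpoint accepts the item type and id and, for child items such as rows (see the tests further down), the id of the parent as well. An illustrative sketch of building that PATCH body; the helper name is not part of the change:

def build_restore_payload(trash_item_type, trash_item_id, parent_trash_item_id=None):
    payload = {
        "trash_item_type": trash_item_type,
        "trash_item_id": trash_item_id,
    }
    if parent_trash_item_id is not None:
        payload["parent_trash_item_id"] = parent_trash_item_id
    return payload

# build_restore_payload("group", group_to_delete.id)
# build_restore_payload("row", row.id, parent_trash_item_id=table.id)
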
@pytest.mark.django_db
|
||||
def test_cant_restore_a_deleted_trash_item_if_not_in_group(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
other_user, other_token = data_fixture.create_user_and_token()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "group",
|
||||
"trash_item_id": group_to_delete.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {other_token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cant_restore_a_non_existent_trashed_item(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "group",
|
||||
"trash_item_id": 99999,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_TRASH_ITEM_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cant_restore_a_trashed_item_with_a_missing_parent(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
application = data_fixture.create_database_application(user=user, group=group)
|
||||
table = data_fixture.create_database_table(user=user, database=application)
|
||||
model = table.get_model()
|
||||
row = model.objects.create()
|
||||
|
||||
url = reverse(
|
||||
"api:database:rows:item", kwargs={"table_id": table.id, "row_id": row.id}
|
||||
)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "row",
|
||||
"parent_trash_item_id": 99999,
|
||||
"trash_item_id": row.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_TRASH_ITEM_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cant_restore_a_trash_item_marked_for_perm_deletion(
|
||||
api_client, data_fixture, settings
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
trashed_at = timezone.now()
|
||||
time_when_trash_item_old_enough = trashed_at + timezone.timedelta(
|
||||
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED + 1
|
||||
)
|
||||
|
||||
with freeze_time(trashed_at):
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
with freeze_time(time_when_trash_item_old_enough):
|
||||
TrashHandler.mark_old_trash_for_permanent_deletion()
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "group",
|
||||
"trash_item_id": group_to_delete.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_TRASH_ITEM_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_can_get_trash_structure(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group_to_delete = data_fixture.create_group()
|
||||
normal_group = data_fixture.create_group()
|
||||
data_fixture.create_user_group(user=user, group=group_to_delete, order=1)
|
||||
data_fixture.create_user_group(user=user, group=normal_group, order=2)
|
||||
# Another group for a different user which should not be displayed below
|
||||
data_fixture.create_group()
|
||||
application = data_fixture.create_database_application(
|
||||
user=user, group=group_to_delete
|
||||
)
|
||||
trashed_application = data_fixture.create_database_application(
|
||||
user=user, group=normal_group
|
||||
)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse(
|
||||
"api:applications:item", kwargs={"application_id": trashed_application.id}
|
||||
)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:trash:list",
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.json() == {
|
||||
"groups": [
|
||||
{
|
||||
"id": group_to_delete.id,
|
||||
"trashed": True,
|
||||
"name": group_to_delete.name,
|
||||
"applications": [
|
||||
{"id": application.id, "name": application.name, "trashed": False}
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": normal_group.id,
|
||||
"trashed": False,
|
||||
"name": normal_group.name,
|
||||
"applications": [
|
||||
{
|
||||
"id": trashed_application.id,
|
||||
"name": trashed_application.name,
|
||||
"trashed": True,
|
||||
}
|
||||
],
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_getting_a_non_existent_group_returns_404(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": 99999},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_getting_a_non_existent_app_returns_404(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": group.id},
|
||||
)
|
||||
response = api_client.get(
|
||||
f"{url}?application_id=99999",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_getting_a_app_for_diff_group_returns_400(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
other_group = data_fixture.create_group(user=user)
|
||||
|
||||
app = data_fixture.create_database_application(user=user, group=other_group)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": group.id},
|
||||
)
|
||||
response = api_client.get(
|
||||
f"{url}?application_id={app.id}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_NOT_IN_GROUP"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_cant_get_trash_contents_for_group_they_are_not_a_member_of(
|
||||
api_client, data_fixture
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
(
|
||||
other_unpermissioned_user,
|
||||
unpermissioned_token,
|
||||
) = data_fixture.create_user_and_token()
|
||||
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group_to_delete.id,
|
||||
},
|
||||
)
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {unpermissioned_token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_can_get_trash_contents_for_undeleted_group(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
|
||||
group = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group.id,
|
||||
},
|
||||
)
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.json() == {
|
||||
"count": 0,
|
||||
"next": None,
|
||||
"previous": None,
|
||||
"results": [],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_can_get_trash_contents_for_undeleted_app(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
|
||||
group = data_fixture.create_group(user=user)
|
||||
app = data_fixture.create_database_application(user=user, group=group)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group.id,
|
||||
},
|
||||
)
|
||||
response = api_client.get(
|
||||
f"{url}?application_id={app.id}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.json() == {
|
||||
"count": 0,
|
||||
"next": None,
|
||||
"previous": None,
|
||||
"results": [],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_emptying_a_trashed_group_marks_it_for_perm_deletion(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group_to_delete.id,
|
||||
},
|
||||
)
|
||||
response = api_client.delete(
|
||||
f"{url}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
assert Group.objects.count() == 0
|
||||
assert Group.trash.count() == 1
|
||||
assert TrashEntry.objects.get(
|
||||
trash_item_id=group_to_delete.id
|
||||
).should_be_permanently_deleted
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group_to_delete.id,
|
||||
},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_emptying_a_non_existent_group_returns_404(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": 99999},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_emptying_a_non_existent_app_returns_404(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": group.id},
|
||||
)
|
||||
response = api_client.delete(
|
||||
f"{url}?application_id=99999",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_emptying_a_app_for_diff_group_returns_400(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
other_group = data_fixture.create_group(user=user)
|
||||
|
||||
app = data_fixture.create_database_application(user=user, group=other_group)
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={"group_id": group.id},
|
||||
)
|
||||
response = api_client.delete(
|
||||
f"{url}?application_id={app.id}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_NOT_IN_GROUP"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_cant_empty_trash_contents_for_group_they_are_not_a_member_of(
|
||||
api_client, data_fixture
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
(
|
||||
other_unpermissioned_user,
|
||||
unpermissioned_token,
|
||||
) = data_fixture.create_user_and_token()
|
||||
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group_to_delete.id,
|
||||
},
|
||||
)
|
||||
response = api_client.delete(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {unpermissioned_token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_emptying_a_trashed_app_marks_it_for_perm_deletion(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
trashed_database = data_fixture.create_database_application(user=user, group=group)
|
||||
|
||||
url = reverse(
|
||||
"api:applications:item", kwargs={"application_id": trashed_database.id}
|
||||
)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group.id,
|
||||
},
|
||||
)
|
||||
response = api_client.delete(
|
||||
f"{url}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
assert Application.objects.count() == 0
|
||||
assert Application.trash.count() == 1
|
||||
assert TrashEntry.objects.get(
|
||||
trash_item_id=trashed_database.id
|
||||
).should_be_permanently_deleted
|
||||
|
||||
url = reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group.id,
|
||||
},
|
||||
)
|
||||
response = api_client.get(
|
||||
f"{url}?application_id={trashed_database.id}",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_deleting_a_user_who_trashed_something_returns_null_user_who_trashed(
|
||||
api_client, data_fixture
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
other_user, other_token = data_fixture.create_user_and_token()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
data_fixture.create_user_group(user=other_user, group=group_to_delete)
|
||||
|
||||
url = reverse("api:groups:item", kwargs={"group_id": group_to_delete.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
user.delete()
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:trash:contents",
|
||||
kwargs={
|
||||
"group_id": group_to_delete.id,
|
||||
},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {other_token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.json() == {
|
||||
"count": 1,
|
||||
"next": None,
|
||||
"previous": None,
|
||||
"results": [
|
||||
{
|
||||
"application": None,
|
||||
"group": group_to_delete.id,
|
||||
"id": TrashEntry.objects.first().id,
|
||||
"parent_trash_item_id": None,
|
||||
"trash_item_id": group_to_delete.id,
|
||||
"trash_item_type": "group",
|
||||
"trashed_at": "2020-01-01T12:00:00Z",
|
||||
"user_who_trashed": None,
|
||||
"name": group_to_delete.name,
|
||||
"parent_name": None,
|
||||
"extra_description": None,
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_restoring_an_item_which_doesnt_need_parent_id_with_one_returns_error(
|
||||
api_client, data_fixture
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
application = data_fixture.create_database_application(user=user, group=group)
|
||||
|
||||
url = reverse("api:applications:item", kwargs={"application_id": application.id})
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "application",
|
||||
"parent_trash_item_id": 99999,
|
||||
"trash_item_id": application.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_PARENT_ID_MUST_NOT_BE_PROVIDED"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cant_restore_a_trashed_item_requiring_a_parent_id_without_providing_it(
|
||||
api_client, data_fixture
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
group = data_fixture.create_group(user=user)
|
||||
application = data_fixture.create_database_application(user=user, group=group)
|
||||
table = data_fixture.create_database_table(user=user, database=application)
|
||||
model = table.get_model()
|
||||
row = model.objects.create()
|
||||
|
||||
url = reverse(
|
||||
"api:database:rows:item", kwargs={"table_id": table.id, "row_id": row.id}
|
||||
)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
response = api_client.delete(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:trash:restore",
|
||||
),
|
||||
{
|
||||
"trash_item_type": "row",
|
||||
"trash_item_id": row.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_PARENT_ID_MUST_BE_PROVIDED"
|
|
@ -5,6 +5,7 @@ from rest_framework.status import (
|
|||
HTTP_400_BAD_REQUEST,
|
||||
HTTP_401_UNAUTHORIZED,
|
||||
HTTP_404_NOT_FOUND,
|
||||
HTTP_204_NO_CONTENT,
|
||||
)
|
||||
|
||||
from django.shortcuts import reverse
|
||||
|
@ -107,6 +108,21 @@ def test_list_fields(api_client, data_fixture):
|
|||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:groups:item",
|
||||
kwargs={"group_id": table_1.database.group.id},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
response = api_client.get(
|
||||
reverse("api:database:fields:list", kwargs={"table_id": table_1.id}),
|
||||
**{"HTTP_AUTHORIZATION": f"JWT {jwt_token}"},
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_TABLE_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_field(api_client, data_fixture):
|
||||
|
@ -239,6 +255,19 @@ def test_get_field(api_client, data_fixture):
|
|||
assert response_json["table_id"] == text.table_id
|
||||
assert not response_json["text_default"]
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:groups:item",
|
||||
kwargs={"group_id": table.database.group.id},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
url = reverse("api:database:fields:item", kwargs={"field_id": text.id})
|
||||
response = api_client.get(url, format="json", HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_field(api_client, data_fixture):
|
||||
|
|
|
@ -42,6 +42,17 @@ def test_list_tables(api_client, data_fixture):
|
|||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": database.group.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse("api:database:tables:list", kwargs={"database_id": database.id})
|
||||
response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_APPLICATION_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_table(api_client, data_fixture):
|
||||
|
|
|
@ -78,6 +78,17 @@ def test_list_views(api_client, data_fixture):
|
|||
response = api_client.get(url)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": table_1.database.group.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse("api:database:views:list", kwargs={"table_id": table_1.id})
|
||||
response = api_client.get(url)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_TABLE_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_list_views_including_filters(api_client, data_fixture):
|
||||
|
@ -326,6 +337,20 @@ def test_get_view(api_client, data_fixture):
|
|||
assert response_json["filters"][0]["value"] == filter.value
|
||||
assert response_json["sortings"] == []
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": view.table.database.group.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
url = reverse("api:database:views:item", kwargs={"view_id": view.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_view(api_client, data_fixture):
|
||||
|
@ -548,6 +573,18 @@ def test_list_view_filters(api_client, data_fixture):
|
|||
assert response_json[0]["value"] == filter_1.value
|
||||
assert response_json[1]["id"] == filter_2.id
|
||||
|
||||
response = api_client.delete(
|
||||
reverse("api:groups:item", kwargs={"group_id": table_1.database.group.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
response = api_client.get(
|
||||
reverse("api:database:views:list_filters", kwargs={"view_id": view_1.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_view_filter(api_client, data_fixture):
|
||||
|
@ -701,6 +738,24 @@ def test_get_view_filter(api_client, data_fixture):
|
|||
assert response_json["type"] == "equal"
|
||||
assert response_json["value"] == "test"
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:groups:item",
|
||||
kwargs={"group_id": filter_1.view.table.database.group.id},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:database:views:filter_item", kwargs={"view_filter_id": filter_1.id}
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_VIEW_FILTER_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_view_filter(api_client, data_fixture):
|
||||
|
@ -926,6 +981,22 @@ def test_list_view_sortings(api_client, data_fixture):
|
|||
assert response_json[0]["order"] == sort_1.order
|
||||
assert response_json[1]["id"] == sort_2.id
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:groups:item",
|
||||
kwargs={"group_id": view_1.table.database.group.id},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.get(
|
||||
reverse("api:database:views:list_sortings", kwargs={"view_id": view_1.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_view_sort(api_client, data_fixture):
|
||||
|
@ -1092,6 +1163,22 @@ def test_get_view_sort(api_client, data_fixture):
|
|||
assert response_json["field"] == first.field_id
|
||||
assert response_json["order"] == "DESC"
|
||||
|
||||
response = api_client.delete(
|
||||
reverse(
|
||||
"api:groups:item",
|
||||
kwargs={"group_id": sort_1.view.table.database.group.id},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_204_NO_CONTENT
|
||||
|
||||
response = api_client.get(
|
||||
reverse("api:database:views:sort_item", kwargs={"view_sort_id": sort_1.id}),
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response.json()["error"] == "ERROR_VIEW_SORT_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_view_sort(api_client, data_fixture):
|
||||
|
|
|
@ -234,7 +234,7 @@ def test_can_export_every_interesting_different_field_to_csv(
|
|||
|
||||
|
||||
def run_export_job_over_interesting_table(data_fixture, storage_mock, options):
|
||||
table, user = setup_interesting_test_table(data_fixture)
|
||||
table, user, _ = setup_interesting_test_table(data_fixture)
|
||||
grid_view = data_fixture.create_grid_view(table=table)
|
||||
job, contents = run_export_job_with_mock_storage(
|
||||
table, grid_view, storage_mock, user, options
|
||||
|
|
|
@ -754,6 +754,7 @@ def test_delete_field(send_mock, data_fixture):
|
|||
field_id = text_field.id
|
||||
handler.delete_field(user=user, field=text_field)
|
||||
assert Field.objects.all().count() == 0
|
||||
assert Field.trash.all().count() == 1
|
||||
assert TextField.objects.all().count() == 0
|
||||
|
||||
send_mock.assert_called_once()
|
||||
|
@ -761,10 +762,6 @@ def test_delete_field(send_mock, data_fixture):
|
|||
assert send_mock.call_args[1]["field"].id == field_id
|
||||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
|
||||
table_model = table.get_model()
|
||||
field_name = f"field_{text_field.id}"
|
||||
assert field_name not in [field.name for field in table_model._meta.get_fields()]
|
||||
|
||||
primary = data_fixture.create_text_field(table=table, primary=True)
|
||||
with pytest.raises(CannotDeletePrimaryField):
|
||||
handler.delete_field(user=user, field=primary)
|
||||
|
|
|
@ -15,6 +15,7 @@ from baserow.contrib.database.fields.models import (
|
|||
from baserow.contrib.database.fields.handler import FieldHandler
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
from baserow.contrib.database.rows.handler import RowHandler
|
||||
from tests.test_utils import setup_interesting_test_table
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
@ -435,3 +436,37 @@ def test_phone_number_field_type(data_fixture):
|
|||
|
||||
field_handler.delete_field(user=user, field=phone_number_field)
|
||||
assert len(PhoneNumberField.objects.all()) == 3
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_human_readable_values(data_fixture):
|
||||
table, user, row = setup_interesting_test_table(data_fixture)
|
||||
model = table.get_model()
|
||||
results = {}
|
||||
for field in model._field_objects.values():
|
||||
value = field["type"].get_human_readable_value(
|
||||
getattr(row, field["name"]), field
|
||||
)
|
||||
results[field["field"].name] = value
|
||||
|
||||
assert results == {
|
||||
"boolean": "True",
|
||||
"date_eu": "01/02/2020",
|
||||
"date_us": "02/01/2020",
|
||||
"datetime_eu": "01/02/2020 01:23",
|
||||
"datetime_us": "02/01/2020 01:23",
|
||||
"decimal_link_row": "1.234,-123.456,unnamed row 3",
|
||||
"email": "test@example.com",
|
||||
"file": "a.txt,b.txt",
|
||||
"file_link_row": "name.txt,unnamed row 2",
|
||||
"link_row": "linked_row_1,linked_row_2,unnamed row 3",
|
||||
"long_text": "long_text",
|
||||
"negative_decimal": "-1.2",
|
||||
"negative_int": "-1",
|
||||
"phone_number": "+4412345678",
|
||||
"positive_decimal": "1.2",
|
||||
"positive_int": "1",
|
||||
"single_select": "A",
|
||||
"text": "text",
|
||||
"url": "https://www.google.com",
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ from baserow.contrib.database.fields.exceptions import (
|
|||
LinkRowTableNotProvided,
|
||||
)
|
||||
from baserow.contrib.database.rows.handler import RowHandler
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
@ -194,10 +195,6 @@ def test_link_row_field_type(data_fixture):
|
|||
# Delete the existing field. After that the related field should be deleted and
|
||||
# no table named _relation_ should exist.
|
||||
field_handler.delete_field(user, link_field_1)
|
||||
assert LinkRowField.objects.all().count() == 0
|
||||
for t in connection.introspection.table_names():
|
||||
if "_relation_" in t:
|
||||
assert False
|
||||
|
||||
# Change the text field back into a link row field.
|
||||
link_field_2 = field_handler.update_field(
|
||||
|
@ -356,7 +353,9 @@ def test_link_row_field_type_rows(data_fixture):
|
|||
# Just check if the field can be deleted.
|
||||
field_handler.delete_field(user=user, field=link_row_field)
|
||||
# We expect only the primary field to be left.
|
||||
assert Field.objects.all().count() == 1
|
||||
objects_all = Field.objects.all()
|
||||
print(objects_all.query)
|
||||
assert objects_all.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
@ -662,6 +661,7 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):
|
|||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
response_json = response.json()
|
||||
row_id = response_json["id"]
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
@ -802,3 +802,104 @@ def test_import_export_link_row_field(data_fixture, user_tables_in_separate_db):
|
|||
assert [
|
||||
r.id for r in getattr(imported_row, f"field_{imported_link_row_field.id}").all()
|
||||
] == [imported_c_row.id, imported_c_row_2.id]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_creating_a_linked_row_pointing_at_trashed_row_works_but_does_not_display(
|
||||
data_fixture, api_client
|
||||
):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
table_with_trashed_row = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
table_linking_to_trashed_row = data_fixture.create_database_table(
|
||||
name="Cars", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user,
|
||||
table=table_with_trashed_row,
|
||||
type_name="text",
|
||||
name="Name",
|
||||
primary=True,
|
||||
)
|
||||
trashed_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=table_with_trashed_row,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=table_linking_to_trashed_row,
|
||||
type_name="link_row",
|
||||
name="customer",
|
||||
link_row_table=table_with_trashed_row,
|
||||
)
|
||||
# Create a primary field and some example data for the cars table.
|
||||
field_handler.create_field(
|
||||
user=user,
|
||||
table=table_linking_to_trashed_row,
|
||||
type_name="text",
|
||||
name="Name",
|
||||
primary=True,
|
||||
)
|
||||
TrashHandler.trash(
|
||||
user,
|
||||
database.group,
|
||||
database,
|
||||
trashed_row,
|
||||
parent_id=table_with_trashed_row.id,
|
||||
)
|
||||
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:database:rows:list",
|
||||
kwargs={"table_id": table_linking_to_trashed_row.id},
|
||||
),
|
||||
{
|
||||
f"field_{link_field_1.id}": [trashed_row.id],
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
# Even though the call succeeded, the linked row is not returned
|
||||
assert response_json[f"field_{link_field_1.id}"] == []
|
||||
|
||||
row_id = response_json["id"]
|
||||
|
||||
url = reverse(
|
||||
"api:database:rows:item",
|
||||
kwargs={"table_id": table_linking_to_trashed_row.id, "row_id": row_id},
|
||||
)
|
||||
response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_200_OK
|
||||
# Other endpoints also don't return this connection made whilst trashed
|
||||
assert response.json()[f"field_{link_field_1.id}"] == []
|
||||
|
||||
TrashHandler.restore_item(
|
||||
user,
|
||||
"row",
|
||||
row_id,
|
||||
parent_trash_item_id=table_with_trashed_row.id,
|
||||
)
|
||||
|
||||
url = reverse(
|
||||
"api:database:rows:item",
|
||||
kwargs={"table_id": table_linking_to_trashed_row.id, "row_id": row_id},
|
||||
)
|
||||
response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}")
|
||||
assert response.status_code == HTTP_200_OK
|
||||
# Now that the row was un-trashed, it appears.
|
||||
linked_field_values = response.json()[f"field_{link_field_1.id}"]
|
||||
assert len(linked_field_values) == 1
|
||||
assert linked_field_values[0]["id"] == trashed_row.id
|
||||
|
|
|
@ -9,6 +9,7 @@ from django.db import models
|
|||
from baserow.core.exceptions import UserNotInGroup
|
||||
from baserow.contrib.database.rows.handler import RowHandler
|
||||
from baserow.contrib.database.rows.exceptions import RowDoesNotExist
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
def test_get_field_ids_from_dict():
|
||||
|
@ -432,6 +433,9 @@ def test_delete_row(before_send_mock, send_mock, data_fixture):
|
|||
row_id = row.id
|
||||
handler.delete_row(user=user, table=table, row_id=row.id)
|
||||
assert model.objects.all().count() == 1
|
||||
assert model.trash.all().count() == 1
|
||||
row.refresh_from_db()
|
||||
assert row.trashed
|
||||
|
||||
before_send_mock.assert_called_once()
|
||||
assert before_send_mock.call_args[1]["row"]
|
||||
|
@ -446,3 +450,45 @@ def test_delete_row(before_send_mock, send_mock, data_fixture):
|
|||
assert send_mock.call_args[1]["table"].id == table.id
|
||||
assert send_mock.call_args[1]["model"]._generated_table_model
|
||||
assert send_mock.call_args[1]["before_return"] == before_send_mock.return_value
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.contrib.database.rows.signals.row_created.send")
|
||||
def test_restore_row(send_mock, data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(name="Car", user=user)
|
||||
name_field = data_fixture.create_text_field(
|
||||
table=table, name="Name", text_default="Test"
|
||||
)
|
||||
speed_field = data_fixture.create_number_field(
|
||||
table=table, name="Max speed", number_negative=True
|
||||
)
|
||||
price_field = data_fixture.create_number_field(
|
||||
table=table,
|
||||
name="Price",
|
||||
number_type="DECIMAL",
|
||||
number_decimal_places=2,
|
||||
number_negative=False,
|
||||
)
|
||||
|
||||
handler = RowHandler()
|
||||
|
||||
row_1 = handler.create_row(
|
||||
user=user,
|
||||
table=table,
|
||||
values={
|
||||
name_field.id: "Tesla",
|
||||
speed_field.id: 240,
|
||||
f"field_{price_field.id}": 59999.99,
|
||||
},
|
||||
)
|
||||
|
||||
handler.delete_row(user, table, row_1.id)
|
||||
TrashHandler.restore_item(user, "row", row_1.id, parent_trash_item_id=table.id)
|
||||
|
||||
assert len(send_mock.call_args) == 2
|
||||
assert send_mock.call_args[1]["row"].id == row_1.id
|
||||
assert send_mock.call_args[1]["user"] is None
|
||||
assert send_mock.call_args[1]["table"].id == table.id
|
||||
assert send_mock.call_args[1]["before"] is None
|
||||
assert send_mock.call_args[1]["model"]._generated_table_model
|
||||
|
|
|
@ -21,6 +21,7 @@ from baserow.contrib.database.fields.models import (
|
|||
BooleanField,
|
||||
)
|
||||
from baserow.contrib.database.views.models import GridView, GridViewFieldOptions
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
@ -42,6 +43,17 @@ def test_get_database_table(data_fixture):
|
|||
table_copy = handler.get_table(table_id=table.id)
|
||||
assert table_copy.id == table.id
|
||||
|
||||
TrashHandler.trash(user, table.database.group, table.database, table.database)
|
||||
|
||||
with pytest.raises(TableDoesNotExist):
|
||||
handler.get_table(table_id=table.id)
|
||||
|
||||
TrashHandler.restore_item(user, "application", table.database.id)
|
||||
|
||||
TrashHandler.trash(user, table.database.group, None, table.database.group)
|
||||
with pytest.raises(TableDoesNotExist):
|
||||
handler.get_table(table_id=table.id)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.contrib.database.table.signals.table_created.send")
|
||||
|
@ -326,7 +338,7 @@ def test_delete_database_table(send_mock, data_fixture):
|
|||
handler.delete_table(user=user_2, table=table)
|
||||
|
||||
assert Table.objects.all().count() == 1
|
||||
assert f"database_table_{table.id}" in connection.introspection.table_names()
|
||||
assert Table.trash.all().count() == 0
|
||||
|
||||
table_id = table.id
|
||||
handler.delete_table(user=user, table=table)
|
||||
|
@ -336,4 +348,5 @@ def test_delete_database_table(send_mock, data_fixture):
|
|||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
assert Table.trash.all().count() == 1
|
||||
assert f"database_table_{table.id}" in connection.introspection.table_names()
|
||||
|
|
|
@ -34,7 +34,7 @@ def test_group_user_get_next_order(data_fixture):

@pytest.mark.django_db
def test_get_table_model(data_fixture):
    default_model_fields_count = 3
    default_model_fields_count = 4
    table = data_fixture.create_database_table(name="Cars")
    text_field = data_fixture.create_text_field(
        table=table, order=0, name="Color", text_default="white"
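
The default field count in the hunk above goes from 3 to 4, presumably because generated table models now carry the trashed flag alongside their other default columns. A sketch (not part of the diff) of how the two managers on a generated model are expected to split rows, based on the assertions in the row tests elsewhere in this commit:

@pytest.mark.django_db
def test_generated_model_trash_manager_sketch(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    model = table.get_model()

    row = model.objects.create()
    assert model.objects.count() == 1
    assert model.trash.count() == 0

    # Flagging the row as trashed moves it from the default manager to `trash`.
    row.trashed = True
    row.save()
    assert model.objects.count() == 0
    assert model.trash.count() == 1
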
@ -0,0 +1,748 @@
from unittest.mock import patch

import pytest
from django.db import connection

from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field, TextField, LinkRowField
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.handler import ViewHandler
from baserow.core.models import TrashEntry
from baserow.core.trash.exceptions import (
    ParentIdMustBeProvidedException,
    ParentIdMustNotBeProvidedException,
)
from baserow.core.trash.handler import TrashHandler


@pytest.mark.django_db
def test_delete_row(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(name="Car", user=user)
    data_fixture.create_text_field(table=table, name="Name", text_default="Test")

    handler = RowHandler()
    model = table.get_model()
    row = handler.create_row(user=user, table=table)
    handler.create_row(user=user, table=table)

    TrashHandler.permanently_delete(row)
    assert model.objects.all().count() == 1
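
test_delete_row above exercises the immediate TrashHandler.permanently_delete path; the test that follows measures query counts for the deferred path. A condensed sketch of that deferred flow, using only calls that appear in this file (the test name is illustrative):

@pytest.mark.django_db
def test_deferred_permanent_deletion_sketch(data_fixture):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(name="Car", user=user)
    handler = RowHandler()
    model = table.get_model()
    row = handler.create_row(user=user, table=table)

    # Trashing keeps the row around, behind the trash manager, with a TrashEntry.
    TrashHandler.trash(
        user, table.database.group, table.database, row, parent_id=table.id
    )
    assert model.trash.all().count() == 1
    assert TrashEntry.objects.count() == 1

    # It is only removed once marked and the cleanup job runs.
    TrashEntry.objects.update(should_be_permanently_deleted=True)
    TrashHandler.permanently_delete_marked_trash()
    assert model.trash.all().count() == 0
    assert TrashEntry.objects.count() == 0
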
@pytest.mark.django_db
def test_perm_deleting_many_rows_at_once_only_looks_up_the_model_once(
    data_fixture, django_assert_num_queries
):
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(name="Car", user=user)
    data_fixture.create_text_field(table=table, name="Name", text_default="Test")

    handler = RowHandler()
    model = table.get_model()
    row_1 = handler.create_row(user=user, table=table)

    TrashHandler.trash(
        user, table.database.group, table.database, row_1, parent_id=table.id
    )
    assert model.objects.all().count() == 0
    assert model.trash.all().count() == 1
    assert TrashEntry.objects.count() == 1

    TrashEntry.objects.update(should_be_permanently_deleted=True)

    with django_assert_num_queries(9):
        TrashHandler.permanently_delete_marked_trash()

    row_2 = handler.create_row(user=user, table=table)
    row_3 = handler.create_row(user=user, table=table)
    TrashHandler.trash(
        user, table.database.group, table.database, row_2, parent_id=table.id
    )
    TrashHandler.trash(
        user, table.database.group, table.database, row_3, parent_id=table.id
    )

    assert model.objects.all().count() == 0
    assert model.trash.all().count() == 2
    assert TrashEntry.objects.count() == 2

    TrashEntry.objects.update(should_be_permanently_deleted=True)

    # We only want five more queries when deleting 2 rows instead of 1 compared to
    # above:
    # 1. A query to look up the extra row we are deleting.
    # 2. A query to delete said row.
    # 3. A query to delete its trash entry.
    # 4. Queries to open and close transactions for each deletion.
    # If we weren't caching the table models, extra queries would first be performed
    # to look up the table information, which would break this assertion.
    with django_assert_num_queries(14):
        TrashHandler.permanently_delete_marked_trash()
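
The query-count assertion above relies on the cleanup run building each table's generated model at most once. A minimal sketch of such a per-run cache; the cache shape and helper name are assumptions for illustration, not the actual Baserow implementation:

_model_cache = {}

def get_cached_table_model(table):
    """Build a table's generated model at most once per cleanup run."""
    if table.id not in _model_cache:
        _model_cache[table.id] = table.get_model()
    return _model_cache[table.id]
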
@pytest.mark.django_db
|
||||
def test_delete_row_when_in_separate_user_db(data_fixture, user_tables_in_separate_db):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(name="Car", user=user)
|
||||
data_fixture.create_text_field(table=table, name="Name", text_default="Test")
|
||||
|
||||
handler = RowHandler()
|
||||
row = handler.create_row(user=user, table=table)
|
||||
handler.create_row(user=user, table=table)
|
||||
|
||||
TrashHandler.trash(
|
||||
user, table.database.group, table.database, row, parent_id=table.id
|
||||
)
|
||||
model = table.get_model()
|
||||
assert model.objects.all().count() == 1
|
||||
assert model.trash.all().count() == 1
|
||||
|
||||
TrashHandler.permanently_delete(row)
|
||||
assert model.objects.all().count() == 1
|
||||
assert model.trash.all().count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_delete_table(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(group=group)
|
||||
table = data_fixture.create_database_table(user=user, database=database)
|
||||
|
||||
assert Table.objects.all().count() == 1
|
||||
assert f"database_table_{table.id}" in connection.introspection.table_names()
|
||||
|
||||
TrashHandler.permanently_delete(table)
|
||||
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_delete_field(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
text_field = data_fixture.create_text_field(table=table)
|
||||
|
||||
assert Field.objects.all().count() == 1
|
||||
assert TextField.objects.all().count() == 1
|
||||
TrashHandler.permanently_delete(text_field)
|
||||
assert Field.objects.all().count() == 0
|
||||
assert Field.trash.all().count() == 0
|
||||
assert TextField.objects.all().count() == 0
|
||||
|
||||
table_model = table.get_model()
|
||||
field_name = f"field_{text_field.id}"
|
||||
assert field_name not in [field.name for field in table_model._meta.get_fields()]
|
||||
assert f"trashed_{field_name}" not in [
|
||||
field.name for field in table_model._meta.get_fields()
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_delete_link_row_field(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
table = data_fixture.create_database_table(name="Example", database=database)
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
cars_table = data_fixture.create_database_table(name="Cars", database=database)
|
||||
data_fixture.create_database_table(name="Unrelated")
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "Jane"},
|
||||
)
|
||||
|
||||
# Create a primary field and some example data for the cars table.
|
||||
cars_primary_field = field_handler.create_field(
|
||||
user=user, table=cars_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user, table=cars_table, values={f"field_{cars_primary_field.id}": "BMW"}
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user, table=cars_table, values={f"field_{cars_primary_field.id}": "Audi"}
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=table,
|
||||
type_name="link_row",
|
||||
name="Customer",
|
||||
link_row_table=customers_table,
|
||||
)
|
||||
TrashHandler.permanently_delete(link_field_1)
|
||||
assert LinkRowField.objects.all().count() == 0
|
||||
for t in connection.introspection.table_names():
|
||||
if "_relation_" in t:
|
||||
assert False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_a_table_with_link_fields_pointing_at_it_also_trashes_those_fields(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
table = data_fixture.create_database_table(name="Example", database=database)
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
cars_table = data_fixture.create_database_table(name="Cars", database=database)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "Jane"},
|
||||
)
|
||||
|
||||
# Create a primary field and some example data for the cars table.
|
||||
cars_primary_field = field_handler.create_field(
|
||||
user=user, table=cars_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user, table=cars_table, values={f"field_{cars_primary_field.id}": "BMW"}
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user, table=cars_table, values={f"field_{cars_primary_field.id}": "Audi"}
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=table,
|
||||
type_name="link_row",
|
||||
name="Customer",
|
||||
link_row_table=customers_table,
|
||||
)
|
||||
TrashHandler.trash(user, database.group, database, customers_table)
|
||||
|
||||
link_field_1.refresh_from_db()
|
||||
assert link_field_1.trashed
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashed_row_entry_includes_the_rows_primary_key_value_as_an_extra_description(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
trash_entry = TrashHandler.trash(
|
||||
user, database.group, database, row, parent_id=customers_table.id
|
||||
)
|
||||
|
||||
assert trash_entry.extra_description == "John"
|
||||
assert trash_entry.name == str(row.id)
|
||||
assert trash_entry.parent_name == "Customers"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashed_row_entry_extra_description_is_unnamed_when_no_value_pk(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={},
|
||||
)
|
||||
trash_entry = TrashHandler.trash(
|
||||
user, database.group, database, row, parent_id=customers_table.id
|
||||
)
|
||||
|
||||
assert trash_entry.extra_description == f"unnamed row {row.id}"
|
||||
assert trash_entry.name == str(row.id)
|
||||
assert trash_entry.parent_name == "Customers"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_restoring_a_trashed_link_field_restores_the_opposing_field_also(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "Jane"},
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=table,
|
||||
type_name="link_row",
|
||||
name="Customer",
|
||||
link_row_table=customers_table,
|
||||
)
|
||||
TrashHandler.trash(user, database.group, database, link_field_1)
|
||||
|
||||
assert LinkRowField.trash.count() == 2
|
||||
|
||||
TrashHandler.restore_item(user, "field", link_field_1.id)
|
||||
|
||||
assert LinkRowField.objects.count() == 2
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_a_row_hides_it_from_a_link_row_field_pointing_at_it(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
cars_table = data_fixture.create_database_table(name="Cars", database=database)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
john_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
jane_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "Jane"},
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
type_name="link_row",
|
||||
name="customer",
|
||||
link_row_table=customers_table,
|
||||
)
|
||||
# Create a primary field and some example data for the cars table.
|
||||
cars_primary_field = field_handler.create_field(
|
||||
user=user, table=cars_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
linked_row_pointing_at_john = row_handler.create_row(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
values={
|
||||
f"field_{cars_primary_field.id}": "BMW",
|
||||
f"field_{link_field_1.id}": [john_row.id],
|
||||
},
|
||||
)
|
||||
linked_row_pointing_at_jane = row_handler.create_row(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
values={
|
||||
f"field_{cars_primary_field.id}": "Audi",
|
||||
f"field_{link_field_1.id}": [jane_row.id],
|
||||
},
|
||||
)
|
||||
|
||||
cars_model = cars_table.get_model(attribute_names=True)
|
||||
assert list(cars_model.objects.values_list("customer", flat=True)) == [
|
||||
john_row.id,
|
||||
jane_row.id,
|
||||
]
|
||||
row = RowHandler().get_row(user, cars_table, linked_row_pointing_at_john.id)
|
||||
assert list(
|
||||
getattr(row, f"field_{link_field_1.id}").values_list("id", flat=True)
|
||||
) == [john_row.id]
|
||||
|
||||
TrashHandler.trash(
|
||||
user, database.group, database, john_row, parent_id=customers_table.id
|
||||
)
|
||||
|
||||
row = RowHandler().get_row(user, cars_table, linked_row_pointing_at_john.id)
|
||||
assert list(getattr(row, f"field_{link_field_1.id}").all()) == []
|
||||
row = RowHandler().get_row(user, cars_table, linked_row_pointing_at_jane.id)
|
||||
assert list(
|
||||
getattr(row, f"field_{link_field_1.id}").values_list("id", flat=True)
|
||||
) == [jane_row.id]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_trashed_linked_row_pointing_at_a_trashed_row_is_restored_correctly(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
cars_table = data_fixture.create_database_table(name="Cars", database=database)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
john_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
jane_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "Jane"},
|
||||
)
|
||||
|
||||
link_field_1 = field_handler.create_field(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
type_name="link_row",
|
||||
name="customer",
|
||||
link_row_table=customers_table,
|
||||
)
|
||||
# Create a primary field and some example data for the cars table.
|
||||
cars_primary_field = field_handler.create_field(
|
||||
user=user, table=cars_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
linked_row_pointing_at_john = row_handler.create_row(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
values={
|
||||
f"field_{cars_primary_field.id}": "BMW",
|
||||
f"field_{link_field_1.id}": [john_row.id],
|
||||
},
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=cars_table,
|
||||
values={
|
||||
f"field_{cars_primary_field.id}": "Audi",
|
||||
f"field_{link_field_1.id}": [jane_row.id],
|
||||
},
|
||||
)
|
||||
|
||||
TrashHandler.trash(
|
||||
user,
|
||||
database.group,
|
||||
database,
|
||||
linked_row_pointing_at_john,
|
||||
parent_id=cars_table.id,
|
||||
)
|
||||
TrashHandler.trash(
|
||||
user, database.group, database, john_row, parent_id=customers_table.id
|
||||
)
|
||||
TrashHandler.restore_item(
|
||||
user, "row", linked_row_pointing_at_john.id, parent_trash_item_id=cars_table.id
|
||||
)
|
||||
|
||||
row = RowHandler().get_row(user, cars_table, linked_row_pointing_at_john.id)
|
||||
assert list(getattr(row, f"field_{link_field_1.id}").all()) == []
|
||||
|
||||
TrashHandler.restore_item(
|
||||
user, "row", john_row.id, parent_trash_item_id=customers_table.id
|
||||
)
|
||||
|
||||
row = RowHandler().get_row(user, cars_table, linked_row_pointing_at_john.id)
|
||||
assert list(
|
||||
getattr(row, f"field_{link_field_1.id}").values_list("id", flat=True)
|
||||
) == [john_row.id]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_parent_id_must_be_provided_when_trashing_or_restoring_a_row(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
john_row = row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={f"field_{customers_primary_field.id}": "John"},
|
||||
)
|
||||
|
||||
with pytest.raises(ParentIdMustBeProvidedException):
|
||||
TrashHandler.trash(
|
||||
user,
|
||||
database.group,
|
||||
database,
|
||||
john_row,
|
||||
)
|
||||
|
||||
TrashHandler.trash(
|
||||
user, database.group, database, john_row, parent_id=customers_table.id
|
||||
)
|
||||
|
||||
with pytest.raises(ParentIdMustBeProvidedException):
|
||||
TrashHandler.restore_item(user, "row", john_row.id)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_parent_id_must_not_be_provided_when_trashing_or_restoring_an_app(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
with pytest.raises(ParentIdMustNotBeProvidedException):
|
||||
TrashHandler.trash(
|
||||
user, database.group, database, database, parent_id=database.group.id
|
||||
)
|
||||
|
||||
TrashHandler.trash(user, database.group, database, database)
|
||||
|
||||
with pytest.raises(ParentIdMustNotBeProvidedException):
|
||||
TrashHandler.restore_item(
|
||||
user, "application", database.id, parent_trash_item_id=database.group.id
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_a_field_with_a_filter_trashes_the_filter(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
other_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Other"
|
||||
)
|
||||
grid_view = data_fixture.create_grid_view(user=user, table=customers_table)
|
||||
data_fixture.create_view_filter(
|
||||
view=grid_view, user=user, field=other_field, value="Steve"
|
||||
)
|
||||
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={
|
||||
f"field_{customers_primary_field.id}": "John",
|
||||
f"field_{other_field.id}": "Test",
|
||||
},
|
||||
)
|
||||
|
||||
TrashHandler.trash(
|
||||
user,
|
||||
database.group,
|
||||
database,
|
||||
other_field,
|
||||
)
|
||||
|
||||
model = customers_table.get_model()
|
||||
filtered_qs = ViewHandler().apply_filters(grid_view, model.objects.all())
|
||||
assert filtered_qs.count() == 1
|
||||
TrashHandler.restore_item(
|
||||
user,
|
||||
"field",
|
||||
other_field.id,
|
||||
)
|
||||
|
||||
model = customers_table.get_model()
|
||||
filtered_qs = ViewHandler().apply_filters(grid_view, model.objects.all())
|
||||
assert filtered_qs.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_a_field_with_a_sort_trashes_the_sort(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
database = data_fixture.create_database_application(user=user, name="Placeholder")
|
||||
customers_table = data_fixture.create_database_table(
|
||||
name="Customers", database=database
|
||||
)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
|
||||
# Create a primary field and some example data for the customers table.
|
||||
customers_primary_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Name", primary=True
|
||||
)
|
||||
other_field = field_handler.create_field(
|
||||
user=user, table=customers_table, type_name="text", name="Other"
|
||||
)
|
||||
grid_view = data_fixture.create_grid_view(user=user, table=customers_table)
|
||||
data_fixture.create_view_sort(
|
||||
view=grid_view, user=user, field=other_field, order="ASC"
|
||||
)
|
||||
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={
|
||||
f"field_{customers_primary_field.id}": "1",
|
||||
f"field_{other_field.id}": "2",
|
||||
},
|
||||
)
|
||||
row_handler.create_row(
|
||||
user=user,
|
||||
table=customers_table,
|
||||
values={
|
||||
f"field_{customers_primary_field.id}": "2",
|
||||
f"field_{other_field.id}": "1",
|
||||
},
|
||||
)
|
||||
|
||||
TrashHandler.trash(
|
||||
user,
|
||||
database.group,
|
||||
database,
|
||||
other_field,
|
||||
)
|
||||
|
||||
model = customers_table.get_model()
|
||||
filtered_qs = ViewHandler().apply_sorting(grid_view, model.objects.all())
|
||||
assert list(
|
||||
filtered_qs.values_list(f"field_{customers_primary_field.id}", flat=True)
|
||||
) == ["1", "2"]
|
||||
|
||||
TrashHandler.restore_item(
|
||||
user,
|
||||
"field",
|
||||
other_field.id,
|
||||
)
|
||||
|
||||
model = customers_table.get_model()
|
||||
filtered_qs = ViewHandler().apply_sorting(grid_view, model.objects.all())
|
||||
assert list(
|
||||
filtered_qs.values_list(f"field_{customers_primary_field.id}", flat=True)
|
||||
) == ["2", "1"]
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
def test_can_perm_delete_tables_in_another_user_db(
|
||||
data_fixture,
|
||||
user_tables_in_separate_db,
|
||||
):
|
||||
patcher = patch("baserow.core.models.TrashEntry.delete")
|
||||
trash_entry_delete = patcher.start()
|
||||
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(name="Car", user=user)
|
||||
data_fixture.create_text_field(table=table, name="Name", text_default="Test")
|
||||
|
||||
TrashHandler.trash(user, table.database.group, table.database, table)
|
||||
assert TrashEntry.objects.count() == 1
|
||||
assert (
|
||||
f"database_table_{table.id}"
|
||||
in user_tables_in_separate_db.introspection.table_names()
|
||||
)
|
||||
|
||||
TrashEntry.objects.update(should_be_permanently_deleted=True)
|
||||
|
||||
trash_entry_delete.side_effect = RuntimeError("Force the outer transaction to fail")
|
||||
with pytest.raises(RuntimeError):
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
assert Table.trash.filter(id=table.id).exists()
|
||||
assert TrashEntry.objects.count() == 1
|
||||
# Even though the transaction rolled back and restored the Table and TrashEntry,
# the actual table was still deleted because that happened over a different
# connection!
|
||||
assert (
|
||||
f"database_table_{table.id}"
|
||||
not in user_tables_in_separate_db.introspection.table_names()
|
||||
)
|
||||
|
||||
# Now stop patching TrashEntry.delete so the deletion works the second time, as
# long as it handles the actual table no longer being there.
|
||||
patcher.stop()
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
assert not Table.trash.filter(id=table.id).exists()
|
||||
assert TrashEntry.objects.count() == 0
|
||||
assert (
|
||||
f"database_table_{table.id}"
|
||||
not in user_tables_in_separate_db.introspection.table_names()
|
||||
)
|
|
@ -26,6 +26,7 @@ from baserow.contrib.database.views.exceptions import (
|
|||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.fields.handler import FieldHandler
|
||||
from baserow.contrib.database.fields.exceptions import FieldNotInTable
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
@ -61,6 +62,16 @@ def test_get_view(data_fixture):
|
|||
view_id=grid.id, base_queryset=View.objects.prefetch_related("UNKNOWN")
|
||||
)
|
||||
|
||||
# If the table is trashed the view should not be available.
|
||||
TrashHandler.trash(user, grid.table.database.group, grid.table.database, grid.table)
|
||||
with pytest.raises(ViewDoesNotExist):
|
||||
handler.get_view(view_id=grid.id, view_model=GridView)
|
||||
|
||||
# Restoring the table should restore the view
|
||||
TrashHandler.restore_item(user, "table", grid.table.id)
|
||||
view = handler.get_view(view_id=grid.id, view_model=GridView)
|
||||
assert view.id == grid.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.contrib.database.views.signals.view_created.send")
|
||||
|
@ -243,6 +254,37 @@ def test_delete_view(send_mock, data_fixture):
|
|||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashed_fields_are_not_included_in_grid_view_field_options(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
grid_view = data_fixture.create_grid_view(table=table)
|
||||
field_1 = data_fixture.create_text_field(table=table)
|
||||
field_2 = data_fixture.create_text_field(table=table)
|
||||
|
||||
ViewHandler().update_grid_view_field_options(
|
||||
user=user,
|
||||
grid_view=grid_view,
|
||||
field_options={str(field_1.id): {"width": 150}, field_2.id: {"width": 250}},
|
||||
)
|
||||
options = grid_view.get_field_options()
|
||||
assert options.count() == 2
|
||||
|
||||
TrashHandler.trash(user, table.database.group, table.database, field_1)
|
||||
|
||||
options = grid_view.get_field_options()
|
||||
assert options.count() == 1
|
||||
|
||||
with pytest.raises(UnrelatedFieldError):
|
||||
ViewHandler().update_grid_view_field_options(
|
||||
user=user,
|
||||
grid_view=grid_view,
|
||||
field_options={
|
||||
field_1.id: {"width": 150},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.contrib.database.views.signals.grid_view_field_options_updated.send")
|
||||
def test_update_grid_view_field_options(send_mock, data_fixture):
|
||||
|
|
|
@ -3,6 +3,7 @@ import pytest
|
|||
from unittest.mock import patch
|
||||
|
||||
from baserow.contrib.database.fields.handler import FieldHandler
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
|
@ -21,6 +22,20 @@ def test_field_created(mock_broadcast_to_channel_group, data_fixture):
|
|||
assert args[0][1]["field"]["id"] == field.id
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
@patch("baserow.ws.registries.broadcast_to_channel_group")
|
||||
def test_field_restored(mock_broadcast_to_channel_group, data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
field = data_fixture.create_text_field(user=user)
|
||||
FieldHandler().delete_field(user, field)
|
||||
TrashHandler.restore_item(user, "field", field.id)
|
||||
|
||||
args = mock_broadcast_to_channel_group.delay.call_args
|
||||
assert args[0][0] == f"table-{field.table.id}"
|
||||
assert args[0][1]["type"] == "field_restored"
|
||||
assert args[0][1]["field"]["id"] == field.id, args[0]
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
@patch("baserow.ws.registries.broadcast_to_channel_group")
|
||||
def test_field_updated(mock_broadcast_to_channel_group, data_fixture):
|
||||
|
|
|
@ -1,16 +1,14 @@
|
|||
import os
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from io import BytesIO
|
||||
|
||||
import pytest
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from itsdangerous.exc import BadSignature
|
||||
|
||||
from baserow.contrib.database.models import Database, Table
|
||||
from baserow.contrib.database.models import Database
|
||||
from baserow.core.exceptions import (
|
||||
UserNotInGroup,
|
||||
ApplicationTypeDoesNotExist,
|
||||
|
@ -38,6 +36,7 @@ from baserow.core.models import (
|
|||
TemplateCategory,
|
||||
GROUP_USER_PERMISSION_ADMIN,
|
||||
)
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.user_files.models import UserFile
|
||||
|
||||
|
||||
|
@ -199,6 +198,36 @@ def test_create_group(send_mock, data_fixture):
|
|||
assert GroupUser.objects.all().count() == 2
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.signals.group_restored.send")
|
||||
def test_restore_group(group_restored_mock, data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(name="Test group", user=user)
|
||||
|
||||
handler = CoreHandler()
|
||||
|
||||
handler.delete_group(user, group)
|
||||
|
||||
assert Group.objects.count() == 0
|
||||
|
||||
TrashHandler.restore_item(user, "group", group.id)
|
||||
|
||||
group_restored_mock.assert_called_once()
|
||||
assert group_restored_mock.call_args[1]["user"] is None
|
||||
assert (
|
||||
group_restored_mock.call_args[1]["group_user"].id
|
||||
== group.groupuser_set.get(user=user).id
|
||||
)
|
||||
|
||||
group = Group.objects.all().first()
|
||||
user_group = GroupUser.objects.all().first()
|
||||
|
||||
assert group.name == "Test group"
|
||||
assert user_group.user == user
|
||||
assert user_group.group == group
|
||||
assert user_group.permissions == GROUP_USER_PERMISSION_ADMIN
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.signals.group_updated.send")
|
||||
def test_update_group(send_mock, data_fixture):
|
||||
|
@ -230,7 +259,7 @@ def test_delete_group(send_mock, data_fixture):
|
|||
user = data_fixture.create_user()
|
||||
group_1 = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(group=group_1)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
data_fixture.create_database_table(database=database)
|
||||
data_fixture.create_group(user=user)
|
||||
user_2 = data_fixture.create_user()
|
||||
group_3 = data_fixture.create_group(user=user_2)
|
||||
|
@ -238,25 +267,28 @@ def test_delete_group(send_mock, data_fixture):
|
|||
handler = CoreHandler()
|
||||
handler.delete_group(user, group_1)
|
||||
|
||||
assert group_1.trashed
|
||||
|
||||
send_mock.assert_called_once()
|
||||
assert send_mock.call_args[1]["group"].id == group_1.id
|
||||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
assert len(send_mock.call_args[1]["group_users"]) == 1
|
||||
assert send_mock.call_args[1]["group_users"][0].id == user.id
|
||||
|
||||
assert Database.objects.all().count() == 0
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
assert Group.objects.all().count() == 2
|
||||
assert GroupUser.objects.all().count() == 2
|
||||
assert Group.objects.count() == 2
|
||||
assert GroupUser.objects.count() == 2
|
||||
assert Group.trash.count() == 1
|
||||
assert GroupUser.trash.count() == 1
|
||||
|
||||
with pytest.raises(UserNotInGroup):
|
||||
handler.delete_group(user, group_3)
|
||||
|
||||
handler.delete_group(user_2, group_3)
|
||||
|
||||
assert Group.objects.all().count() == 1
|
||||
assert GroupUser.objects.all().count() == 1
|
||||
assert Group.objects.count() == 1
|
||||
assert GroupUser.objects.count() == 1
|
||||
assert Group.trash.count() == 2
|
||||
assert GroupUser.trash.count() == 2
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
handler.delete_group(user=user_2, group=object())
|
||||
|
@ -671,7 +703,6 @@ def test_create_database_application(send_mock, data_fixture):
|
|||
send_mock.assert_called_once()
|
||||
assert send_mock.call_args[1]["application"].id == database.id
|
||||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
assert send_mock.call_args[1]["type_name"] == "database"
|
||||
|
||||
with pytest.raises(UserNotInGroup):
|
||||
handler.create_application(
|
||||
|
@ -776,7 +807,7 @@ def test_delete_database_application(send_mock, data_fixture):
|
|||
user_2 = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(group=group)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
data_fixture.create_database_table(database=database)
|
||||
|
||||
handler = CoreHandler()
|
||||
|
||||
|
@ -788,9 +819,11 @@ def test_delete_database_application(send_mock, data_fixture):
|
|||
|
||||
handler.delete_application(user=user, application=database)
|
||||
|
||||
database.refresh_from_db()
|
||||
assert database.trashed
|
||||
|
||||
assert Database.objects.all().count() == 0
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
assert Database.trash.all().count() == 1
|
||||
|
||||
send_mock.assert_called_once()
|
||||
assert send_mock.call_args[1]["application_id"] == database.id
|
||||
|
@ -970,7 +1003,6 @@ def test_install_template(send_mock, tmpdir, data_fixture):
|
|||
send_mock.assert_called_once()
|
||||
assert send_mock.call_args[1]["application"].id == applications[0].id
|
||||
assert send_mock.call_args[1]["user"].id == user.id
|
||||
assert send_mock.call_args[1]["type_name"] == "database"
|
||||
|
||||
# Because the `example-template.json` has a file field that contains the hello
|
||||
# world file, we expect it to exist after syncing the templates.
|
||||
|
@ -982,3 +1014,28 @@ def test_install_template(send_mock, tmpdir, data_fixture):
|
|||
assert file_path.open().read() == "Hello World"
|
||||
|
||||
settings.APPLICATION_TEMPLATES_DIR = old_templates
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.signals.application_created.send")
|
||||
def test_restore_application(application_created_mock, data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(name="Test group", user=user)
|
||||
database = data_fixture.create_database_application(user=user, group=group)
|
||||
|
||||
handler = CoreHandler()
|
||||
|
||||
handler.delete_application(user, application=database)
|
||||
|
||||
assert Application.objects.count() == 0
|
||||
|
||||
TrashHandler.restore_item(user, "application", database.id)
|
||||
|
||||
application_created_mock.assert_called_once()
|
||||
assert application_created_mock.call_args[1]["application"].id == database.id
|
||||
assert application_created_mock.call_args[1]["user"] is None
|
||||
|
||||
restored_app = Application.objects.all().first()
|
||||
|
||||
assert restored_app.name == database.name
|
||||
assert restored_app.id == database.id
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
import pytest
|
||||
|
||||
from baserow.core.models import Group
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_groups_of_user(data_fixture):
|
||||
user_1 = data_fixture.create_user()
|
||||
user_group_1 = data_fixture.create_user_group(user=user_1, order=1)
|
||||
user_group_2 = data_fixture.create_user_group(user=user_1, order=2)
|
||||
user_group_3 = data_fixture.create_user_group(user=user_1, order=0)
|
||||
|
||||
user_2 = data_fixture.create_user()
|
||||
user_group_4 = data_fixture.create_user_group(user=user_2, order=0)
|
||||
|
||||
groups_user_1 = Group.objects.of_user(user=user_1)
|
||||
assert len(groups_user_1) == 3
|
||||
|
||||
assert groups_user_1[0].id == user_group_3.group.id
|
||||
assert groups_user_1[1].id == user_group_1.group.id
|
||||
assert groups_user_1[2].id == user_group_2.group.id
|
||||
|
||||
groups_user_2 = Group.objects.of_user(user=user_2)
|
||||
assert len(groups_user_2) == 1
|
||||
assert groups_user_2[0].id == user_group_4.group.id
|
435 backend/tests/baserow/core/trash/test_trash_handler.py (new file)
|
@ -0,0 +1,435 @@
|
|||
import pytest
|
||||
from django.db import connection
|
||||
from django.utils import timezone
|
||||
from freezegun import freeze_time
|
||||
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.rows.handler import RowHandler
|
||||
from baserow.contrib.database.table.models import Table
|
||||
from baserow.core.exceptions import GroupDoesNotExist, ApplicationDoesNotExist
|
||||
from baserow.core.models import Group, Application
|
||||
from baserow.core.models import TrashEntry
|
||||
from baserow.core.trash.exceptions import CannotRestoreChildBeforeParent
|
||||
from baserow.core.trash.handler import TrashHandler, _get_trash_entry
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_an_item_creates_a_trash_entry_in_the_db_and_marks_it_as_trashed(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
assert not group_to_delete.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
assert group_to_delete.trashed
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=group_to_delete.id, trash_item_type="group"
|
||||
)
|
||||
assert trash_entry.trashed_at.isoformat() == "2020-01-01T12:00:00+00:00"
|
||||
assert Group.objects.count() == 0
|
||||
assert Group.trash.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_restoring_a_trashed_item_unmarks_it_as_trashed_and_deletes_the_entry(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
assert group_to_delete.trashed
|
||||
assert TrashEntry.objects.count() == 1
|
||||
|
||||
TrashHandler.restore_item(user, "group", group_to_delete.id)
|
||||
|
||||
group_to_delete.refresh_from_db()
|
||||
assert not group_to_delete.trashed
|
||||
assert TrashEntry.objects.count() == 0
|
||||
assert Group.trash.count() == 0
|
||||
assert Group.objects.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_trash_entry_older_than_setting_gets_marked_for_permanent_deletion(
|
||||
data_fixture, settings
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
trashed_at = timezone.now()
|
||||
half_time = timezone.timedelta(
|
||||
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED / 2
|
||||
)
|
||||
plus_one_hour_over = timezone.timedelta(
|
||||
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED + 1
|
||||
)
|
||||
with freeze_time(trashed_at):
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
|
||||
entry = _get_trash_entry(user, "group", None, group_to_delete.id)
|
||||
assert not entry.should_be_permanently_deleted
|
||||
|
||||
datetime_when_trash_item_should_still_be_kept = trashed_at + half_time
|
||||
with freeze_time(datetime_when_trash_item_should_still_be_kept):
|
||||
TrashHandler.mark_old_trash_for_permanent_deletion()
|
||||
|
||||
entry.refresh_from_db()
|
||||
assert not entry.should_be_permanently_deleted
|
||||
|
||||
datetime_when_trash_item_old_enough_to_be_deleted = trashed_at + plus_one_hour_over
|
||||
with freeze_time(datetime_when_trash_item_old_enough_to_be_deleted):
|
||||
TrashHandler.mark_old_trash_for_permanent_deletion()
|
||||
|
||||
entry.refresh_from_db()
|
||||
assert entry.should_be_permanently_deleted
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_trash_entry_marked_for_permanent_deletion_gets_deleted_by_task(
|
||||
data_fixture, settings
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
|
||||
trashed_at = timezone.now()
|
||||
plus_one_hour_over = timezone.timedelta(
|
||||
hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED + 1
|
||||
)
|
||||
with freeze_time(trashed_at):
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
assert Group.trash.count() == 1
|
||||
|
||||
datetime_when_trash_item_old_enough_to_be_deleted = trashed_at + plus_one_hour_over
|
||||
with freeze_time(datetime_when_trash_item_old_enough_to_be_deleted):
|
||||
TrashHandler.mark_old_trash_for_permanent_deletion()
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
assert Group.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_group_marked_for_perm_deletion_raises_a_404_when_asked_for_trash_contents(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
assert not group_to_delete.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=group_to_delete.id, trash_item_type="group"
|
||||
)
|
||||
trash_entry.should_be_permanently_deleted = True
|
||||
trash_entry.save()
|
||||
|
||||
with pytest.raises(GroupDoesNotExist):
|
||||
TrashHandler.get_trash_contents(user, group_to_delete.id, None)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_group_marked_for_perm_deletion_no_longer_shows_up_in_trash_structure(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group_to_delete = data_fixture.create_group(user=user)
|
||||
assert not group_to_delete.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group_to_delete, None, group_to_delete)
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=group_to_delete.id, trash_item_type="group"
|
||||
)
|
||||
trash_entry.should_be_permanently_deleted = True
|
||||
trash_entry.save()
|
||||
|
||||
assert len(TrashHandler.get_trash_structure(user)["groups"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_an_app_marked_for_perm_deletion_raises_a_404_when_asked_for_trash_contents(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
trashed_database = data_fixture.create_database_application(user=user, group=group)
|
||||
assert not trashed_database.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, trashed_database, trashed_database)
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=trashed_database.id, trash_item_type="application"
|
||||
)
|
||||
trash_entry.should_be_permanently_deleted = True
|
||||
trash_entry.save()
|
||||
|
||||
with pytest.raises(ApplicationDoesNotExist):
|
||||
TrashHandler.get_trash_contents(user, group.id, trashed_database.id)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_a_trashed_app_shows_up_in_trash_structure(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
trashed_database = data_fixture.create_database_application(user=user, group=group)
|
||||
assert not trashed_database.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, trashed_database, trashed_database)
|
||||
|
||||
structure = TrashHandler.get_trash_structure(user)
|
||||
applications_qs = structure["groups"][0]["applications"]
|
||||
assert applications_qs.count() == 1
|
||||
assert applications_qs.get().trashed
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_an_app_marked_for_perm_deletion_no_longer_shows_up_in_trash_structure(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
trashed_database = data_fixture.create_database_application(user=user, group=group)
|
||||
assert not trashed_database.trashed
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, trashed_database, trashed_database)
|
||||
trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=trashed_database.id, trash_item_type="application"
|
||||
)
|
||||
trash_entry.should_be_permanently_deleted = True
|
||||
trash_entry.save()
|
||||
|
||||
for group in TrashHandler.get_trash_structure(user)["groups"]:
|
||||
assert group["applications"].count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_deleting_a_parent_with_a_trashed_child_also_cleans_up_the_child_entry(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(user=user, group=group)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
field = data_fixture.create_text_field(user=user, table=table)
|
||||
table_model = table.get_model()
|
||||
row = table_model.objects.create(**{f"field_{field.id}": "Test"})
|
||||
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, database, row, parent_id=table.id)
|
||||
TrashHandler.trash(user, group, database, field)
|
||||
TrashHandler.trash(user, group, database, table)
|
||||
TrashHandler.trash(user, group, database, database)
|
||||
TrashHandler.trash(user, group, None, group)
|
||||
|
||||
TrashHandler.empty(user, group.id, None)
|
||||
|
||||
assert TrashEntry.objects.count() == 5
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
assert TrashEntry.objects.count() == 0
|
||||
assert Group.objects_and_trash.count() == 0
|
||||
assert Application.objects_and_trash.count() == 0
|
||||
assert Table.objects_and_trash.count() == 0
|
||||
assert Field.objects_and_trash.count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_deleting_a_table_with_a_trashed_row_also_cleans_up_the_row_entry(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(user=user, group=group)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
field = data_fixture.create_text_field(user=user, table=table)
|
||||
table_model = table.get_model()
|
||||
row = table_model.objects.create(**{f"field_{field.id}": "Test"})
|
||||
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, database, database)
|
||||
TrashHandler.trash(user, group, database, row, parent_id=table.id)
|
||||
TrashHandler.trash(user, group, database, table)
|
||||
|
||||
TrashHandler.empty(user, group.id, database.id)
|
||||
|
||||
assert TrashEntry.objects.count() == 3
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
assert TrashEntry.objects.count() == 0
|
||||
assert Table.objects_and_trash.count() == 0
|
||||
assert Field.objects_and_trash.count() == 0
|
||||
assert Application.objects_and_trash.count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trash_contents_are_ordered_from_newest_to_oldest_entries(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
trashed_database = data_fixture.create_database_application(user=user, group=group)
|
||||
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, group, trashed_database, trashed_database)
|
||||
with freeze_time("2020-01-01 12:02"):
|
||||
TrashHandler.trash(user, group, None, group)
|
||||
|
||||
contents = TrashHandler.get_trash_contents(user, group.id, None)
|
||||
|
||||
assert contents[0].trash_item_type == "group"
|
||||
assert contents[0].trash_item_id == group.id
|
||||
assert contents[0].trashed_at.isoformat() == "2020-01-01T12:02:00+00:00"
|
||||
|
||||
assert contents[1].trash_item_type == "application"
|
||||
assert contents[1].trash_item_id == trashed_database.id
|
||||
assert contents[1].trashed_at.isoformat() == "2020-01-01T12:00:00+00:00"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_deleting_one_group_should_not_affect_another_trashed_group(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
trashed_group = data_fixture.create_group(user=user)
|
||||
other_trashed_group = data_fixture.create_group(user=user)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, trashed_group, None, trashed_group)
|
||||
TrashHandler.trash(user, other_trashed_group, None, other_trashed_group)
|
||||
|
||||
# Only mark one for deletion
|
||||
parent_trash_entry = TrashEntry.objects.get(
|
||||
trash_item_id=trashed_group.id, trash_item_type="group"
|
||||
)
|
||||
parent_trash_entry.should_be_permanently_deleted = True
|
||||
parent_trash_entry.save()
|
||||
|
||||
assert TrashEntry.objects.count() == 2
|
||||
assert TrashEntry.objects.filter(should_be_permanently_deleted=True).count() == 1
|
||||
assert Group.objects_and_trash.count() == 2
|
||||
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
|
||||
assert TrashEntry.objects.count() == 1
|
||||
assert Group.objects_and_trash.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_deleting_a_user_who_trashed_items_should_still_leave_those_items_trashed(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
trashed_group = data_fixture.create_group(user=user)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(user, trashed_group, None, trashed_group)
|
||||
|
||||
assert TrashEntry.objects.count() == 1
|
||||
assert Group.objects_and_trash.count() == 1
|
||||
|
||||
user.delete()
|
||||
|
||||
assert TrashEntry.objects.count() == 1
|
||||
assert Group.objects_and_trash.count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_trashing_two_rows_in_different_tables_works_as_expected(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
table_1 = data_fixture.create_database_table(name="Car", user=user)
|
||||
table_2 = data_fixture.create_database_table(name="Other Cars", user=user)
|
||||
group = data_fixture.create_group(user=user)
|
||||
name_field = data_fixture.create_text_field(
|
||||
table=table_1, name="Name", text_default="Test"
|
||||
)
|
||||
|
||||
handler = RowHandler()
|
||||
|
||||
row_in_table_1 = handler.create_row(
|
||||
user=user,
|
||||
table=table_1,
|
||||
values={
|
||||
name_field.id: "Tesla",
|
||||
},
|
||||
)
|
||||
row_in_table_2 = handler.create_row(
|
||||
user=user,
|
||||
table=table_2,
|
||||
values={
|
||||
name_field.id: "Ford",
|
||||
},
|
||||
)
|
||||
with freeze_time("2020-01-01 12:00"):
|
||||
TrashHandler.trash(
|
||||
user, group, table_1.database, row_in_table_1, parent_id=table_1.id
|
||||
)
|
||||
TrashHandler.trash(
|
||||
user, group, table_2.database, row_in_table_2, parent_id=table_2.id
|
||||
)
|
||||
|
||||
table_1_model = table_1.get_model()
|
||||
table_2_model = table_2.get_model()
|
||||
|
||||
assert table_1_model.trash.count() == 1
|
||||
assert table_1_model.objects.count() == 0
|
||||
|
||||
assert table_2_model.trash.count() == 1
|
||||
assert table_2_model.objects.count() == 0
|
||||
|
||||
TrashHandler.restore_item(
|
||||
user, "row", row_in_table_1.id, parent_trash_item_id=table_1.id
|
||||
)
|
||||
|
||||
assert table_1_model.trash.count() == 0
|
||||
assert table_1_model.objects.count() == 1
|
||||
|
||||
assert table_2_model.trash.count() == 1
|
||||
assert table_2_model.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cannot_restore_a_child_before_the_parent(
|
||||
data_fixture,
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
table_1 = data_fixture.create_database_table(name="Car", user=user)
|
||||
group = table_1.database.group
|
||||
name_field = data_fixture.create_text_field(
|
||||
table=table_1, name="Name", text_default="Test"
|
||||
)
|
||||
|
||||
handler = RowHandler()
|
||||
|
||||
row_in_table_1 = handler.create_row(
|
||||
user=user,
|
||||
table=table_1,
|
||||
values={
|
||||
name_field.id: "Tesla",
|
||||
},
|
||||
)
|
||||
TrashHandler.trash(
|
||||
user, group, table_1.database, row_in_table_1, parent_id=table_1.id
|
||||
)
|
||||
TrashHandler.trash(user, group, table_1.database, table_1)
|
||||
|
||||
with pytest.raises(CannotRestoreChildBeforeParent):
|
||||
TrashHandler.restore_item(
|
||||
user, "row", row_in_table_1.id, parent_trash_item_id=table_1.id
|
||||
)
|
||||
|
||||
TrashHandler.trash(user, group, table_1.database, table_1.database)
|
||||
TrashHandler.trash(user, group, None, group)
|
||||
|
||||
with pytest.raises(CannotRestoreChildBeforeParent):
|
||||
TrashHandler.restore_item(user, "application", table_1.database.id)
|
||||
|
||||
TrashHandler.restore_item(user, "group", group.id)
|
||||
|
||||
with pytest.raises(CannotRestoreChildBeforeParent):
|
||||
TrashHandler.restore_item(user, "table", table_1.id)
|
49 backend/tests/baserow/core/trash/test_trash_types.py (new file)
|
@ -0,0 +1,49 @@
|
|||
import pytest
|
||||
from django.db import connection
|
||||
|
||||
from baserow.contrib.database.models import Database
|
||||
from baserow.contrib.database.table.models import Table
|
||||
from baserow.core.models import Group, GroupUser
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.trash.trash_types import GroupTrashableItemType
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_delete_group(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
group_1 = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(group=group_1)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
data_fixture.create_group(user=user)
|
||||
user_2 = data_fixture.create_user()
|
||||
group_3 = data_fixture.create_group(user=user_2)
|
||||
|
||||
handler = GroupTrashableItemType()
|
||||
handler.permanently_delete_item(group_1)
|
||||
|
||||
assert Database.objects.all().count() == 0
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
||||
assert Group.objects.all().count() == 2
|
||||
assert GroupUser.objects.all().count() == 2
|
||||
|
||||
handler.permanently_delete_item(group_3)
|
||||
|
||||
assert Group.objects.all().count() == 1
|
||||
assert GroupUser.objects.all().count() == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_perm_delete_application(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
database = data_fixture.create_database_application(group=group)
|
||||
table = data_fixture.create_database_table(database=database)
|
||||
|
||||
handler = TrashHandler()
|
||||
|
||||
handler.permanently_delete(database)
|
||||
|
||||
assert Database.objects.all().count() == 0
|
||||
assert Table.objects.all().count() == 0
|
||||
assert f"database_table_{table.id}" not in connection.introspection.table_names()
|
40 backend/tests/baserow/performance/test_trash_performance.py (new file)
|
@ -0,0 +1,40 @@
|
|||
import pytest
|
||||
|
||||
from baserow.contrib.database.management.commands.fill_table import fill_table
|
||||
from baserow.core.models import TrashEntry
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from tests.test_utils import setup_interesting_test_table
|
||||
|
||||
from pyinstrument import Profiler
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.slow
|
||||
# You must add --runslow -s to pytest to run this test. In IntelliJ you can do
# this by editing the run config for this test and adding --runslow -s to the
# additional args.
|
||||
def test_deleting_many_rows_is_fast(data_fixture):
|
||||
|
||||
table, user, row = setup_interesting_test_table(data_fixture)
|
||||
count = 1000
|
||||
fill_table(count, table)
|
||||
|
||||
model = table.get_model()
|
||||
for row in model.objects.all():
|
||||
TrashHandler.trash(
|
||||
user, table.database.group, table.database, row, parent_id=table.id
|
||||
)
|
||||
|
||||
TrashEntry.objects.update(should_be_permanently_deleted=True)
|
||||
|
||||
assert model.objects.all().count() == 0
|
||||
assert model.trash.all().count() == count + 2
|
||||
assert TrashEntry.objects.count() == count + 2
|
||||
|
||||
profiler = Profiler()
|
||||
profiler.start()
|
||||
TrashHandler.permanently_delete_marked_trash()
|
||||
profiler.stop()
|
||||
# Also add -s to the additional args to see the profiling output!
|
||||
# As of 22/06/2021 on a 5900X the profiler output showed 0.82 seconds to
|
||||
# perm delete these 1000 rows.
|
||||
print(profiler.output_text(unicode=True, color=True))
|
|
@ -1,8 +1,13 @@
|
|||
import pytest
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.models import (
|
||||
GROUP_USER_PERMISSION_ADMIN,
|
||||
GROUP_USER_PERMISSION_MEMBER,
|
||||
)
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
|
@ -18,6 +23,50 @@ def test_group_created(mock_broadcast_to_group, data_fixture):
|
|||
assert args[0][1]["group"]["id"] == group_user.group_id
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
@patch("baserow.ws.signals.broadcast_to_users")
|
||||
def test_group_restored(mock_broadcast_to_users, data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
member_user = data_fixture.create_user()
|
||||
# This user should not be sent the restore signal
|
||||
data_fixture.create_user()
|
||||
group = data_fixture.create_group()
|
||||
group_user = data_fixture.create_user_group(
|
||||
user=user, group=group, permissions=GROUP_USER_PERMISSION_ADMIN
|
||||
)
|
||||
member_group_user = data_fixture.create_user_group(
|
||||
user=member_user, group=group, permissions=GROUP_USER_PERMISSION_MEMBER
|
||||
)
|
||||
database = data_fixture.create_database_application(user=user, group=group)
|
||||
TrashHandler.trash(user, group, None, group)
|
||||
|
||||
TrashHandler.restore_item(user, "group", group.id)
|
||||
|
||||
args = mock_broadcast_to_users.delay.call_args_list
|
||||
assert len(args) == 2
|
||||
member_call = args[1][0]
|
||||
admin_call = args[0][0]
|
||||
assert member_call[0] == [member_user.id]
|
||||
assert member_call[1]["type"] == "group_restored"
|
||||
assert member_call[1]["group"]["id"] == member_group_user.group_id
|
||||
assert member_call[1]["group"]["permissions"] == "MEMBER"
|
||||
expected_group_json = {
|
||||
"id": database.id,
|
||||
"name": database.name,
|
||||
"order": 0,
|
||||
"type": "database",
|
||||
"tables": [],
|
||||
"group": {"id": group.id, "name": group.name},
|
||||
}
|
||||
assert member_call[1]["applications"] == [expected_group_json]
|
||||
assert admin_call[0] == [user.id]
|
||||
assert admin_call[1]["type"] == "group_restored"
|
||||
assert admin_call[1]["group"]["id"] == group_user.group_id
|
||||
assert admin_call[1]["group"]["permissions"] == "ADMIN"
|
||||
assert admin_call[1]["group"]["id"] == group_user.group_id
|
||||
assert admin_call[1]["applications"] == [expected_group_json]
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
@patch("baserow.ws.signals.broadcast_to_group")
|
||||
def test_group_updated(mock_broadcast_to_group, data_fixture):
|
||||
|
|
|
@ -185,4 +185,4 @@ def setup_interesting_test_table(data_fixture):
|
|||
getattr(row, f"field_{name_to_field_id['file_link_row']}").add(
|
||||
linked_row_7.id, linked_row_8.id
|
||||
)
|
||||
return table, user
|
||||
return table, user, row
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
## Unreleased
|
||||
|
||||
* Made it possible to list table field meta-data with a token.
|
||||
* Add trash where deleted apps, groups, tables, fields and rows can be restored after deletion.
|
||||
* Fix the create group invite endpoint failing when no message provided.
|
||||
* Single select options can now be ordered by drag and drop.
|
||||
* Added before and after date filters.
|
||||
|
|
|
@ -133,3 +133,5 @@ are accepted.
|
|||
* `EMAIL_SMTP_PORT` (default `25`): The port of the SMTP server.
|
||||
* `EMAIL_SMTP_USER` (default ``): The username for the SMTP server.
|
||||
* `EMAIL_SMTP_PASSWORD` (default ``): The password of the SMTP server.
|
||||
* `HOURS_UNTIL_TRASH_PERMANENTLY_DELETED` (default 72): The number of hours to keep
|
||||
trashed items until they are permanently deleted.
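As an illustrative aside (not part of this commit), a minimal sketch of how a Django settings module could pick this variable up, assuming it is read straight from the environment with 72 hours as the fallback:

```python
import os

# Hours to keep trashed items before the cleanup task permanently deletes them.
# Assumption for illustration: the value is read from an environment variable.
HOURS_UNTIL_TRASH_PERMANENTLY_DELETED = int(
    os.environ.get("HOURS_UNTIL_TRASH_PERMANENTLY_DELETED", 72)
)
```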
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.signals import group_deleted
|
||||
from baserow.core.exceptions import IsNotAdminError
|
||||
from baserow.core.signals import group_deleted
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow_premium.admin.groups.exceptions import CannotDeleteATemplateGroupError
|
||||
|
||||
|
||||
|
@ -27,7 +27,7 @@ class GroupsAdminHandler:
|
|||
group_id = group.id
|
||||
group_users = list(group.users.all())
|
||||
|
||||
CoreHandler()._delete_group(group)
|
||||
TrashHandler.permanently_delete(group)
|
||||
|
||||
group_deleted.send(
|
||||
self, group_id=group_id, group=group, group_users=group_users
|
||||
|
|
|
@ -267,7 +267,7 @@ def strip_indents_and_newlines(xml):
|
|||
|
||||
|
||||
def run_export_over_interesting_test_table(data_fixture, storage_mock, options):
|
||||
table, user = setup_interesting_test_table(data_fixture)
|
||||
table, user, _ = setup_interesting_test_table(data_fixture)
|
||||
grid_view = data_fixture.create_grid_view(table=table)
|
||||
job, contents = run_export_job_with_mock_storage(
|
||||
table, grid_view, storage_mock, user, options
|
||||
|
|
|
@ -65,3 +65,6 @@
|
|||
@import 'paginator';
|
||||
@import 'sortable';
|
||||
@import 'export';
|
||||
@import 'trash';
|
||||
@import 'trash_entry';
|
||||
@import 'infinite_scroll';
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
.infinite-scroll {
|
||||
@include absolute(0, 0, 0, 0);
|
||||
|
||||
overflow-y: auto;
|
||||
}
|
|
@ -32,28 +32,47 @@
|
|||
padding: 0;
|
||||
}
|
||||
|
||||
&.modal__box--full-height {
|
||||
height: calc(100% - 80px);
|
||||
}
|
||||
|
||||
&.modal__box--small {
|
||||
max-width: 520px;
|
||||
}
|
||||
|
||||
&.modal__box--tiny {
|
||||
max-width: 320px;
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.box__title {
|
||||
margin-top: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.modal__box-sidebar {
|
||||
position: relative;
|
||||
flex: 0 0 200px;
|
||||
background-color: $color-neutral-50;
|
||||
border-right: 1px solid $color-neutral-200;
|
||||
border-top-left-radius: 6px;
|
||||
border-bottom-left-radius: 6px;
|
||||
|
||||
.modal__box--full-height & {
|
||||
overflow-y: auto;
|
||||
}
|
||||
}
|
||||
|
||||
.modal__box-content {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
padding: 32px 40px;
|
||||
min-width: 0;
|
||||
flex: 1;
|
||||
|
||||
.modal__box--full-height & {
|
||||
overflow-y: auto;
|
||||
}
|
||||
}
|
||||
|
||||
.modal__close {
|
||||
|
|
|
@ -5,6 +5,11 @@
|
|||
}
|
||||
|
||||
.modal-sidebar__head-icon {
|
||||
flex: 0 0 32px;
|
||||
margin-right: 16px;
|
||||
}
|
||||
|
||||
.modal-sidebar__head-initials-icon {
|
||||
flex: 0 0 32px;
|
||||
border-radius: 100%;
|
||||
margin-right: 16px;
|
||||
|
@ -19,6 +24,17 @@
|
|||
font-weight: 600;
|
||||
}
|
||||
|
||||
.modal-sidebar__head-icon-and-name {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
line-height: 140%;
|
||||
}
|
||||
|
||||
.modal-sidebar__head-icon-and-name-icon {
|
||||
margin-right: 10px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.modal-sidebar__nav {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
|
@ -30,6 +46,7 @@
|
|||
|
||||
position: relative;
|
||||
display: block;
|
||||
height: 42px;
|
||||
line-height: 42px;
|
||||
padding: 0 20px 0 50px;
|
||||
border-left: solid 4px transparent;
|
||||
|
|
|
@ -1,7 +1,40 @@
|
|||
.notifications {
|
||||
.top-right-notifications {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
right: 30px;
|
||||
width: 320px;
|
||||
z-index: $z-index-notifications;
|
||||
}
|
||||
|
||||
.bottom-right-notifications {
|
||||
display: flex;
|
||||
flex-direction: column-reverse;
|
||||
align-items: flex-end;
|
||||
position: fixed;
|
||||
bottom: 10px;
|
||||
right: 30px;
|
||||
z-index: $z-index-notifications;
|
||||
}
|
||||
|
||||
.undo-delete-notification {
|
||||
margin: 5px;
|
||||
box-shadow: $color-primary-200;
|
||||
|
||||
&.undo-delete-notification--pulsing {
|
||||
animation: pulse 1s 2;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
box-shadow: 0 0 0 0 $color-primary-400;
|
||||
}
|
||||
|
||||
70% {
|
||||
box-shadow: 0 0 0 5px rgba(0, 0, 0, 0);
|
||||
}
|
||||
|
||||
100% {
|
||||
box-shadow: 0 0 0 0 rgba(0, 0, 0, 0);
|
||||
}
|
||||
}
|
||||
|
|
web-frontend/modules/core/assets/scss/components/trash.scss (new file, 155 lines)
|
@ -0,0 +1,155 @@
|
|||
.trash-sidebar__groups {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0 0 20px 0;
|
||||
}
|
||||
|
||||
.trash-sidebar__group {
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.trash-sidebar__group-link {
|
||||
@extend %ellipsis;
|
||||
|
||||
position: relative;
|
||||
display: block;
|
||||
padding: 0 16px 0 32px;
|
||||
line-height: 33px;
|
||||
font-size: 14px;
|
||||
color: $color-primary-900;
|
||||
border-left: solid 3px transparent;
|
||||
|
||||
&:hover {
|
||||
text-decoration: none;
|
||||
background-color: $color-neutral-200;
|
||||
}
|
||||
|
||||
.trash-sidebar__group--active & {
|
||||
background-color: $color-neutral-200;
|
||||
border-left-color: $color-primary-500;
|
||||
}
|
||||
|
||||
.trash-sidebar__group--trashed & {
|
||||
text-decoration-line: line-through;
|
||||
}
|
||||
|
||||
&::before {
|
||||
@extend .fas;
|
||||
@extend .fa-fw;
|
||||
|
||||
@include absolute(50%, auto, auto, 10px);
|
||||
|
||||
content: fa-content($fa-var-caret-right);
|
||||
color: $color-primary-900;
|
||||
font-size: 14px;
|
||||
line-height: 12px;
|
||||
margin-top: -6px;
|
||||
|
||||
.trash-sidebar__group--active & {
|
||||
content: fa-content($fa-var-caret-down);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.trash-sidebar__applications {
|
||||
display: none;
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
|
||||
.trash-sidebar__group--active &,
|
||||
.trash-sidebar__group--open & {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.trash-sidebar__application {
|
||||
margin: 4px 0;
|
||||
}
|
||||
|
||||
.trash-sidebar__application-link {
|
||||
@extend %ellipsis;
|
||||
|
||||
display: block;
|
||||
line-height: 33px;
|
||||
font-size: 13px;
|
||||
padding: 0 20px 0 28px;
|
||||
color: $color-primary-900;
|
||||
border-left: solid 3px transparent;
|
||||
|
||||
&:hover {
|
||||
background-color: $color-neutral-200;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.trash-sidebar__application--active & {
|
||||
background-color: $color-neutral-200;
|
||||
border-left-color: $color-primary-500;
|
||||
}
|
||||
|
||||
.trash-sidebar__application--trashed & {
|
||||
text-decoration-line: line-through;
|
||||
}
|
||||
}
|
||||
|
||||
.trash__title {
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
width: 100%;
|
||||
height: 74px;
|
||||
padding-bottom: 20px;
|
||||
margin-bottom: 20px;
|
||||
border-bottom: solid 1px $color-neutral-200;
|
||||
background-color: $white;
|
||||
}
|
||||
|
||||
.trash__title-left {
|
||||
width: 100%;
|
||||
min-width: 0;
|
||||
margin-right: 20px;
|
||||
}
|
||||
|
||||
.trash__title-heading {
|
||||
@extend %ellipsis;
|
||||
|
||||
font-size: 22px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.trash__title-description {
|
||||
@extend %ellipsis;
|
||||
|
||||
font-size: 14px;
|
||||
color: $color-neutral-500;
|
||||
}
|
||||
|
||||
.trash__title-right {
|
||||
flex: 0 0;
|
||||
}
|
||||
|
||||
.trash__empty {
|
||||
text-align: center;
|
||||
margin: 60px 0 30px 0;
|
||||
}
|
||||
|
||||
.trash__empty-icon {
|
||||
font-size: 30px;
|
||||
color: $color-primary-500;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.trash__empty-text {
|
||||
font-size: 14px;
|
||||
color: $color-neutral-500;
|
||||
}
|
||||
|
||||
.trash__entries {
|
||||
@include absolute(106px, 0, 0, 0);
|
||||
}
|
||||
|
||||
.trash__entries-loading-wrapper {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
padding: 20px 0;
|
||||
}
|
|
@ -0,0 +1,98 @@
|
|||
.trash-entry {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
padding: 14px 40px;
|
||||
background-color: $white;
|
||||
|
||||
&:first-child {
|
||||
padding-top: 28px;
|
||||
}
|
||||
|
||||
&:last-child {
|
||||
padding-bottom: 28px;
|
||||
}
|
||||
|
||||
&.trash-entry--disabled {
|
||||
opacity: 0.4;
|
||||
}
|
||||
}
|
||||
|
||||
.trash-entry__initials {
|
||||
flex: 0 0 36px;
|
||||
margin-right: 16px;
|
||||
border-radius: 100%;
|
||||
background-color: $color-primary-500;
|
||||
color: $white;
|
||||
font-weight: 700;
|
||||
font-size: 15px;
|
||||
|
||||
@include center-text(36px, 13px);
|
||||
}
|
||||
|
||||
.trash-entry__content {
|
||||
min-width: 0;
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
line-height: normal;
|
||||
}
|
||||
|
||||
.trash-entry__name {
|
||||
@extend %ellipsis;
|
||||
|
||||
font-size: 13px;
|
||||
color: $color-primary-900;
|
||||
margin-bottom: 6px;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.trash-entry__deleted-at-display {
|
||||
@extend %ellipsis;
|
||||
|
||||
font-size: 12px;
|
||||
color: $color-neutral-500;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.trash-entry__extra-description {
|
||||
font-size: 12px;
|
||||
background-color: $color-primary-100;
|
||||
border-radius: 3px;
|
||||
padding: 4px;
|
||||
margin-top: 6px;
|
||||
|
||||
&:not(:last-child) {
|
||||
margin-right: 6px;
|
||||
}
|
||||
}
|
||||
|
||||
.trash-entry__actions {
|
||||
flex: 0 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
justify-content: center;
|
||||
margin-left: 20px;
|
||||
}
|
||||
|
||||
.trash-entry__action {
|
||||
position: relative;
|
||||
font-size: 13px;
|
||||
width: 48px;
|
||||
|
||||
&:not(:last-child) {
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
&.trash-entry__action--loading {
|
||||
cursor: inherit;
|
||||
|
||||
&::after {
|
||||
content: '';
|
||||
margin-left: 17px;
|
||||
|
||||
@include loading(13px);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -62,7 +62,11 @@
|
|||
}
|
||||
|
||||
.margin-right-1 {
|
||||
margin-right: 8px;
|
||||
margin-right: 8px !important;
|
||||
}
|
||||
|
||||
.margin-right-2 {
|
||||
margin-right: 16px !important;
|
||||
}
|
||||
|
||||
.resizing-horizontal {
|
||||
|
|
|
@ -8,14 +8,13 @@
|
|||
<div
|
||||
class="modal__box"
|
||||
:class="{
|
||||
'modal__box--full-height': fullHeight,
|
||||
'modal__box--with-sidebar': sidebar,
|
||||
'modal__box--full-screen': fullScreen,
|
||||
'modal__box--small': small,
|
||||
'modal__box--tiny': tiny,
|
||||
}"
|
||||
>
|
||||
<a v-if="closeButton" class="modal__close" @click="hide()">
|
||||
<i class="fas fa-times"></i>
|
||||
</a>
|
||||
<template v-if="sidebar">
|
||||
<div class="modal__box-sidebar">
|
||||
<slot name="sidebar"></slot>
|
||||
|
@ -27,6 +26,9 @@
|
|||
<template v-if="!sidebar">
|
||||
<slot></slot>
|
||||
</template>
|
||||
<a v-if="closeButton" class="modal__close" @click="hide()">
|
||||
<i class="fas fa-times"></i>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
@ -53,11 +55,21 @@ export default {
|
|||
default: false,
|
||||
required: false,
|
||||
},
|
||||
tiny: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
required: false,
|
||||
},
|
||||
closeButton: {
|
||||
type: Boolean,
|
||||
default: true,
|
||||
required: false,
|
||||
},
|
||||
fullHeight: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
required: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -1,106 +0,0 @@
|
|||
<template>
|
||||
<Modal>
|
||||
<h2 class="box__title">Delete {{ group.name }}</h2>
|
||||
<Error :error="error"></Error>
|
||||
<div>
|
||||
<p>
|
||||
Are you sure you want to delete the group
|
||||
<strong>{{ group.name }}</strong
|
||||
>?
|
||||
<span v-if="applications.length > 0">
|
||||
The following
|
||||
<template v-if="applications.length == 1"
|
||||
>application including its data is</template
|
||||
>
|
||||
<template v-else>applications including their data are</template>
|
||||
going to be permanently deleted:</span
|
||||
>
|
||||
</p>
|
||||
<div v-if="applications.length > 0" class="delete-section">
|
||||
<div class="delete-section__label">
|
||||
<div class="delete-section__label-icon">
|
||||
<i class="fas fa-exclamation"></i>
|
||||
</div>
|
||||
Will also be permanently deleted
|
||||
</div>
|
||||
<ul class="delete-section__list">
|
||||
<li v-for="application in applications" :key="application.id">
|
||||
<i
|
||||
class="delete-section__list-icon fas fa-database"
|
||||
:class="'fa-' + application._.type.iconClass"
|
||||
></i>
|
||||
{{ application.name }}
|
||||
<small>{{ getApplicationDependentsText(application) }}</small>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="actions">
|
||||
<div class="align-right">
|
||||
<button
|
||||
class="button button--large button--error"
|
||||
:class="{ 'button--loading': loading }"
|
||||
:disabled="loading"
|
||||
@click="deleteGroup()"
|
||||
>
|
||||
Delete group
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapGetters } from 'vuex'
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
import error from '@baserow/modules/core/mixins/error'
|
||||
|
||||
export default {
|
||||
name: 'DeleteGroupModal',
|
||||
mixins: [modal, error],
|
||||
props: {
|
||||
group: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: false,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
...mapGetters({
|
||||
getAllOfGroup: 'application/getAllOfGroup',
|
||||
}),
|
||||
applications() {
|
||||
return this.getAllOfGroup(this.group)
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
async deleteGroup() {
|
||||
this.hideError()
|
||||
this.loading = true
|
||||
|
||||
try {
|
||||
await this.$store.dispatch('group/delete', this.group)
|
||||
this.hide()
|
||||
} catch (error) {
|
||||
this.handleError(error, 'application')
|
||||
}
|
||||
|
||||
this.loading = false
|
||||
},
|
||||
getApplicationDependentsText(application) {
|
||||
const dependents = this.$registry
|
||||
.get('application', application.type)
|
||||
.getDependents(application)
|
||||
const names = this.$registry
|
||||
.get('application', application.type)
|
||||
.getDependentsName(application)
|
||||
const name = dependents.length === 1 ? names[0] : names[1]
|
||||
return `including ${dependents.length} ${name}`
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -15,7 +15,16 @@
|
|||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a @click="$refs.deleteGroupModal.show()">
|
||||
<a @click="showGroupTrashModal">
|
||||
<i class="context__menu-icon fas fa-fw fa-recycle"></i>
|
||||
View trash
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a
|
||||
:class="{ 'context__menu-item--loading': loading }"
|
||||
@click="deleteGroup"
|
||||
>
|
||||
<i class="context__menu-icon fas fa-fw fa-trash"></i>
|
||||
Delete group
|
||||
</a>
|
||||
|
@ -25,18 +34,19 @@
|
|||
ref="groupMembersModal"
|
||||
:group="group"
|
||||
></GroupMembersModal>
|
||||
<DeleteGroupModal ref="deleteGroupModal" :group="group" />
|
||||
<TrashModal ref="groupTrashModal" :initial-group="group"> </TrashModal>
|
||||
</Context>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import DeleteGroupModal from '@baserow/modules/core/components/group/DeleteGroupModal'
|
||||
import GroupMembersModal from '@baserow/modules/core/components/group/GroupMembersModal'
|
||||
import context from '@baserow/modules/core/mixins/context'
|
||||
import { notifyIf } from '@baserow/modules/core/utils/error'
|
||||
import TrashModal from '@baserow/modules/core/components/trash/TrashModal'
|
||||
|
||||
export default {
|
||||
name: 'GroupContext',
|
||||
components: { DeleteGroupModal, GroupMembersModal },
|
||||
components: { TrashModal, GroupMembersModal },
|
||||
mixins: [context],
|
||||
props: {
|
||||
group: {
|
||||
|
@ -44,10 +54,35 @@ export default {
|
|||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: false,
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
showGroupMembersModal() {
|
||||
this.$refs.groupMembersModal.show()
|
||||
},
|
||||
showGroupTrashModal() {
|
||||
this.$refs.context.hide()
|
||||
this.$refs.groupTrashModal.show()
|
||||
},
|
||||
async deleteGroup() {
|
||||
this.loading = true
|
||||
|
||||
try {
|
||||
await this.$store.dispatch('group/delete', this.group)
|
||||
await this.$store.dispatch('notification/restore', {
|
||||
trash_item_type: 'group',
|
||||
trash_item_id: this.group.id,
|
||||
})
|
||||
this.hide()
|
||||
} catch (error) {
|
||||
notifyIf(error, 'application')
|
||||
}
|
||||
|
||||
this.loading = false
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
<template>
|
||||
<section class="infinite-scroll" @scroll="handleScroll">
|
||||
<slot />
|
||||
</section>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
props: {
|
||||
currentCount: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
maxCount: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
currentPage: 1,
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
handleScroll({ target: { scrollTop, clientHeight, scrollHeight } }) {
|
||||
if (scrollTop + clientHeight >= scrollHeight) this.loadNextPage()
|
||||
},
|
||||
loadNextPage() {
|
||||
if (this.currentCount < this.maxCount) {
|
||||
this.currentPage = this.currentPage + 1
|
||||
this.$emit('load-next-page', this.currentPage)
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -1,14 +1,23 @@
|
|||
<template>
|
||||
<div class="notifications">
|
||||
<ConnectingNotification v-if="connecting"></ConnectingNotification>
|
||||
<FailedConnectingNotification
|
||||
v-if="failedConnecting"
|
||||
></FailedConnectingNotification>
|
||||
<Notification
|
||||
v-for="notification in notifications"
|
||||
:key="notification.id"
|
||||
:notification="notification"
|
||||
></Notification>
|
||||
<div class="top-right-notifications">
|
||||
<ConnectingNotification v-if="connecting"></ConnectingNotification>
|
||||
<FailedConnectingNotification
|
||||
v-if="failedConnecting"
|
||||
></FailedConnectingNotification>
|
||||
<Notification
|
||||
v-for="notification in normalNotifications"
|
||||
:key="notification.id"
|
||||
:notification="notification"
|
||||
></Notification>
|
||||
</div>
|
||||
<div class="bottom-right-notifications">
|
||||
<RestoreNotification
|
||||
v-for="notification in restoreNotifications"
|
||||
:key="notification.id"
|
||||
:notification="notification"
|
||||
></RestoreNotification>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
@ -18,15 +27,23 @@ import { mapState } from 'vuex'
|
|||
import Notification from '@baserow/modules/core/components/notifications/Notification'
|
||||
import ConnectingNotification from '@baserow/modules/core/components/notifications/ConnectingNotification'
|
||||
import FailedConnectingNotification from '@baserow/modules/core/components/notifications/FailedConnectingNotification'
|
||||
import RestoreNotification from '@baserow/modules/core/components/notifications/RestoreNotification'
|
||||
|
||||
export default {
|
||||
name: 'Notifications',
|
||||
components: {
|
||||
RestoreNotification,
|
||||
Notification,
|
||||
ConnectingNotification,
|
||||
FailedConnectingNotification,
|
||||
},
|
||||
computed: {
|
||||
restoreNotifications() {
|
||||
return this.notifications.filter((n) => n.type === 'restore')
|
||||
},
|
||||
normalNotifications() {
|
||||
return this.notifications.filter((n) => n.type !== 'restore')
|
||||
},
|
||||
...mapState({
|
||||
connecting: (state) => state.notification.connecting,
|
||||
failedConnecting: (state) => state.notification.failedConnecting,
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
<template>
|
||||
<button
|
||||
class="button undo-delete-notification"
|
||||
:disabled="loading"
|
||||
:class="{
|
||||
'button--loading': loading,
|
||||
'undo-delete-notification--pulsing': pulsing,
|
||||
}"
|
||||
@click="restore"
|
||||
>
|
||||
<i class="button__icon fas fa-undo"> </i>
|
||||
Restore deleted {{ notification.data.trash_item_type }}
|
||||
</button>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import TrashService from '@baserow/modules/core/services/trash'
|
||||
import { notifyIf } from '@baserow/modules/core/utils/error'
|
||||
|
||||
export default {
|
||||
name: 'RestoreNotification',
|
||||
props: {
|
||||
notification: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: false,
|
||||
pulsing: true,
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
setTimeout(() => {
|
||||
this.pulsing = false
|
||||
}, 2000)
|
||||
setTimeout(() => {
|
||||
this.close()
|
||||
}, 5000)
|
||||
},
|
||||
methods: {
|
||||
close() {
|
||||
this.$store.dispatch('notification/remove', this.notification)
|
||||
},
|
||||
async restore() {
|
||||
this.loading = true
|
||||
this.pulsing = false
|
||||
try {
|
||||
await TrashService(this.$client).restore(this.notification.data)
|
||||
} catch (error) {
|
||||
notifyIf(error, 'trash')
|
||||
}
|
||||
this.close()
|
||||
this.loading = false
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -2,7 +2,7 @@
|
|||
<Modal :sidebar="true">
|
||||
<template #sidebar>
|
||||
<div class="modal-sidebar__head">
|
||||
<div class="modal-sidebar__head-icon">
|
||||
<div class="modal-sidebar__head-initials-icon">
|
||||
{{ name | nameAbbreviation }}
|
||||
</div>
|
||||
<div class="modal-sidebar__head-name">Settings</div>
|
||||
|
|
|
@ -1,98 +0,0 @@
|
|||
<template>
|
||||
<Modal>
|
||||
<h2 class="box__title">Delete {{ application.name }}</h2>
|
||||
<Error :error="error"></Error>
|
||||
<div>
|
||||
<p>
|
||||
Are you sure you want to delete the
|
||||
{{ application._.type.name | lowercase }}
|
||||
<strong>{{ application.name }}</strong
|
||||
>?
|
||||
<span v-if="dependents.length > 0"
|
||||
>The following {{ dependentsName }}
|
||||
<template v-if="dependents.length === 1">is</template>
|
||||
<template v-else>are</template>
|
||||
also going to be permanently deleted:</span
|
||||
>
|
||||
</p>
|
||||
<div v-if="dependents.length > 0" class="delete-section">
|
||||
<div class="delete-section__label">
|
||||
<div class="delete-section__label-icon">
|
||||
<i class="fas fa-exclamation"></i>
|
||||
</div>
|
||||
Will also be permanently deleted
|
||||
</div>
|
||||
<ul class="delete-section__list">
|
||||
<li v-for="dependent in dependents" :key="dependent.id">
|
||||
<i
|
||||
class="delete-section__list-icon fas fa-database"
|
||||
:class="'fa-' + dependent.iconClass"
|
||||
></i>
|
||||
{{ dependent.name }}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="actions">
|
||||
<div class="align-right">
|
||||
<button
|
||||
class="button button--large button--error"
|
||||
:class="{ 'button--loading': loading }"
|
||||
:disabled="loading"
|
||||
@click="deleteApplication()"
|
||||
>
|
||||
Delete {{ application._.type.name | lowercase }}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
import error from '@baserow/modules/core/mixins/error'
|
||||
|
||||
export default {
|
||||
name: 'DeleteApplicationModal',
|
||||
mixins: [modal, error],
|
||||
props: {
|
||||
application: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: false,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
dependentsName() {
|
||||
const names = this.$registry
|
||||
.get('application', this.application.type)
|
||||
.getDependentsName(this.application)
|
||||
return this.dependents.length === 1 ? names[0] : names[1]
|
||||
},
|
||||
dependents() {
|
||||
return this.$registry
|
||||
.get('application', this.application.type)
|
||||
.getDependents(this.application)
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
async deleteApplication() {
|
||||
this.hideError()
|
||||
this.loading = true
|
||||
|
||||
try {
|
||||
await this.$store.dispatch('application/delete', this.application)
|
||||
this.hide()
|
||||
} catch (error) {
|
||||
this.handleError(error, 'application')
|
||||
}
|
||||
|
||||
this.loading = false
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -69,6 +69,17 @@
|
|||
</nuxt-link>
|
||||
</div>
|
||||
</li>
|
||||
<li class="tree__item">
|
||||
<div class="tree__action sidebar__action">
|
||||
<a class="tree__link" @click="$refs.trashModal.show()">
|
||||
<div>
|
||||
<i class="tree__icon fas fa-trash"></i>
|
||||
<span class="sidebar__item-name">Trash</span>
|
||||
</div>
|
||||
</a>
|
||||
<TrashModal ref="trashModal"></TrashModal>
|
||||
</div>
|
||||
</li>
|
||||
<li v-if="isStaff" class="tree__item">
|
||||
<div
|
||||
class="tree__action sidebar__action"
|
||||
|
@ -147,6 +158,7 @@
|
|||
handle: '[data-sortable-handle]',
|
||||
}"
|
||||
:application="application"
|
||||
:group="selectedGroup"
|
||||
></component>
|
||||
</ul>
|
||||
<li class="sidebar__new-wrapper">
|
||||
|
@ -232,6 +244,7 @@ import CreateApplicationContext from '@baserow/modules/core/components/applicati
|
|||
import GroupsContext from '@baserow/modules/core/components/group/GroupsContext'
|
||||
import CreateGroupModal from '@baserow/modules/core/components/group/CreateGroupModal'
|
||||
import GroupMembersModal from '@baserow/modules/core/components/group/GroupMembersModal'
|
||||
import TrashModal from '@baserow/modules/core/components/trash/TrashModal'
|
||||
|
||||
export default {
|
||||
name: 'Sidebar',
|
||||
|
@ -242,6 +255,7 @@ export default {
|
|||
GroupsContext,
|
||||
CreateGroupModal,
|
||||
GroupMembersModal,
|
||||
TrashModal,
|
||||
},
|
||||
computed: {
|
||||
/**
|
||||
|
|
|
@ -37,17 +37,28 @@
|
|||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a @click="deleteApplication()">
|
||||
<a @click="showApplicationTrashModal">
|
||||
<i class="context__menu-icon fas fa-fw fa-recycle"></i>
|
||||
View trash
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a
|
||||
:class="{ 'context__menu-item--loading': deleteLoading }"
|
||||
@click="deleteApplication()"
|
||||
>
|
||||
<i class="context__menu-icon fas fa-fw fa-trash"></i>
|
||||
Delete {{ application._.type.name | lowercase }}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
<DeleteApplicationModal
|
||||
ref="deleteApplicationModal"
|
||||
:application="application"
|
||||
/>
|
||||
</Context>
|
||||
<TrashModal
|
||||
ref="applicationTrashModal"
|
||||
:initial-group="group"
|
||||
:initial-application="application"
|
||||
>
|
||||
</TrashModal>
|
||||
</div>
|
||||
<slot name="body"></slot>
|
||||
</li>
|
||||
|
@ -55,16 +66,25 @@
|
|||
|
||||
<script>
|
||||
import { notifyIf } from '@baserow/modules/core/utils/error'
|
||||
import DeleteApplicationModal from './DeleteApplicationModal'
|
||||
import TrashModal from '@baserow/modules/core/components/trash/TrashModal'
|
||||
|
||||
export default {
|
||||
name: 'SidebarApplication',
|
||||
components: { DeleteApplicationModal },
|
||||
components: { TrashModal },
|
||||
props: {
|
||||
application: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
group: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
deleteLoading: false,
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
setLoading(application, value) {
|
||||
|
@ -94,9 +114,24 @@ export default {
|
|||
|
||||
this.setLoading(application, false)
|
||||
},
|
||||
deleteApplication() {
|
||||
async deleteApplication() {
|
||||
this.deleteLoading = true
|
||||
|
||||
try {
|
||||
await this.$store.dispatch('application/delete', this.application)
|
||||
await this.$store.dispatch('notification/restore', {
|
||||
trash_item_type: 'application',
|
||||
trash_item_id: this.application.id,
|
||||
})
|
||||
} catch (error) {
|
||||
notifyIf(error, 'application')
|
||||
}
|
||||
|
||||
this.deleteLoading = false
|
||||
},
|
||||
showApplicationTrashModal() {
|
||||
this.$refs.context.hide()
|
||||
this.$refs.deleteApplicationModal.show()
|
||||
this.$refs.applicationTrashModal.show()
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
web-frontend/modules/core/components/trash/TrashContents.vue (new file, 157 lines)
|
@ -0,0 +1,157 @@
|
|||
<template>
|
||||
<div>
|
||||
<div class="trash__title">
|
||||
<div class="trash__title-left">
|
||||
<h2 class="trash__title-heading">{{ title }}</h2>
|
||||
<div class="trash__title-description">
|
||||
Restore deleted items from the past {{ trashDuration }}
|
||||
</div>
|
||||
</div>
|
||||
<div class="trash__title-right">
|
||||
<a
|
||||
v-show="totalServerSideTrashContentsCount > 0 && !parentIsTrashed"
|
||||
class="button button--error"
|
||||
:disabled="loadingContents"
|
||||
@click="showEmptyModalIfNotLoading"
|
||||
>{{ emptyButtonText }}</a
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="loadingContents" class="loading-overlay"></div>
|
||||
<div
|
||||
v-else-if="totalServerSideTrashContentsCount === 0"
|
||||
class="trash__empty"
|
||||
>
|
||||
<i class="trash__empty-icon fas fa-recycle"></i>
|
||||
<div class="trash__empty-text">
|
||||
Nothing has been deleted in the past three days.
|
||||
</div>
|
||||
</div>
|
||||
<div v-else class="trash__entries">
|
||||
<InfiniteScroll
|
||||
:max-count="totalServerSideTrashContentsCount"
|
||||
:current-count="trashContents.length"
|
||||
@load-next-page="$emit('load-next-page', $event)"
|
||||
>
|
||||
<TrashEntry
|
||||
v-for="item in trashContents"
|
||||
:key="'trash-item-' + item.id"
|
||||
:trash-entry="item"
|
||||
:disabled="loadingContents || shouldTrashEntryBeDisabled(item)"
|
||||
@restore="$emit('restore', $event)"
|
||||
></TrashEntry>
|
||||
<div v-if="loadingNextPage" class="trash__entries-loading-wrapper">
|
||||
<div class="loading"></div>
|
||||
</div>
|
||||
</InfiniteScroll>
|
||||
</div>
|
||||
<TrashEmptyModal
|
||||
ref="emptyModal"
|
||||
:name="title"
|
||||
:loading="loadingContents"
|
||||
:selected-is-trashed="selectedItem.trashed"
|
||||
@empty="$emit('empty')"
|
||||
></TrashEmptyModal>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
/**
 * Displays an infinite scrolling list of trash contents for either a selectedTrashGroup
 * or a specific selectedTrashApplication in the selectedTrashGroup. The user can empty
 * the trash contents, permanently deleting them all, or restore individual trashed items.
 *
 * If the selectedItem (the selectedTrashApplication if provided, otherwise the
 * selectedTrashGroup) is trashed itself then the modal will display buttons and modals
 * which indicate that they will permanently delete the selectedItem instead of just
 * emptying its contents.
 */
|
||||
|
||||
import moment from 'moment'
|
||||
import TrashEntry from '@baserow/modules/core/components/trash/TrashEntry'
|
||||
import InfiniteScroll from '@baserow/modules/core/components/infinite_scroll/InfiniteScroll'
|
||||
import TrashEmptyModal from '@baserow/modules/core/components/trash/TrashEmptyModal'
|
||||
|
||||
export default {
|
||||
name: 'TrashContents',
|
||||
components: { InfiniteScroll, TrashEntry, TrashEmptyModal },
|
||||
mixins: [],
|
||||
props: {
|
||||
selectedTrashGroup: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
selectedTrashApplication: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
trashContents: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
loadingContents: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
loadingNextPage: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
totalServerSideTrashContentsCount: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
parentIsTrashed() {
|
||||
return (
|
||||
this.selectedTrashApplication !== null &&
|
||||
this.selectedTrashGroup.trashed
|
||||
)
|
||||
},
|
||||
selectedItem() {
|
||||
return this.selectedTrashApplication === null
|
||||
? this.selectedTrashGroup
|
||||
: this.selectedTrashApplication
|
||||
},
|
||||
selectedItemType() {
|
||||
return this.selectedTrashApplication === null ? 'Group' : 'Application'
|
||||
},
|
||||
title() {
|
||||
const title = this.selectedItem.name
|
||||
return title === ''
|
||||
? `Unnamed ${this.selectedItemType} ${this.selectedItem.id}`
|
||||
: title
|
||||
},
|
||||
emptyButtonText() {
|
||||
if (this.selectedItem.trashed) {
|
||||
return `Delete ${this.selectedItemType} permanently`
|
||||
} else {
|
||||
return `Empty this ${this.selectedItemType}'s trash`
|
||||
}
|
||||
},
|
||||
trashDuration() {
|
||||
const hours = this.$env.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED
|
||||
return moment().subtract(hours, 'hours').fromNow().replace('ago', '')
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
showEmptyModalIfNotLoading() {
|
||||
if (!this.loadingContents) {
|
||||
this.$refs.emptyModal.show()
|
||||
}
|
||||
},
|
||||
shouldTrashEntryBeDisabled(entry) {
|
||||
const selectedItemType = this.selectedItemType.toLowerCase()
|
||||
const entryIsForSelectedItem =
|
||||
entry.trash_item_id === this.selectedItem.id &&
|
||||
entry.trash_item_type === selectedItemType
|
||||
return (
|
||||
this.parentIsTrashed ||
|
||||
(this.selectedItem.trashed && !entryIsForSelectedItem)
|
||||
)
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -0,0 +1,55 @@
|
|||
<template>
|
||||
<Modal :tiny="true" :close-button="false">
|
||||
<h3>
|
||||
Are you sure you want to
|
||||
{{ selectedIsTrashed ? 'permanently delete' : 'empty the trash of' }}
|
||||
{{ name }}?
|
||||
</h3>
|
||||
<p>
|
||||
This will permanently delete
|
||||
{{
|
||||
selectedIsTrashed ? 'it and all of its contents' : 'the listed items'
|
||||
}}, after which they cannot be recovered.
|
||||
</p>
|
||||
<div class="actions">
|
||||
<ul class="action__links">
|
||||
<li>
|
||||
<a @click.prevent="hide()">Cancel</a>
|
||||
</li>
|
||||
</ul>
|
||||
<a class="button button button--error" @click.prevent="emitEmptyAndClose">
|
||||
{{ selectedIsTrashed ? 'Permanently delete' : 'Empty' }}
|
||||
</a>
|
||||
</div>
|
||||
</Modal>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
/**
|
||||
* A simple confirmation modal to check that the user is sure they want to permanently
|
||||
* delete / empty.
|
||||
*/
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
|
||||
export default {
|
||||
name: 'TrashEmptyModal',
|
||||
components: {},
|
||||
mixins: [modal],
|
||||
props: {
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
selectedIsTrashed: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
emitEmptyAndClose() {
|
||||
this.$emit('empty')
|
||||
this.hide()
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
web-frontend/modules/core/components/trash/TrashEntry.vue (new file, 87 lines)
|
@ -0,0 +1,87 @@
|
|||
<template>
|
||||
<div
|
||||
ref="member"
|
||||
class="trash-entry"
|
||||
:class="{ 'trash-entry--disabled': disabled }"
|
||||
>
|
||||
<div class="trash-entry__initials">
|
||||
{{ trashEntry.user_who_trashed | nameAbbreviation }}
|
||||
</div>
|
||||
<div class="trash-entry__content">
|
||||
<div class="trash-entry__name">
|
||||
{{ trashEntry.user_who_trashed || 'A Deleted User' }} Deleted
|
||||
{{ trashEntry.trash_item_type }}
|
||||
<strong>{{ trashItemTitle }}</strong>
|
||||
{{ trashEntry.parent_name ? ' from ' + trashEntry.parent_name : '' }}
|
||||
</div>
|
||||
<div class="trash-entry__deleted-at-display">{{ timeAgo }}</div>
|
||||
<span
|
||||
v-if="trashEntry.extra_description"
|
||||
class="trash-entry__extra-description"
|
||||
>
|
||||
{{ trashEntry.extra_description }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="trash-entry__actions">
|
||||
<a
|
||||
v-if="!disabled"
|
||||
class="trash-entry__action"
|
||||
:class="{ 'trash-entry__action--loading': trashEntry.loading }"
|
||||
@click="emitRestoreIfNotLoading"
|
||||
>
|
||||
{{ trashEntry.loading ? '' : 'Restore' }}
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
/**
 * Displays a specific TrashEntry with a link which will trigger the restoring of the
 * trashed entry. Shows extra information about the entry like its name, who trashed it,
 * how long ago it was trashed, etc.
 */
|
||||
import moment from 'moment'
|
||||
|
||||
export default {
|
||||
name: 'TrashEntry',
|
||||
props: {
|
||||
trashEntry: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {}
|
||||
},
|
||||
computed: {
|
||||
timeAgo() {
|
||||
return moment.utc(this.trashEntry.trashed_at).fromNow()
|
||||
},
|
||||
trashItemTitle() {
|
||||
if (this.trashEntry.name === '') {
|
||||
return (
|
||||
'Unnamed ' +
|
||||
this.trashEntry.trash_item_type +
|
||||
' ' +
|
||||
this.trashEntry.trash_item_id
|
||||
)
|
||||
} else {
|
||||
return this.trashEntry.name
|
||||
}
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
emitRestoreIfNotLoading() {
|
||||
if (!this.trashEntry.loading) {
|
||||
this.$emit('restore', this.trashEntry)
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
web-frontend/modules/core/components/trash/TrashModal.vue (new file, 333 lines)
|
@ -0,0 +1,333 @@
|
|||
<template>
|
||||
<Modal :sidebar="true" :full-height="true">
|
||||
<template #sidebar>
|
||||
<TrashSidebar
|
||||
v-if="!loading"
|
||||
:groups="groups"
|
||||
:selected-trash-group="selectedTrashGroup"
|
||||
:selected-trash-application="selectedTrashApplication"
|
||||
@selected="selectGroupOrApp"
|
||||
></TrashSidebar>
|
||||
</template>
|
||||
<template #content>
|
||||
<div v-if="loading" class="loading-absolute-center"></div>
|
||||
<div v-else-if="groups.length === 0" class="placeholder">
|
||||
<div class="placeholder__icon">
|
||||
<i class="fas fa-layer-group"></i>
|
||||
</div>
|
||||
<h1 class="placeholder__title">No groups found</h1>
|
||||
<p class="placeholder__content">
|
||||
You aren't a member of any group. Applications like databases belong
|
||||
to a group, so in order to create them you need to create a group.
|
||||
</p>
|
||||
</div>
|
||||
<TrashContent
|
||||
v-else
|
||||
:selected-trash-group="selectedTrashGroup"
|
||||
:selected-trash-application="selectedTrashApplication"
|
||||
:trash-contents="trashContents"
|
||||
:loading-contents="loadingContents"
|
||||
:loading-next-page="loadingNextPage"
|
||||
:total-server-side-trash-contents-count="
|
||||
totalServerSideTrashContentsCount
|
||||
"
|
||||
@empty="onEmpty"
|
||||
@restore="onRestore"
|
||||
@load-next-page="loadNextPage"
|
||||
></TrashContent>
|
||||
</template>
|
||||
</Modal>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { mapState } from 'vuex'
|
||||
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
import { notifyIf } from '@baserow/modules/core/utils/error'
|
||||
import TrashService from '@baserow/modules/core/services/trash'
|
||||
import TrashSidebar from '@baserow/modules/core/components/trash/TrashSidebar'
|
||||
import TrashContent from '@baserow/modules/core/components/trash/TrashContents'
|
||||
|
||||
export default {
|
||||
name: 'TrashModal',
|
||||
components: { TrashSidebar, TrashContent },
|
||||
mixins: [modal],
|
||||
props: {
|
||||
initialGroup: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
initialApplication: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: true,
|
||||
loadingContents: true,
|
||||
loadingNextPage: false,
|
||||
groups: [],
|
||||
trashContents: [],
|
||||
selectedTrashGroup: null,
|
||||
selectedTrashApplication: null,
|
||||
totalServerSideTrashContentsCount: 0,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
...mapState({
|
||||
selectedGroup: (state) => state.group.selected,
|
||||
selectedApplication: (state) => state.application.selected,
|
||||
}),
|
||||
},
|
||||
methods: {
|
||||
/**
|
||||
* Chooses which group to show when the modal is shown.
|
||||
**/
|
||||
pickInitialGroupToSelect() {
|
||||
// The initial or selected groups will not contain the trashed flag, so
// we must look them up in the groups fetched from the trash api.
|
||||
const initialGroupWithTrashInfo = this.initialGroup
|
||||
? this.groups.find((i) => i.id === this.initialGroup.id)
|
||||
: null
|
||||
const selectedGroupWithTrashInfo = this.selectedGroup
|
||||
? this.groups.find((i) => i.id === this.selectedGroup.id)
|
||||
: null
|
||||
return (
|
||||
initialGroupWithTrashInfo ||
|
||||
selectedGroupWithTrashInfo ||
|
||||
this.groups[0] || // When all groups are trashed we want to pick the first one.
|
||||
null
|
||||
)
|
||||
},
|
||||
/**
|
||||
* Chooses which app to show when the modal is shown.
|
||||
**/
|
||||
pickInitialApplicationToSelect(firstGroupToShow) {
|
||||
if (firstGroupToShow === null) {
|
||||
return null
|
||||
} else {
|
||||
// The initial or selected apps will not contain the trashed flag, so
// we must look them up in the groups fetched from the trash api.
|
||||
const applications = firstGroupToShow.applications
|
||||
if (this.initialApplication || this.initialGroup) {
|
||||
// When either of the initial props are set we have been opened via a context
|
||||
// menu shortcut.
|
||||
return this.initialApplication
|
||||
? applications.find((i) => i.id === this.initialApplication.id)
|
||||
: null
|
||||
} else {
|
||||
return this.selectedApplication
|
||||
? applications.find((i) => i.id === this.selectedApplication.id)
|
||||
: null
|
||||
}
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Loads the structure of the trash modal from the server, selects an initial
|
||||
* group or application depending on the props and shows the trash modal.
|
||||
**/
|
||||
async show(...args) {
|
||||
modal.methods.show.call(this, ...args)
|
||||
|
||||
this.loading = true
|
||||
this.groups = []
|
||||
this.selectedTrashGroup = null
|
||||
this.selectedTrashApplication = null
|
||||
|
||||
try {
|
||||
const { data } = await TrashService(this.$client).fetchStructure()
|
||||
this.groups = data.groups
|
||||
const initialGroup = this.pickInitialGroupToSelect()
|
||||
await this.selectGroupOrApp({
|
||||
group: initialGroup,
|
||||
application: this.pickInitialApplicationToSelect(initialGroup),
|
||||
})
|
||||
} catch (error) {
|
||||
notifyIf(error, 'trash')
|
||||
this.hide()
|
||||
}
|
||||
this.loading = false
|
||||
},
|
||||
/**
|
||||
* Loads the next page of trash contents for the currently selected application.
|
||||
*/
|
||||
async loadTrashContentsPage(nextPage) {
|
||||
if (
|
||||
this.selectedTrashGroup === null &&
|
||||
this.selectedTrashApplication === null
|
||||
) {
|
||||
return
|
||||
}
|
||||
try {
|
||||
const { data } = await TrashService(this.$client).fetchContents({
|
||||
page: nextPage,
|
||||
groupId: this.selectedTrashGroup.id,
|
||||
applicationId:
|
||||
this.selectedTrashApplication !== null
|
||||
? this.selectedTrashApplication.id
|
||||
: null,
|
||||
})
|
||||
this.totalServerSideTrashContentsCount = data.count
|
||||
data.results.forEach((entry) => {
|
||||
entry.loading = false
|
||||
this.trashContents.push(entry)
|
||||
})
|
||||
} catch (error) {
|
||||
notifyIf(error, 'trash')
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Switches to a different group or application to display the trash contents for
|
||||
* and triggers the fetch for the first page of contents.
|
||||
*/
|
||||
async selectGroupOrApp({ group, application = null }) {
|
||||
this.selectedTrashGroup = group
|
||||
this.selectedTrashApplication = application
|
||||
this.loadingContents = true
|
||||
this.trashContents = []
|
||||
this.totalServerSideTrashContentsCount = 0
|
||||
await this.loadTrashContentsPage(1)
|
||||
this.loadingContents = false
|
||||
},
|
||||
/**
|
||||
* Loads another page of contents after the initial page has already been loaded.
* A different loading indicator is used here because it is still fine to, say,
* restore an item whilst another page is loading.
|
||||
*/
|
||||
async loadNextPage(nextPage) {
|
||||
this.loadingNextPage = true
|
||||
await this.loadTrashContentsPage(nextPage)
|
||||
this.loadingNextPage = false
|
||||
},
|
||||
/**
|
||||
* Triggered when a user requests a trashEntry be restored. Sends the request to
|
||||
* the server, updates the client side state if successful and updates the trash
|
||||
* structure if, say, a group or application was restored.
|
||||
*/
|
||||
async onRestore(trashEntry) {
|
||||
try {
|
||||
trashEntry.loading = true
|
||||
await TrashService(this.$client).restore({
|
||||
trash_item_type: trashEntry.trash_item_type,
|
||||
trash_item_id: trashEntry.trash_item_id,
|
||||
parent_trash_item_id: trashEntry.parent_trash_item_id,
|
||||
})
|
||||
const index = this.trashContents.findIndex(
|
||||
(t) => t.id === trashEntry.id
|
||||
)
|
||||
this.trashContents.splice(index, 1)
|
||||
this.totalServerSideTrashContentsCount--
|
||||
this.updateStructureIfGroupOrAppRestored(trashEntry)
|
||||
} catch (error) {
|
||||
notifyIf(error, 'trash')
|
||||
}
|
||||
trashEntry.loading = false
|
||||
},
|
||||
updateStructureIfGroupOrAppRestored(trashEntry) {
|
||||
/**
|
||||
* If a group or app is trashed it is displayed with a strike through its text.
|
||||
* This method checks if a restored trash entry is a group or application and
|
||||
* if so updates the state of said group/app so it no longer is displayed as
|
||||
* trashed.
|
||||
*/
|
||||
const trashItemId = trashEntry.trash_item_id
|
||||
const trashItemType = trashEntry.trash_item_type
|
||||
if (trashItemType === 'group') {
|
||||
const index = this.groups.findIndex((group) => group.id === trashItemId)
|
||||
this.groups[index].trashed = false
|
||||
} else if (trashItemType === 'application') {
|
||||
const index = this.selectedTrashGroup.applications.findIndex(
|
||||
(app) => app.id === trashItemId
|
||||
)
|
||||
this.selectedTrashGroup.applications[index].trashed = false
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Triggered when the user has requested the currently selected group or app
|
||||
* should be emptied. If the selected item is trashed itself the empty operation
|
||||
* will permanently delete the selected item also. Once emptied this method will
|
||||
* ensure that any now permanently deleted groups or apps are removed from the
|
||||
* sidebar.
|
||||
*/
|
||||
async onEmpty() {
|
||||
this.loadingContents = true
|
||||
try {
|
||||
const applicationIdOrNull =
|
||||
this.selectedTrashApplication !== null
|
||||
? this.selectedTrashApplication.id
|
||||
: null
|
||||
await TrashService(this.$client).emptyContents({
|
||||
groupId: this.selectedTrashGroup.id,
|
||||
applicationId: applicationIdOrNull,
|
||||
})
|
||||
this.removeGroupOrAppFromSidebarIfNowPermDeleted()
|
||||
this.trashContents = []
|
||||
this.totalServerSideTrashContentsCount = 0
|
||||
} catch (error) {
|
||||
notifyIf(error, 'trash')
|
||||
}
|
||||
this.loadingContents = false
|
||||
},
|
||||
removeSelectedAppFromSidebar() {
|
||||
const applicationId = this.selectedTrashApplication.id
|
||||
|
||||
const indexToDelete = this.selectedTrashGroup.applications.findIndex(
|
||||
(app) => app.id === applicationId
|
||||
)
|
||||
this.selectedTrashGroup.applications.splice(indexToDelete, 1)
|
||||
if (this.selectedTrashGroup.applications.length > 0) {
|
||||
this.selectedTrashApplication = this.selectedTrashGroup.applications[0]
|
||||
} else {
|
||||
this.selectedTrashApplication = null
|
||||
}
|
||||
},
|
||||
removeSelectedTrashGroupFromSidebar() {
|
||||
const indexToDelete = this.groups.findIndex(
|
||||
(group) => group.id === this.selectedTrashGroup.id
|
||||
)
|
||||
this.groups.splice(indexToDelete, 1)
|
||||
if (this.groups.length > 0) {
|
||||
this.selectedTrashGroup = this.groups[0]
|
||||
} else {
|
||||
this.selectedTrashGroup = null
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Updates the trash structure to remove any deleted groups or applications after
|
||||
* an empty is performed.
|
||||
*/
|
||||
removeGroupOrAppFromSidebarIfNowPermDeleted() {
|
||||
if (
|
||||
this.selectedTrashApplication !== null &&
|
||||
this.selectedTrashApplication.trashed
|
||||
) {
|
||||
this.removeSelectedAppFromSidebar()
|
||||
this.selectGroupOrApp({
|
||||
group: this.selectedTrashGroup,
|
||||
application: this.selectedTrashApplication,
|
||||
})
|
||||
} else if (this.selectedTrashGroup.trashed) {
|
||||
this.removeSelectedTrashGroupFromSidebar()
|
||||
this.selectGroupOrApp({
|
||||
group: this.selectedTrashGroup,
|
||||
application: this.selectedTrashApplication,
|
||||
})
|
||||
} else if (this.selectedTrashApplication === null) {
|
||||
// The group was emptied; it might have contained trashed applications, hence
|
||||
// we need to search through the trash and remove any now deleted applications.
|
||||
for (const app of this.selectedTrashGroup.applications.slice()) {
|
||||
if (app.trashed) {
|
||||
const index = this.selectedTrashGroup.applications.findIndex(
|
||||
(i) => i.id === app.id
|
||||
)
|
||||
this.selectedTrashGroup.applications.splice(index, 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
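Tying the pieces together, the restore calls made by `RestoreNotification` and `TrashModal` above send a payload containing `trash_item_type`, `trash_item_id` and, when needed, `parent_trash_item_id`. A minimal sketch of an equivalent request from a Python client follows; the endpoint path, HTTP method and auth header are assumptions for illustration, only the payload field names are taken from the diff.

```python
import requests

# Hypothetical example: restore a trashed row through the REST API.
# Only the payload field names come from the components above; the URL,
# HTTP method and token header are assumed for illustration.
response = requests.patch(
    "https://baserow.example.com/api/trash/restore/",  # assumed endpoint
    headers={"Authorization": "JWT <your-token>"},
    json={
        "trash_item_type": "row",
        "trash_item_id": 42,
        "parent_trash_item_id": 7,  # e.g. the parent table id, when required
    },
)
response.raise_for_status()
```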
web-frontend/modules/core/components/trash/TrashSidebar.vue (new file, 105 lines)
|
@ -0,0 +1,105 @@
|
|||
<template>
|
||||
<div>
|
||||
<div class="modal-sidebar__head">
|
||||
<div class="modal-sidebar__head-icon-and-name">
|
||||
<i class="modal-sidebar__head-icon-and-name-icon fas fa-trash"></i>
|
||||
Trash
|
||||
</div>
|
||||
</div>
|
||||
<ul class="trash-sidebar__groups">
|
||||
<li
|
||||
v-for="group in groups"
|
||||
:key="'trash-group-' + group.id"
|
||||
class="trash-sidebar__group"
|
||||
:class="{
|
||||
'trash-sidebar__group--active': isSelectedTrashGroup(group),
|
||||
'trash-sidebar__group--open': isSelectedTrashGroupApplication(group),
|
||||
'trash-sidebar__group--trashed': group.trashed,
|
||||
}"
|
||||
>
|
||||
<a
|
||||
class="trash-sidebar__group-link"
|
||||
@click="emitIfNotAlreadySelectedTrashGroup(group)"
|
||||
>
|
||||
{{ group.name || 'Unnamed group ' + group.id }}
|
||||
</a>
|
||||
<ul class="trash-sidebar__applications">
|
||||
<li
|
||||
v-for="application in group.applications"
|
||||
:key="'trash-application-' + application.id"
|
||||
class="trash-sidebar__application"
|
||||
:class="{
|
||||
'trash-sidebar__application--active': isSelectedApp(application),
|
||||
'trash-sidebar__application--trashed':
|
||||
group.trashed || application.trashed,
|
||||
}"
|
||||
>
|
||||
<a
|
||||
class="trash-sidebar__application-link"
|
||||
@click="
|
||||
emitIfNotAlreadySelectedTrashApplication(group, application)
|
||||
"
|
||||
>{{
|
||||
application.name || 'Unnamed application ' + application.id
|
||||
}}</a
|
||||
>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: 'TrashSidebar',
|
||||
props: {
|
||||
groups: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
selectedTrashGroup: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
selectedTrashApplication: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
isSelectedTrashGroup(group) {
|
||||
return (
|
||||
group.id === this.selectedTrashGroup.id &&
|
||||
this.selectedTrashApplication === null
|
||||
)
|
||||
},
|
||||
isSelectedTrashGroupApplication(group) {
|
||||
return group.applications.some((application) =>
|
||||
this.isSelectedApp(application)
|
||||
)
|
||||
},
|
||||
isSelectedApp(app) {
|
||||
return (
|
||||
this.selectedTrashApplication !== null &&
|
||||
app.id === this.selectedTrashApplication.id
|
||||
)
|
||||
},
|
||||
emitIfNotAlreadySelectedTrashGroup(group) {
|
||||
if (!this.isSelectedTrashGroup(group)) {
|
||||
this.emitSelected({ group })
|
||||
}
|
||||
},
|
||||
emitIfNotAlreadySelectedTrashApplication(group, application) {
|
||||
if (!this.isSelectedApp(application)) {
|
||||
this.emitSelected({ group, application })
|
||||
}
|
||||
},
|
||||
emitSelected(selected) {
|
||||
this.$emit('selected', selected)
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -51,6 +51,12 @@ export default function DatabaseModule(options) {
|
|||
key: 'INITIAL_TABLE_DATA_LIMIT',
|
||||
default: null,
|
||||
},
|
||||
{
|
||||
// If you change this default please also update the default for the
|
||||
// backend found in src/baserow/config/settings/base.py:321
|
||||
key: 'HOURS_UNTIL_TRASH_PERMANENTLY_DELETED',
|
||||
default: 24 * 3,
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
|
|
|
@ -907,7 +907,7 @@
|
|||
</a>
|
||||
<div class="modal__box-sidebar">
|
||||
<div class="modal-sidebar__head">
|
||||
<div class="modal-sidebar__head-icon">B</div>
|
||||
<div class="modal-sidebar__head-initials-icon">B</div>
|
||||
<div class="modal-sidebar__head-name">Settings</div>
|
||||
</div>
|
||||
<ul class="modal-sidebar__nav">
|
||||
|
|
|
@ -68,6 +68,11 @@ class ErrorHandler {
|
|||
"Couldn't create field.",
|
||||
"The action couldn't be completed because the field count exceeds the limit"
|
||||
),
|
||||
ERROR_CANNOT_RESTORE_PARENT_BEFORE_CHILD: new ResponseErrorMessage(
|
||||
'Please restore the parent first.',
|
||||
'You cannot restore this because it is within something which is already' +
|
||||
' trashed. Please restore the parent item first.'
|
||||
),
|
||||
}
|
||||
|
||||
// A temporary notFoundMap containing the error messages for when the
|
||||
|
|
Some files were not shown because too many files have changed in this diff.