Resolve "Prevent websockets from leaking information about resources in Baserow"
This commit is contained in:
parent: 87f2f96ff4
commit: 581ee16c76

19 changed files with 734 additions and 106 deletions
Changed areas:
backend/src/baserow (contrib/database, core, ws)
backend/tests/baserow
enterprise/backend/tests/baserow_enterprise_tests/ws
web-frontend/modules
@@ -1,3 +1,5 @@
+from typing import List
+
 from drf_spectacular.utils import extend_schema_field
 from rest_framework import serializers

@@ -6,6 +8,7 @@ from baserow.contrib.database.api.tables.serializers import TableSerializer
 from baserow.contrib.database.operations import ListTablesDatabaseTableOperationType
 from baserow.contrib.database.table.models import Table
 from baserow.core.handler import CoreHandler
+from baserow.core.models import Application


 class DatabaseSerializer(ApplicationSerializer):

@@ -19,23 +22,26 @@ class DatabaseSerializer(ApplicationSerializer):
         fields = ApplicationSerializer.Meta.fields + ("tables",)

     @extend_schema_field(TableSerializer(many=True))
-    def get_tables(self, instance):
+    def get_tables(self, instance: Application) -> List:
         """
         Because the instance doesn't know at this point it is a Database we have to
         select the related tables this way.

         :param instance: The database application instance.
         :type instance: Application
         :return: A list of serialized tables that belong to this instance.
         :rtype: list
         """

         tables = Table.objects.filter(database_id=instance.pk)

+        user = self.context.get("user")
         request = self.context.get("request")
-        if request and hasattr(request, "user"):
+
+        if user is None and hasattr(request, "user"):
             user = request.user

+        if user:
             tables = CoreHandler().filter_queryset(
-                request.user,
+                user,
                 ListTablesDatabaseTableOperationType.type,
                 tables,
                 group=instance.group,
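The dual `user`/`request` lookup above exists because this serializer is rendered from two different places: HTTP views, which supply a `request` in the serializer context, and Celery tasks, which have no request and pass the user directly. A minimal sketch of both call sites, assuming a `database` instance and `user`/`request` variables from the surrounding code:

    # From a view: the request carries the user (illustrative, not a verbatim
    # Baserow call site).
    DatabaseSerializer(database, context={"request": request}).data

    # From a background task: no request exists, so the user is passed
    # explicitly, as broadcast_application_created does later in this commit.
    DatabaseSerializer(database, context={"user": user}).data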
@@ -1,20 +1,28 @@
+from django.contrib.auth.models import AbstractUser
 from django.db import transaction
 from django.dispatch import receiver

 from baserow.contrib.database.api.tables.serializers import TableSerializer
 from baserow.contrib.database.table import signals as table_signals
+from baserow.contrib.database.table.models import Table
+from baserow.contrib.database.table.object_scopes import DatabaseTableObjectScopeType
+from baserow.contrib.database.table.operations import ReadDatabaseTableOperationType
 from baserow.contrib.database.table.tasks import (
     unsubscribe_user_from_table_currently_subscribed_to,
 )
 from baserow.core import signals as core_signals
-from baserow.ws.tasks import broadcast_to_group
+from baserow.core.utils import generate_hash
+from baserow.ws.tasks import broadcast_to_group, broadcast_to_permitted_users


 @receiver(table_signals.table_created)
 def table_created(sender, table, user, **kwargs):
     transaction.on_commit(
-        lambda: broadcast_to_group.delay(
+        lambda: broadcast_to_permitted_users.delay(
             table.database.group_id,
+            ReadDatabaseTableOperationType.type,
+            DatabaseTableObjectScopeType.type,
+            table.id,
             {"type": "table_created", "table": TableSerializer(table).data},
             getattr(user, "web_socket_id", None),
         )

@@ -22,10 +30,19 @@ def table_created(sender, table, user, **kwargs):


 @receiver(table_signals.table_updated)
-def table_updated(sender, table, user, force_table_refresh=False, **kwargs):
+def table_updated(
+    sender,
+    table: Table,
+    user: AbstractUser,
+    force_table_refresh: bool = False,
+    **kwargs
+):
     transaction.on_commit(
-        lambda: broadcast_to_group.delay(
+        lambda: broadcast_to_permitted_users.delay(
             table.database.group_id,
+            ReadDatabaseTableOperationType.type,
+            DatabaseTableObjectScopeType.type,
+            table.id,
             {
                 "type": "table_updated",
                 "table_id": table.id,

@@ -40,8 +57,11 @@ def table_updated(sender, table, user, force_table_refresh=False, **kwargs):
 @receiver(table_signals.table_deleted)
 def table_deleted(sender, table_id, table, user, **kwargs):
     transaction.on_commit(
-        lambda: broadcast_to_group.delay(
+        lambda: broadcast_to_permitted_users.delay(
             table.database.group_id,
+            ReadDatabaseTableOperationType.type,
+            DatabaseTableObjectScopeType.type,
+            table.id,
             {
                 "type": "table_deleted",
                 "database_id": table.database_id,

@@ -54,12 +74,16 @@ def table_deleted(sender, table_id, table, user, **kwargs):

 @receiver(table_signals.tables_reordered)
 def tables_reordered(sender, database, order, user, **kwargs):
+    # Hashing all values here to not expose real ids of tables a user might not have
+    # access to
+    order = [generate_hash(o) for o in order]
     transaction.on_commit(
         lambda: broadcast_to_group.delay(
             database.group_id,
             {
                 "type": "tables_reordered",
-                "database_id": database.id,
+                # A user might also not have access to the database itself
+                "database_id": generate_hash(database.id),
                 "order": order,
             },
             getattr(user, "web_socket_id", None),
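For reference, the positional arguments the table signals now pass line up one-to-one with the `broadcast_to_permitted_users` task signature added later in this commit. An annotated sketch using the names from the hunks above (payload shortened for illustration):

    broadcast_to_permitted_users.delay(
        table.database.group_id,               # group_id: whose members to consider
        ReadDatabaseTableOperationType.type,   # operation_type: permission to check
        DatabaseTableObjectScopeType.type,     # scope_name: registry name of the scope model
        table.id,                              # scope_id: the table acting as permission context
        {"type": "table_created"},             # payload: the websocket message
        getattr(user, "web_socket_id", None),  # ignore_web_socket_id: skip the originator
    )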
@@ -3,7 +3,7 @@ import json
 import os
 from io import BytesIO
 from pathlib import Path
-from typing import IO, Any, Dict, List, NewType, Optional, Tuple, TypedDict, cast
+from typing import IO, Any, Dict, List, NewType, Optional, Set, Tuple, TypedDict, cast
 from urllib.parse import urljoin, urlparse
 from zipfile import ZIP_DEFLATED, ZipFile

@@ -352,6 +352,21 @@ class CoreHandler:

         return queryset

+    def get_user_ids_of_permitted_users(
+        self, users: List[AbstractUser], operation_name: str, group: Group, context=None
+    ) -> Set[int]:
+
+        permitted_user_ids = set()
+
+        # TODO replace with batch check_permission once it's implemented
+        for user in users:
+            if self.check_permissions(
+                user, operation_name, group, context, raise_error=False
+            ):
+                permitted_user_ids.add(user.id)
+
+        return permitted_user_ids
+
     def get_group_for_update(self, group_id: int) -> GroupForUpdate:
         return cast(
             GroupForUpdate,
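The new helper checks each candidate user individually (the TODO notes a batch `check_permission` is still pending), so its cost grows linearly with group size. Usage mirrors the test added further down in this commit:

    permitted_ids = CoreHandler().get_user_ids_of_permitted_users(
        [user, user_of_another_group],   # candidate users to filter
        ReadGroupOperationType.type,     # operation to check
        group,                           # group the operation happens in
        context=group,                   # context object for the permission check
    )
    assert permitted_ids == {user.id}    # only permitted users remain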
@@ -689,3 +689,17 @@ def atomic_if_not_already():
     """

     return optional_atomic(transaction.get_autocommit())
+
+
+def generate_hash(value: str):
+    """
+    Generates a hexadecimal hash given an input value. The same function is replicated
+    in the frontend as `generateHash` such that the front and backend can share the
+    same hashing algorithm.
+
+    :param value: The value used to generate the hash
+    :return: The hexadecimal hash of the value provided using sha256
+    """
+
+    value_hashed = hashlib.sha256()
+    value_hashed.update(str(value).encode("UTF-8"))
+    return value_hashed.hexdigest()
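Because payloads now carry hashed ids, clients can no longer compare raw ids; they must hash the ids they already know and compare hex digests. An equivalent standalone computation, assuming it mirrors `generate_hash` above (which hashes the decimal string form of the value):

    import hashlib

    def reference_hash(value):
        # sha256 over the string representation, hex encoded; matches both
        # generate_hash (Python) and the frontend generateHash (JS) below.
        return hashlib.sha256(str(value).encode("UTF-8")).hexdigest()

    # e.g. a "tables_reordered" order list would be compared as:
    # [reference_hash(table.id) for table in tables] == payload["order"]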
@@ -128,6 +128,20 @@ class CoreConsumer(AsyncJsonWebsocketConsumer):
         if shouldnt_ignore and (self.scope["user"].id in user_ids or send_to_all_users):
             await self.send_json(payload)

+    async def broadcast_to_users_individual_payloads(self, event):
+        """
+        Accepts a payload mapping and sends the payload as JSON if the user_id of the
+        consumer is part of the mapping provided.
+
+        :param event: The event containing the payload mapping
+        """
+
+        payload_map = event["payload_map"]
+        user_id = str(self.scope["user"].id)
+
+        if user_id in payload_map:
+            await self.send_json(payload_map[user_id])
+
     async def broadcast_to_group(self, event):
         """
         Broadcasts a message to all the users that are in the provided group name.
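The consumer keys the mapping by stringified user id because the payload map travels through the channel layer as JSON, where object keys are strings. A sketch of the event shape it handles (illustrative values):

    event = {
        "type": "broadcast_to_users_individual_payloads",
        "payload_map": {
            "7": {"type": "application_created", "application": {"id": 12}},
            "9": {"type": "application_created", "application": {"id": 12}},
        },
    }
    # Each connected consumer looks up str(self.scope["user"].id) and only
    # delivers the payload addressed to that user.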
@@ -1,7 +1,11 @@
+from django.contrib.auth.models import AbstractUser
 from django.db import transaction
 from django.dispatch import receiver

-from baserow.api.applications.serializers import get_application_serializer
+from baserow.api.applications.serializers import (
+    ApplicationSerializer,
+    get_application_serializer,
+)
 from baserow.api.groups.serializers import (
     GroupSerializer,
     GroupUserGroupSerializer,

@@ -10,9 +14,21 @@ from baserow.api.groups.serializers import (
 from baserow.api.user.serializers import PublicUserSerializer
 from baserow.core import signals
 from baserow.core.handler import CoreHandler
-from baserow.core.models import GroupUser
+from baserow.core.models import Application, GroupUser
+from baserow.core.operations import (
+    ListApplicationsGroupOperationType,
+    ReadApplicationOperationType,
+)
+from baserow.core.registries import object_scope_type_registry
+from baserow.core.utils import generate_hash

-from .tasks import broadcast_to_group, broadcast_to_groups, broadcast_to_users
+from .tasks import (
+    broadcast_application_created,
+    broadcast_to_group,
+    broadcast_to_groups,
+    broadcast_to_permitted_users,
+    broadcast_to_users,
+)


 @receiver(signals.user_updated)

@@ -163,6 +179,22 @@ def group_restored(sender, group_user, user, **kwargs):
     groupuser_groups = (
         CoreHandler().get_groupuser_group_queryset().get(id=group_user.id)
     )

+    applications_qs = group_user.group.application_set.select_related(
+        "content_type", "group"
+    ).all()
+    applications_qs = CoreHandler().filter_queryset(
+        group_user.user,
+        ListApplicationsGroupOperationType.type,
+        applications_qs,
+        group=group_user.group,
+        context=group_user.group,
+    )
+    applications = [
+        get_application_serializer(application, context={"user": group_user.user}).data
+        for application in applications_qs
+    ]
+
     transaction.on_commit(
         lambda: broadcast_to_users.delay(
             [group_user.user_id],

@@ -170,12 +202,7 @@ def group_restored(sender, group_user, user, **kwargs):
                 "type": "group_restored",
                 "group_id": group_user.group_id,
                 "group": GroupUserGroupSerializer(groupuser_groups).data,
-                "applications": [
-                    get_application_serializer(application).data
-                    for application in group_user.group.application_set.select_related(
-                        "content_type", "group"
-                    ).all()
-                ],
+                "applications": applications,
             },
             getattr(user, "web_socket_id", None),
         )

@@ -195,27 +222,23 @@ def groups_reordered(sender, group_ids, user, **kwargs):

 @receiver(signals.application_created)
 def application_created(sender, application, user, **kwargs):
-    transaction.on_commit(
-        lambda: broadcast_to_group.delay(
-            application.group_id,
-            {
-                "type": "application_created",
-                "application": get_application_serializer(application).data,
-            },
-            getattr(user, "web_socket_id", None),
-        )
-    )
+    transaction.on_commit(lambda: broadcast_application_created.delay(application.id))


 @receiver(signals.application_updated)
-def application_updated(sender, application, user, **kwargs):
+def application_updated(sender, application: Application, user: AbstractUser, **kwargs):
+    scope_type = object_scope_type_registry.get_by_model(application.specific)
+
     transaction.on_commit(
-        lambda: broadcast_to_group.delay(
+        lambda: broadcast_to_permitted_users.delay(
             application.group_id,
+            ReadApplicationOperationType.type,
+            scope_type.type,
+            application.id,
             {
                 "type": "application_updated",
                 "application_id": application.id,
-                "application": get_application_serializer(application).data,
+                "application": ApplicationSerializer(application).data,
             },
             getattr(user, "web_socket_id", None),
         )

@@ -224,9 +247,14 @@ def application_updated(sender, application, user, **kwargs):

 @receiver(signals.application_deleted)
 def application_deleted(sender, application_id, application, user, **kwargs):
+    scope_type = object_scope_type_registry.get_by_model(application.specific)
+
     transaction.on_commit(
-        lambda: broadcast_to_group.delay(
+        lambda: broadcast_to_permitted_users.delay(
             application.group_id,
+            ReadApplicationOperationType.type,
+            scope_type.type,
+            application.id,
             {"type": "application_deleted", "application_id": application_id},
             getattr(user, "web_socket_id", None),
         )

@@ -235,6 +263,9 @@ def application_deleted(sender, application_id, application, user, **kwargs):

 @receiver(signals.applications_reordered)
 def applications_reordered(sender, group, order, user, **kwargs):
+    # Hashing all values here to not expose real ids of applications a user might not
+    # have access to
+    order = [generate_hash(o) for o in order]
     transaction.on_commit(
         lambda: broadcast_to_group.delay(
             group.id,
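Taken together, `application_created` no longer broadcasts one message to the whole group: it defers to a task (added in the next hunks) that fans out per user, so each recipient only sees what they are permitted to see. A minimal standalone sketch of that fan-out idea (hypothetical helper, not Baserow code):

    def build_payload_map(permitted_user_ids, serialize_for_user):
        """One payload per permitted user, keyed by stringified user id."""
        return {str(uid): serialize_for_user(uid) for uid in permitted_user_ids}

    payload_map = build_payload_map(
        [7, 9],
        lambda uid: {"type": "application_created", "visible_to_user": uid},
    )
    assert set(payload_map) == {"7", "9"}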
@@ -41,6 +41,89 @@ def broadcast_to_users(
     )


+@app.task(bind=True)
+def broadcast_to_permitted_users(
+    self,
+    group_id: int,
+    operation_type: str,
+    scope_name: str,
+    scope_id: int,
+    payload: Dict[str, any],
+    ignore_web_socket_id: Optional[int] = None,
+):
+    """
+    This task will broadcast a websocket message to all the users that are permitted
+    to perform the operation provided.
+
+    :param self:
+    :param group_id: The group the users are in
+    :param operation_type: The operation that should be checked for
+    :param scope_name: The name of the scope that the operation is executed on
+    :param scope_id: The id of the scope instance
+    :param payload: The message being sent
+    :param ignore_web_socket_id: An optional web socket id which will not be sent the
+        payload if provided. This is normally the web socket id that has originally
+        made the change request.
+    :return:
+    """
+
+    from baserow.core.handler import CoreHandler
+    from baserow.core.mixins import TrashableModelMixin
+    from baserow.core.models import Group, GroupUser
+    from baserow.core.registries import object_scope_type_registry
+
+    group = Group.objects.get(id=group_id)
+
+    users_in_group = [
+        group_user.user
+        for group_user in GroupUser.objects.filter(group=group).select_related("user")
+    ]
+
+    scope_type = object_scope_type_registry.get(scope_name)
+    scope_model_class = scope_type.model_class
+
+    objects = (
+        scope_model_class.objects_and_trash
+        if issubclass(scope_model_class, TrashableModelMixin)
+        else scope_model_class.objects
+    )
+
+    scope = objects.get(id=scope_id)
+
+    user_ids = list(
+        CoreHandler().get_user_ids_of_permitted_users(
+            users_in_group,
+            operation_type,
+            group,
+            context=scope,
+        )
+    )
+    broadcast_to_users(user_ids, payload, ignore_web_socket_id=ignore_web_socket_id)
+
+
+@app.task(bind=True)
+def broadcast_to_users_individual_payloads(self, payload_map: Dict[str, any]):
+    """
+    This task will broadcast different payloads to different users by just using one
+    message.
+
+    :param payload_map: A mapping from user_id to the payload that should be sent to
+        the user. The id has to be stringified to not violate redis channel policy.
+    """
+
+    from asgiref.sync import async_to_sync
+    from channels.layers import get_channel_layer
+
+    channel_layer = get_channel_layer()
+    async_to_sync(channel_layer.group_send)(
+        "users",
+        {
+            "type": "broadcast_to_users_individual_payloads",
+            "payload_map": payload_map,
+        },
+    )
+
+
 @app.task(bind=True)
 def broadcast_to_channel_group(self, group, payload, ignore_web_socket_id=None):
     """

@@ -131,3 +214,52 @@ def broadcast_to_groups(
         return

     broadcast_to_users(user_ids, payload, ignore_web_socket_id)
+
+
+@app.task(bind=True)
+def broadcast_application_created(self, application_id: int):
+    """
+    This task is called when an application is created. We made this a task instead of
+    running the code in the signal because calculating the individual payloads can take
+    a lot of computational power and should therefore not run on a gunicorn worker.
+
+    :param application_id: The id of the application that was created
+    :return:
+    """
+
+    from baserow.api.applications.serializers import get_application_serializer
+    from baserow.core.handler import CoreHandler
+    from baserow.core.models import Application, GroupUser
+    from baserow.core.operations import ReadApplicationOperationType
+
+    application = Application.objects.get(id=application_id)
+    group = application.group
+    users_in_group = [
+        group_user.user
+        for group_user in GroupUser.objects.filter(group=group).select_related("user")
+    ]
+
+    user_ids = list(
+        CoreHandler().get_user_ids_of_permitted_users(
+            users_in_group,
+            ReadApplicationOperationType.type,
+            group,
+            context=application.specific,
+        )
+    )
+
+    users_in_group_id_map = {user.id: user for user in users_in_group}
+
+    payload_map = {}
+    for user_id in user_ids:
+        user = users_in_group_id_map[user_id]
+        application_serialized = get_application_serializer(
+            application, context={"user": user}
+        ).data
+
+        payload_map[str(user_id)] = {
+            "type": "application_created",
+            "application": application_serialized,
+        }
+
+    broadcast_to_users_individual_payloads(payload_map)
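Two details of the tasks above are worth noting. `broadcast_to_permitted_users` resolves its scope through `objects_and_trash` when the model is trashable, because deletion broadcasts fire after the row is already flagged as trashed. And `broadcast_application_created` ends by calling `broadcast_to_users_individual_payloads(payload_map)` directly rather than via `.delay(...)`: the expensive per-user serialization already runs inside this Celery task, so the final channel-layer send can happen in-process. The flow, as wired in this commit:

    # gunicorn worker (signal handler):
    transaction.on_commit(lambda: broadcast_application_created.delay(application.id))
    # celery worker (task above): builds payload_map, then performs a single
    # group_send to the "users" channel group via
    # broadcast_to_users_individual_payloads(payload_map).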
@@ -12,10 +12,23 @@ async def received_message(communicator: WebsocketCommunicator, message_type: str):
     :param message_type: The type of message you are looking for
     :returns: If the message has been received
     """

+    return await get_message(communicator, message_type) is not None
+
+
+async def get_message(communicator: WebsocketCommunicator, message_type: str):
+    """
+    Can be called to find the next message of the specified type.
+
+    :param communicator: The communicator receiving the message
+    :param message_type: The type of message you are looking for
+    :return: The received message
+    """
+
     while True:
         try:
             message = await communicator.receive_json_from()
             if message["type"] == message_type:
-                return True
+                return message
         except asyncio.exceptions.TimeoutError:  # No more messages
-            return False
+            return None
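These refactored helpers are what the new leak tests in this commit rely on: `received_message` now answers a yes/no question, while `get_message` returns the matching payload for deeper assertions. Typical usage, following the tests added below:

    # Assert a user with NO_ACCESS never saw the event:
    assert await received_message(communicator, "table_updated") is False

    # Inspect the payload a permitted user did receive (None if absent):
    group_restored_message = await get_message(communicator, "group_restored")
    assert group_restored_message is not None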
@@ -3,35 +3,34 @@ from unittest.mock import patch
 import pytest

 from baserow.contrib.database.table.handler import TableHandler
+from baserow.core.utils import generate_hash


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.contrib.database.ws.table.signals.broadcast_to_group")
-def test_table_created(mock_broadcast_to_group, data_fixture):
+@patch("baserow.contrib.database.ws.table.signals.broadcast_to_permitted_users")
+def test_table_created(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     database = data_fixture.create_database_application(user=user)
     table, _ = TableHandler().create_table(user=user, database=database, name="Test")

-    mock_broadcast_to_group.delay.assert_called_once()
-    args = mock_broadcast_to_group.delay.call_args
-    assert args[0][0] == table.database.group_id
-    assert args[0][1]["type"] == "table_created"
-    assert args[0][1]["table"]["id"] == table.id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][4]["type"] == "table_created"
+    assert args[0][4]["table"]["id"] == table.id


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.contrib.database.ws.table.signals.broadcast_to_group")
-def test_table_updated(mock_broadcast_to_group, data_fixture):
+@patch("baserow.contrib.database.ws.table.signals.broadcast_to_permitted_users")
+def test_table_updated(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     table = TableHandler().update_table(user=user, table=table, name="Test")

-    mock_broadcast_to_group.delay.assert_called_once()
-    args = mock_broadcast_to_group.delay.call_args
-    assert args[0][0] == table.database.group_id
-    assert args[0][1]["type"] == "table_updated"
-    assert args[0][1]["table_id"] == table.id
-    assert args[0][1]["table"]["id"] == table.id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][4]["type"] == "table_updated"
+    assert args[0][4]["table_id"] == table.id
+    assert args[0][4]["table"]["id"] == table.id


 @pytest.mark.django_db(transaction=True)

@@ -46,22 +45,21 @@ def test_tables_reordered(mock_broadcast_to_channel_group, data_fixture):
     args = mock_broadcast_to_channel_group.delay.call_args
     assert args[0][0] == table.database.group_id
     assert args[0][1]["type"] == "tables_reordered"
-    assert args[0][1]["database_id"] == database.id
-    assert args[0][1]["order"] == [table.id]
+    assert args[0][1]["database_id"] == generate_hash(database.id)
+    assert args[0][1]["order"] == [generate_hash(table.id)]


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.contrib.database.ws.table.signals.broadcast_to_group")
-def test_table_deleted(mock_broadcast_to_users, data_fixture):
+@patch("baserow.contrib.database.ws.table.signals.broadcast_to_permitted_users")
+def test_table_deleted(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     table_id = table.id
     database_id = table.database_id
     TableHandler().delete_table(user=user, table=table)

-    mock_broadcast_to_users.delay.assert_called_once()
-    args = mock_broadcast_to_users.delay.call_args
-    assert args[0][0] == table.database.group_id
-    assert args[0][1]["type"] == "table_deleted"
-    assert args[0][1]["database_id"] == database_id
-    assert args[0][1]["table_id"] == table_id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][4]["type"] == "table_deleted"
+    assert args[0][4]["database_id"] == database_id
+    assert args[0][4]["table_id"] == table_id
@@ -39,6 +39,7 @@ from baserow.core.models import (
     Template,
     TemplateCategory,
 )
+from baserow.core.operations import ReadGroupOperationType
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.user_files.models import UserFile

@@ -1265,3 +1266,14 @@ def test_raise_if_user_is_last_admin_of_group(data_fixture):
         CoreHandler.raise_if_user_is_last_admin_of_group(group_user)
     except LastAdminOfGroup:
         pytest.fail("Unexpected last admin error...")
+
+
+@pytest.mark.django_db
+def test_get_user_ids_of_permitted_users(data_fixture):
+    user = data_fixture.create_user()
+    user_of_another_group = data_fixture.create_user()
+    group = data_fixture.create_group(user=user)
+
+    assert CoreHandler().get_user_ids_of_permitted_users(
+        [user, user_of_another_group], ReadGroupOperationType.type, group, context=group
+    ) == {user.id}
@@ -13,6 +13,7 @@ from baserow.core.models import (
 )
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.user.handler import UserHandler
+from baserow.core.utils import generate_hash


 @pytest.mark.django_db(transaction=True)

@@ -131,8 +132,8 @@ def test_group_restored(mock_broadcast_to_users, data_fixture):
         "name": database.name,
         "order": 0,
         "type": "database",
-        "tables": [],
         "group": {"id": group.id, "name": group.name},
+        "tables": [],
     }
     assert member_call[1]["applications"] == [expected_group_json]
     assert admin_call[0] == [user.id]

@@ -306,49 +307,45 @@ def test_groups_reordered(mock_broadcast_to_users, data_fixture):


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.ws.signals.broadcast_to_group")
-def test_application_created(mock_broadcast_to_group, data_fixture):
+@patch("baserow.ws.signals.broadcast_application_created")
+def test_application_created(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     group = data_fixture.create_group(user=user)
     database = CoreHandler().create_application(
         user=user, group=group, type_name="database", name="Database"
     )

-    mock_broadcast_to_group.delay.assert_called_once()
-    args = mock_broadcast_to_group.delay.call_args
-    assert args[0][0] == group.id
-    assert args[0][1]["type"] == "application_created"
-    assert args[0][1]["application"]["id"] == database.id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][0] == database.id


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.ws.signals.broadcast_to_group")
-def test_application_updated(mock_broadcast_to_group, data_fixture):
+@patch("baserow.ws.signals.broadcast_to_permitted_users")
+def test_application_updated(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     database = data_fixture.create_database_application(user=user)
     CoreHandler().update_application(user=user, application=database, name="Database")

-    mock_broadcast_to_group.delay.assert_called_once()
-    args = mock_broadcast_to_group.delay.call_args
-    assert args[0][0] == database.group_id
-    assert args[0][1]["type"] == "application_updated"
-    assert args[0][1]["application_id"] == database.id
-    assert args[0][1]["application"]["id"] == database.id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][4]["type"] == "application_updated"
+    assert args[0][4]["application_id"] == database.id
+    assert args[0][4]["application"]["id"] == database.id


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.ws.signals.broadcast_to_group")
-def test_application_deleted(mock_broadcast_to_group, data_fixture):
+@patch("baserow.ws.signals.broadcast_to_permitted_users")
+def test_application_deleted(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     database = data_fixture.create_database_application(user=user)
     database_id = database.id
     CoreHandler().delete_application(user=user, application=database)

-    mock_broadcast_to_group.delay.assert_called_once()
-    args = mock_broadcast_to_group.delay.call_args
-    assert args[0][0] == database.group_id
-    assert args[0][1]["type"] == "application_deleted"
-    assert args[0][1]["application_id"] == database_id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][4]["type"] == "application_deleted"
+    assert args[0][4]["application_id"] == database_id


 @pytest.mark.django_db(transaction=True)

@@ -364,12 +361,12 @@ def test_applications_reordered(mock_broadcast_to_channel_group, data_fixture):
     assert args[0][0] == database.group_id
     assert args[0][1]["type"] == "applications_reordered"
     assert args[0][1]["group_id"] == group.id
-    assert args[0][1]["order"] == [database.id]
+    assert args[0][1]["order"] == [generate_hash(database.id)]


 @pytest.mark.django_db(transaction=True)
-@patch("baserow.ws.signals.broadcast_to_group")
-def test_duplicate_application(mock_broadcast_to_channel_group, data_fixture):
+@patch("baserow.ws.signals.broadcast_application_created")
+def test_duplicate_application(mock_broadcast_to_permitted_users, data_fixture):
     user = data_fixture.create_user()
     group = data_fixture.create_group(user=user)
     database = data_fixture.create_database_application(group=group)

@@ -379,8 +376,6 @@ def test_duplicate_application(mock_broadcast_to_channel_group, data_fixture):
         user=user, application=database
     )

-    mock_broadcast_to_channel_group.delay.assert_called_once()
-    args = mock_broadcast_to_channel_group.delay.call_args
-    assert args[0][0] == group.id
-    assert args[0][1]["type"] == "application_created"
-    assert args[0][1]["application"]["id"] == application_clone.id
+    mock_broadcast_to_permitted_users.delay.assert_called_once()
+    args = mock_broadcast_to_permitted_users.delay.call_args
+    assert args[0][0] == application_clone.id
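The updated assertions index `args[0][4]` because the payload moved to the fifth positional argument of `broadcast_to_permitted_users.delay` (after group id, operation type, scope name and scope id). In sketch form:

    args = mock_broadcast_to_permitted_users.delay.call_args
    # args[0] is the positional-args tuple:
    # (group_id, operation_type, scope_name, scope_id, payload, ignore_web_socket_id)
    payload = args[0][4]
    assert payload["type"] == "application_updated"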
@@ -11,6 +11,10 @@ For example:

 ### New Features
+* Add various help icons to explain RBAC in the UI [#1318](https://gitlab.com/bramw/baserow/-/issues/1318)
+* Database and table ids are now hashed in websocket messages to not leak sensitive data [#1374](https://gitlab.com/bramw/baserow/-/issues/1374)
+* Pressing shift + enter in a selected cell of the grid view creates a new row. [#1208](https://gitlab.com/bramw/baserow/-/issues/1208)
+* Pressing enter on a selected cell should select the cell below. [#1329](https://gitlab.com/bramw/baserow/-/issues/1329)
 * Select the primary field in the grid view after creating a new row. [#1217](https://gitlab.com/bramw/baserow/-/issues/1217)
 * Pressing shift + enter in a selected cell of the grid view creates a new row. [#1208](https://gitlab.com/bramw/baserow/-/issues/1208)
 * Pressing enter on a selected cell should select the cell below. [#1329](https://gitlab.com/bramw/baserow/-/issues/1329)
 * Select the primary field in the grid view after creating a new row. [#1217](https://gitlab.com/bramw/baserow/-/issues/1217)

@@ -26,6 +30,7 @@ For example:
 * Form validator shows the correct message when a field is required. [#1475](https://gitlab.com/bramw/baserow/-/issues/1475)
 * Prevent errors after migrating and syncing RBAC roles by adding migration to rename NO_ROLE to NO_ACCESS [#1478](https://gitlab.com/bramw/baserow/-/issues/1478)
 * Fixed issue where 2 admins could lower each others permissions at the same time and lock each other out [#1443](https://gitlab.com/bramw/baserow/-/issues/1443)
+* Fixed bug preventing groups from being restored when RBAC was enabled [#1485](https://gitlab.com/bramw/baserow/-/issues/1485)

 ### Refactors
 * Replaced deprecated `execCommand('copy')` with `clipboard API` for copy and paste. [#1392](https://gitlab.com/bramw/baserow/-/issues/1392)
@@ -0,0 +1,159 @@
+import pytest
+from asgiref.sync import sync_to_async
+from channels.testing import WebsocketCommunicator
+
+from baserow.api.applications.serializers import get_application_serializer
+from baserow.config.asgi import application
+from baserow.core.handler import CoreHandler
+from baserow.core.trash.handler import TrashHandler
+from baserow.core.trash.trash_types import (
+    ApplicationTrashableItemType,
+    GroupTrashableItemType,
+)
+from baserow_enterprise.role.handler import RoleAssignmentHandler
+from baserow_enterprise.role.models import Role
+from tests.baserow.contrib.database.utils import get_message, received_message
+
+
+@pytest.fixture(autouse=True)
+def enable_enterprise_and_roles_for_all_tests_here(enable_enterprise, synced_roles):
+    pass
+
+
+@pytest.fixture(autouse=True)
+def use_async_event_loop_here(async_event_loop):
+    pass
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_database_updated_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, database
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(CoreHandler().update_application)(
+        user=user, application=database, name="Test"
+    )
+
+    assert await received_message(communicator, "application_updated") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_database_deleted_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, database
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(CoreHandler().delete_application)(
+        user=user, application=database
+    )
+
+    assert await received_message(communicator, "application_deleted") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_database_created_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, database
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(CoreHandler().delete_application)(
+        user=user, application=database
+    )
+
+    await sync_to_async(TrashHandler().restore_item)(
+        user, ApplicationTrashableItemType.type, database.id
+    )
+
+    assert await received_message(communicator, "application_created") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_group_restored_applications_arent_leaked(data_fixture):
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user_excluded)
+    database = data_fixture.create_database_application(group=group)
+    database_excluded = data_fixture.create_database_application(group=group)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded,
+        group,
+        role=no_access_role,
+        scope=database_excluded.application_ptr,
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(TrashHandler.trash)(
+        user_excluded, group, None, trash_item=group
+    )
+    await sync_to_async(TrashHandler.restore_item)(
+        user_excluded, GroupTrashableItemType.type, group.id
+    )
+
+    group_restored_message = await get_message(communicator, "group_restored")
+    assert group_restored_message is not None
+    assert group_restored_message["applications"] == [
+        get_application_serializer(database).data
+    ]
+    await communicator.disconnect()
@@ -0,0 +1,184 @@
+import pytest
+from asgiref.sync import sync_to_async
+from channels.testing import WebsocketCommunicator
+
+from baserow.config.asgi import application
+from baserow.contrib.database.table.handler import TableHandler
+from baserow.contrib.database.trash.trash_types import TableTrashableItemType
+from baserow.core.handler import CoreHandler
+from baserow.core.trash.handler import TrashHandler
+from baserow.core.trash.trash_types import (
+    ApplicationTrashableItemType,
+    GroupTrashableItemType,
+)
+from baserow_enterprise.role.handler import RoleAssignmentHandler
+from baserow_enterprise.role.models import Role
+from tests.baserow.contrib.database.utils import get_message, received_message
+
+
+@pytest.fixture(autouse=True)
+def enable_enterprise_and_roles_for_all_tests_here(enable_enterprise, synced_roles):
+    pass
+
+
+@pytest.fixture(autouse=True)
+def use_async_event_loop_here(async_event_loop):
+    pass
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_table_updated_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    table = data_fixture.create_database_table(database=database)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, table
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(TableHandler().update_table)(
+        user=user, table=table, name="Test"
+    )
+
+    assert await received_message(communicator, "table_updated") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_table_deleted_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    table = data_fixture.create_database_table(database=database)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, table
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(TableHandler().delete_table)(user, table)
+
+    assert await received_message(communicator, "table_deleted") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_table_created_message_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    table = data_fixture.create_database_table(database=database)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, table
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(TableHandler().delete_table)(user, table)
+    await sync_to_async(TrashHandler().restore_item)(
+        user, TableTrashableItemType.type, table.id
+    )
+
+    assert await received_message(communicator, "table_created") is False
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_group_restored_tables_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    table = data_fixture.create_database_table(database=database)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, table
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(CoreHandler().delete_group)(user, group)
+    await sync_to_async(TrashHandler().restore_item)(
+        user, GroupTrashableItemType.type, group.id
+    )
+
+    application_created_message = await get_message(communicator, "group_restored")
+    assert application_created_message["applications"][0]["tables"] == []
+    await communicator.disconnect()
+
+
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.asyncio
+async def test_database_restored_tables_not_leaking(data_fixture):
+    user = data_fixture.create_user()
+    user_excluded, token = data_fixture.create_user_and_token()
+    group = data_fixture.create_group(user=user, members=[user_excluded])
+    database = data_fixture.create_database_application(group=group)
+    table = data_fixture.create_database_table(database=database)
+    no_access_role = Role.objects.get(uid="NO_ACCESS")
+
+    await sync_to_async(RoleAssignmentHandler().assign_role)(
+        user_excluded, group, no_access_role, table
+    )
+
+    # Establish websocket connection and subscribe to table
+    communicator = WebsocketCommunicator(
+        application,
+        f"ws/core/?jwt_token={token}",
+        headers=[(b"origin", b"http://localhost")],
+    )
+    await communicator.connect()
+    await communicator.receive_json_from()
+
+    await sync_to_async(CoreHandler().delete_application)(user, database)
+    await sync_to_async(TrashHandler().restore_item)(
+        user, ApplicationTrashableItemType.type, database.id
+    )
+
+    application_created_message = await get_message(communicator, "application_created")
+    assert application_created_message["application"]["tables"] == []
+    await communicator.disconnect()
@@ -306,7 +306,11 @@ export class RealTimeHandler {
     this.registerEvent('applications_reordered', ({ store }, data) => {
       const group = store.getters['group/get'](data.group_id)
       if (group !== undefined) {
-        store.commit('application/ORDER_ITEMS', { group, order: data.order })
+        store.commit('application/ORDER_ITEMS', {
+          group,
+          order: data.order,
+          isHashed: true,
+        })
       }
     })
   }
@@ -2,6 +2,7 @@ import { StoreItemLookupError } from '@baserow/modules/core/errors'
 import ApplicationService from '@baserow/modules/core/services/application'
 import { clone } from '@baserow/modules/core/utils/object'
 import { CORE_ACTION_SCOPES } from '@baserow/modules/core/utils/undoRedoConstants'
+import { generateHash } from '@baserow/modules/core/utils/hashing'

 export function populateApplication(application, registry) {
   const type = registry.get('application', application.type)

@@ -46,11 +47,12 @@ export const mutations = {
       Object.assign(state.items[index], state.items[index], values)
     }
   },
-  ORDER_ITEMS(state, { group, order }) {
+  ORDER_ITEMS(state, { group, order, isHashed = false }) {
     state.items
       .filter((item) => item.group.id === group.id)
       .forEach((item) => {
-        const index = order.findIndex((value) => value === item.id)
+        const itemId = isHashed ? generateHash(item.id) : item.id
+        const index = order.findIndex((value) => value === itemId)
         item.order = index === -1 ? undefined : index + 1
       })
   },

@@ -209,13 +211,16 @@ export const actions = {
   /**
    * Updates the order of all the applications in a group.
    */
-  async order({ commit, getters }, { group, order, oldOrder }) {
-    commit('ORDER_ITEMS', { group, order })
+  async order(
+    { commit, getters },
+    { group, order, oldOrder, isHashed = false }
+  ) {
+    commit('ORDER_ITEMS', { group, order, isHashed })

     try {
       await ApplicationService(this.$client).order(group.id, order)
     } catch (error) {
-      commit('ORDER_ITEMS', { group, order: oldOrder })
+      commit('ORDER_ITEMS', { group, order: oldOrder, isHashed })
       throw error
     }
   },
web-frontend/modules/core/utils/hashing.js (new file, 5 lines)
@@ -0,0 +1,5 @@
+import crypto from 'crypto'
+
+export function generateHash(value) {
+  return crypto.createHash('sha256').update(value.toString()).digest('hex')
+}
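Both hashing implementations operate on the decimal string form of the id, so the backend and frontend digests agree. A quick parity check from the Python side (assuming a Baserow shell where `generate_hash` from `baserow.core.utils` is importable):

    import hashlib

    from baserow.core.utils import generate_hash

    # crypto.createHash('sha256').update((42).toString()).digest('hex') in the
    # frontend yields the same hex string as this backend call:
    assert generate_hash(42) == hashlib.sha256(b"42").hexdigest()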
@@ -1,5 +1,6 @@
 import { clone } from '@baserow/modules/core/utils/object'
 import { anyFieldsNeedFetch } from '@baserow/modules/database/store/field'
+import { generateHash } from '@baserow/modules/core/utils/hashing'

 /**
  * Registers the real time events related to the database module. When a message comes

@@ -34,9 +35,15 @@ export const registerRealtimeEvents = (realtime) => {
   })

   realtime.registerEvent('tables_reordered', ({ store, app }, data) => {
-    const database = store.getters['application/get'](data.database_id)
+    const database = store.getters['application/getAll'].find(
+      (application) => generateHash(application.id) === data.database_id
+    )
     if (database !== undefined) {
-      store.commit('table/ORDER_TABLES', { database, order: data.order })
+      store.commit('table/ORDER_TABLES', {
+        database,
+        order: data.order,
+        isHashed: true,
+      })
     }
   })
@@ -4,6 +4,7 @@ import { StoreItemLookupError } from '@baserow/modules/core/errors'
 import TableService from '@baserow/modules/database/services/table'
 import { DatabaseApplicationType } from '@baserow/modules/database/applicationTypes'
 import { DATABASE_ACTION_SCOPES } from '@baserow/modules/database/utils/undoRedoConstants'
+import { generateHash } from '@baserow/modules/core/utils/hashing'

 export function populateTable(table) {
   table._ = {

@@ -28,9 +29,10 @@ export const mutations = {
   UPDATE_ITEM(state, { table, values }) {
     Object.assign(table, table, values)
   },
-  ORDER_TABLES(state, { database, order }) {
+  ORDER_TABLES(state, { database, order, isHashed = false }) {
     database.tables.forEach((table) => {
-      const index = order.findIndex((value) => value === table.id)
+      const tableId = isHashed ? generateHash(table.id) : table.id
+      const index = order.findIndex((value) => value === tableId)
       table.order = index === -1 ? 0 : index + 1
     })
   },

@@ -148,13 +150,16 @@ export const actions = {
   /**
    * Updates the order of all the tables in a database.
    */
-  async order({ commit, getters }, { database, order, oldOrder }) {
-    commit('ORDER_TABLES', { database, order })
+  async order(
+    { commit, getters },
+    { database, order, oldOrder, isHashed = false }
+  ) {
+    commit('ORDER_TABLES', { database, order, isHashed })

     try {
       await TableService(this.$client).order(database.id, order)
     } catch (error) {
-      commit('ORDER_TABLES', { database, order: oldOrder })
+      commit('ORDER_TABLES', { database, order: oldOrder, isHashed })
       throw error
     }
   },