1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-11 07:51:20 +00:00

Merge branch '151-real-time-collaboration' into 'develop'

Resolve "Real time collaboration"

Closes 

See merge request 
This commit is contained in:
Bram Wiepjes 2021-01-24 14:53:06 +00:00
commit de58eeef6f
100 changed files with 3288 additions and 156 deletions
backend
changelog.mddocker-compose.demo.ymldocker-compose.yml
docs
getting-started
guides/installation
index.md
start_osx.sh
web-frontend/modules

View file

@ -15,4 +15,5 @@ RUN apt-get -y install gnupg2
RUN make install-dependencies
ENTRYPOINT python src/baserow/manage.py migrate && \
gunicorn --workers=3 -b 0.0.0.0:8000 baserow.config.wsgi
celery -A baserow worker -l INFO --detach && \
gunicorn --workers=3 -b 0.0.0.0:8000 -k uvicorn.workers.UvicornWorker baserow.config.asgi:application

View file

@ -7,8 +7,12 @@ mysqlclient==1.4.6
ipython==7.13.0
Faker==4.0.2
gunicorn==20.0.4
uvicorn[standard]==0.13.3
django-mjml==0.9.0
requests==2.25.0
itsdangerous==1.1.0
drf-spectacular==0.9.12
Pillow==8.0.1
channels==3.0.3
channels-redis==3.2.0
celery[redis]==5.0.5

View file

@ -1,5 +1,9 @@
flake8==3.7.9
pytest-django>=3.5.0
pytest-env==0.6.2
pytest-asyncio==0.14.0
pytest-ordering==0.6
freezegun==0.3.15
responses==0.12.0
watchdog==1.0.2
argh==0.26.2

View file

@ -0,0 +1,4 @@
# Standard Celery/Django integration: ensure the Celery app is imported when
# Django starts so that shared_task decorators bind to it.
from baserow.config.celery import app as celery_app

__all__ = ['celery_app']

View file

@ -42,6 +42,9 @@ class JSONWebTokenAuthentication(JWTJSONWebTokenAuthentication):
user = self.authenticate_credentials(payload)
# @TODO this should actually somehow be moved to the ws app.
user.web_socket_id = request.headers.get('WebSocketId')
return user, jwt_value

View file

@ -0,0 +1,15 @@
import django
from channels.http import AsgiHandler
from channels.routing import ProtocolTypeRouter

from baserow.ws.routers import websocket_router

# Django must be set up before the websocket router below imports any models.
django.setup()

# ASGI entry point: plain HTTP requests are served by Django through Channels'
# AsgiHandler, while websocket connections are delegated to the ws app router.
application = ProtocolTypeRouter({
    'http': AsgiHandler(),
    'websocket': websocket_router
})

View file

@ -0,0 +1,6 @@
from celery import Celery

# Celery application used for asynchronous work (e.g. broadcasting real time
# messages). Settings are read from Django settings using the `CELERY_`
# namespace and tasks are auto-discovered from the installed Django apps.
app = Celery('baserow')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

View file

@ -2,6 +2,8 @@ import os
import datetime
from urllib.parse import urlparse, urljoin
from corsheaders.defaults import default_headers
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@ -22,11 +24,13 @@ INSTALLED_APPS = [
'rest_framework',
'corsheaders',
'channels',
'mjml',
'drf_spectacular',
'baserow.core',
'baserow.api',
'baserow.ws',
'baserow.contrib.database'
]
@ -60,6 +64,27 @@ TEMPLATES = [
]
WSGI_APPLICATION = 'baserow.config.wsgi.application'
ASGI_APPLICATION = 'baserow.config.asgi.application'
REDIS_HOST = os.getenv('REDIS_HOST', 'redis')
REDIS_PORT = os.getenv('REDIS_PORT', '6379')
REDIS_USERNAME = os.getenv('REDIS_USER', '')
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
REDIS_PROTOCOL = os.getenv('REDIS_PROTOCOL', 'redis')
REDIS_URL = (
f'{REDIS_PROTOCOL}://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
)
CELERY_BROKER_URL = REDIS_URL
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [REDIS_URL],
},
},
}
# Database
@ -134,6 +159,10 @@ REST_FRAMEWORK = {
}
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_HEADERS = list(default_headers) + [
'WebSocketId',
]
JWT_AUTH = {
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=60 * 60),

View file

@ -1,6 +1,16 @@
from .base import * # noqa: F403, F401
CELERY_BROKER_BACKEND = 'memory'
CELERY_TASK_ALWAYS_EAGER = True
CELERY_TASK_EAGER_PROPAGATES = True
CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels.layers.InMemoryChannelLayer"
}
}
USER_FILES_DIRECTORY = 'user_files'
USER_THUMBNAILS_DIRECTORY = 'thumbnails'
USER_THUMBNAILS = {'tiny': [21, 21]}

View file

@ -17,11 +17,10 @@ class FieldSerializer(serializers.ModelSerializer):
class Meta:
model = Field
fields = ('id', 'name', 'order', 'type', 'primary')
fields = ('id', 'table_id', 'name', 'order', 'type', 'primary')
extra_kwargs = {
'id': {
'read_only': True
}
'id': {'read_only': True},
'table_id': {'read_only': True},
}
@extend_schema_field(OpenApiTypes.STR)

View file

@ -20,7 +20,8 @@ class RowSerializer(serializers.ModelSerializer):
}
def get_row_serializer_class(model, base_class=None, is_response=False):
def get_row_serializer_class(model, base_class=None, is_response=False,
field_ids=None):
"""
Generates a Django rest framework model serializer based on the available fields
that belong to this model. For each table field, used to generate this serializer,
@ -36,18 +37,27 @@ def get_row_serializer_class(model, base_class=None, is_response=False):
instead of handling input data. If that is the case other serializer fields
might be used depending on the field type.
:type is_response: bool
:param field_ids: If provided only the field ids in the list will be included in
the serializer. By default all the fields of the model are going to be
included. Note that the field id must exist in the model in order to work.
:type field_ids: list or None
:return: The generated serializer.
:rtype: ModelSerializer
"""
field_objects = model._field_objects
field_names = [field['name'] for field in field_objects.values()]
field_names = [
field['name']
for field in field_objects.values()
if field_ids is None or field['field'].id in field_ids
]
field_overrides = {
field['name']:
field['type'].get_response_serializer_field(field['field'])
if is_response else
field['type'].get_serializer_field(field['field'])
for field in field_objects.values()
if field_ids is None or field['field'].id in field_ids
}
return get_serializer_class(model, field_names, field_overrides, base_class)

View file

@ -429,14 +429,9 @@ class RowView(APIView):
table = TableHandler().get_table(request.user, table_id)
TokenHandler().check_table_permissions(request, 'update', table, False)
# Small side effect of generating the model for only the fields that need to
# change is that the response it not going to contain the other fields. It is
# however much faster because it doesn't need to get the specific version of
# all the field objects.
field_ids = RowHandler().extract_field_ids_from_dict(request.data)
model = table.get_model(field_ids=field_ids)
validation_serializer = get_row_serializer_class(model)
model = table.get_model()
validation_serializer = get_row_serializer_class(model, field_ids=field_ids)
data = validate_data(validation_serializer, request.data)
row = RowHandler().update_row(request.user, table, row_id, data, model)

View file

@ -6,11 +6,10 @@ from baserow.contrib.database.table.models import Table
class TableSerializer(serializers.ModelSerializer):
class Meta:
model = Table
fields = ('id', 'name', 'order',)
fields = ('id', 'name', 'order', 'database_id',)
extra_kwargs = {
'id': {
'read_only': True
},
'id': {'read_only': True},
'database_id': {'read_only': True},
'order': {
'help_text': 'Lowest first.'
}

View file

@ -252,5 +252,9 @@ class GridViewView(APIView):
handler = ViewHandler()
view = handler.get_view(request.user, view_id, GridView)
handler.update_grid_view_field_options(view, data['field_options'])
handler.update_grid_view_field_options(
request.user,
view,
data['field_options']
)
return Response(GridViewSerializer(view).data)

View file

@ -17,9 +17,7 @@ class ViewFilterSerializer(serializers.ModelSerializer):
model = ViewFilter
fields = ('id', 'view', 'field', 'type', 'value')
extra_kwargs = {
'id': {
'read_only': True
}
'id': {'read_only': True}
}
@ -58,9 +56,7 @@ class ViewSortSerializer(serializers.ModelSerializer):
model = ViewSort
fields = ('id', 'view', 'field', 'order')
extra_kwargs = {
'id': {
'read_only': True
}
'id': {'read_only': True}
}
@ -91,12 +87,11 @@ class ViewSerializer(serializers.ModelSerializer):
class Meta:
model = View
fields = ('id', 'name', 'order', 'type', 'table', 'filter_type', 'filters',
'sortings', 'filters_disabled')
fields = ('id', 'table_id', 'name', 'order', 'type', 'table', 'filter_type',
'filters', 'sortings', 'filters_disabled')
extra_kwargs = {
'id': {
'read_only': True
}
'id': {'read_only': True},
'table_id': {'read_only': True}
}
def __init__(self, *args, **kwargs):

View file

@ -1,6 +1,7 @@
from django.apps import AppConfig
from baserow.core.registries import plugin_registry, application_type_registry
from baserow.ws.registries import page_registry
from .views.registries import view_type_registry, view_filter_type_registry
from .fields.registries import field_type_registry, field_converter_registry
@ -89,3 +90,10 @@ class DatabaseConfig(AppConfig):
from .application_types import DatabaseApplicationType
application_type_registry.register(DatabaseApplicationType())
from .ws.pages import TablePageType
page_registry.register(TablePageType())
# The signals must always be imported last because they use the registries
# which need to be filled first.
import baserow.contrib.database.ws.signals # noqa: F403, F401

View file

@ -3,6 +3,15 @@ from django.db.models.fields.related_descriptors import ForwardManyToOneDescript
class SingleSelectForwardManyToOneDescriptor(ForwardManyToOneDescriptor):
def get_queryset(self, **hints):
"""
We specifically want to return a new query set without the provided hints
because the related table could be in another database and that could fail
otherwise.
"""
return self.field.remote_field.model.objects.all()
def get_object(self, instance):
"""
Tries to fetch the reference object, but if it fails because it doesn't exist,

View file

@ -16,6 +16,7 @@ from .exceptions import (
)
from .registries import field_type_registry, field_converter_registry
from .models import Field, SelectOption
from .signals import field_created, field_updated, field_deleted
logger = logging.getLogger(__name__)
@ -126,6 +127,9 @@ class FieldHandler:
field_type.after_create(instance, to_model, user, connection, before)
field_created.send(self, field=instance, user=user,
type_name=type_name)
return instance
def update_field(self, user, field, new_type_name=None, **kwargs):
@ -261,6 +265,8 @@ class FieldHandler:
field_type.after_update(old_field, field, from_model, to_model, user,
connection, altered_column, before)
field_updated.send(self, field=field, user=user)
return field
def delete_field(self, user, field):
@ -298,12 +304,15 @@ class FieldHandler:
model_field = from_model._meta.get_field(field.db_column)
schema_editor.remove_field(from_model, model_field)
field_id = field.id
field.delete()
# After the field is deleted we are going to to call the after_delete method of
# the field type because some instance cleanup might need to happen.
field_type.after_delete(field, from_model, user, connection)
field_deleted.send(self, field_id=field_id, field=field, user=user)
def update_field_select_options(self, user, field, select_options):
"""
Brings the select options in the desired provided state in a query efficient

View file

@ -0,0 +1,6 @@
from django.dispatch import Signal

# Sent by the FieldHandler after a field has been created, updated or deleted
# so other apps (e.g. the ws app) can react, for example by broadcasting a
# real time message to collaborators on the same table.
field_created = Signal()
field_updated = Signal()
field_deleted = Signal()

View file

@ -46,6 +46,7 @@ class DatabasePlugin(Plugin):
user, table, BooleanFieldType.type, name='Active'
)
view_handler.update_grid_view_field_options(
user,
customers_view,
{
notes_field.id: {'width': 400},
@ -87,6 +88,7 @@ class DatabasePlugin(Plugin):
model.objects.create(name='Amazon', active=False, started=date(2018, 1, 1),
order=3)
view_handler.update_grid_view_field_options(
user,
projects_view,
{active_field.id: {'width': 100}},
fields=[active_field]

View file

@ -11,6 +11,7 @@ from baserow.core.exceptions import UserNotInGroupError
from baserow.contrib.database.fields.models import Field
from .exceptions import RowDoesNotExist
from .signals import row_created, row_updated, row_deleted
class RowHandler:
@ -234,6 +235,9 @@ class RowHandler:
for name, value in manytomany_values.items():
getattr(instance, name).set(value)
row_created.send(self, row=instance, before=before, user=user, table=table,
model=model)
return instance
def update_row(self, user, table, row_id, values, model=None):
@ -262,8 +266,7 @@ class RowHandler:
raise UserNotInGroupError(user, group)
if not model:
field_ids = self.extract_field_ids_from_dict(values)
model = table.get_model(field_ids=field_ids)
model = table.get_model()
# Because it is possible to have a different database for the user tables we
# need to start another transaction here, otherwise it is not possible to use
@ -285,6 +288,8 @@ class RowHandler:
for name, value in manytomany_values.items():
getattr(row, name).set(value)
row_updated.send(self, row=row, user=user, table=table, model=model)
return row
def delete_row(self, user, table, row_id):
@ -312,4 +317,8 @@ class RowHandler:
except model.DoesNotExist:
raise RowDoesNotExist(f'The row with id {row_id} does not exist.')
row_id = row.id
row.delete()
row_deleted.send(self, row_id=row_id, row=row, user=user, table=table,
model=model)

View file

@ -0,0 +1,6 @@
from django.dispatch import Signal

# Sent by the RowHandler after a row has been created, updated or deleted so
# other apps (e.g. the ws app) can broadcast the change to collaborators.
row_created = Signal()
row_updated = Signal()
row_deleted = Signal()

View file

@ -15,6 +15,7 @@ from .models import Table
from .exceptions import (
TableDoesNotExist, InvalidInitialTableData, InitialTableDataLimitExceeded
)
from .signals import table_created, table_updated, table_deleted
class TableHandler:
@ -114,6 +115,8 @@ class TableHandler:
elif fill_example:
self.fill_example_table_data(user, table)
table_created.send(self, table=table, user=user)
return table
def normalize_initial_table_data(self, data, first_row_header):
@ -212,7 +215,8 @@ class TableHandler:
active.id: {'width': 100}
}
fields = [notes, active]
view_handler.update_grid_view_field_options(view, field_options, fields=fields)
view_handler.update_grid_view_field_options(user, view, field_options,
fields=fields)
model = table.get_model(attribute_names=True)
model.objects.create(name='Tesla', active=True, order=1)
@ -243,6 +247,8 @@ class TableHandler:
table = set_allowed_attrs(kwargs, ['name'], table)
table.save()
table_updated.send(self, table=table, user=user)
return table
def delete_table(self, user, table):
@ -263,6 +269,8 @@ class TableHandler:
if not table.database.group.has_user(user):
raise UserNotInGroupError(user, table.database.group)
table_id = table.id
# Delete the table schema from the database.
connection = connections[settings.USER_TABLE_DATABASE]
with connection.schema_editor() as schema_editor:
@ -270,3 +278,5 @@ class TableHandler:
schema_editor.delete_model(model)
table.delete()
table_deleted.send(self, table_id=table_id, table=table, user=user)

View file

@ -0,0 +1,6 @@
from django.dispatch import Signal

# Sent by the TableHandler after a table has been created, updated or deleted
# so other apps (e.g. the ws app) can broadcast the change to the group.
table_created = Signal()
table_updated = Signal()
table_deleted = Signal()

View file

@ -15,6 +15,11 @@ from .registries import view_type_registry, view_filter_type_registry
from .models import (
View, GridViewFieldOptions, ViewFilter, ViewSort, FILTER_TYPE_AND, FILTER_TYPE_OR
)
from .signals import (
view_created, view_updated, view_deleted, view_filter_created, view_filter_updated,
view_filter_deleted, view_sort_created, view_sort_updated, view_sort_deleted,
grid_view_field_options_updated
)
class ViewHandler:
@ -95,6 +100,9 @@ class ViewHandler:
instance = model_class.objects.create(table=table, order=last_order,
**view_values)
view_created.send(self, view=instance, user=user,
type_name=type_name)
return instance
def update_view(self, user, view, **kwargs):
@ -129,6 +137,8 @@ class ViewHandler:
view = set_allowed_attrs(kwargs, allowed_fields, view)
view.save()
view_updated.send(self, view=view, user=user)
return view
def delete_view(self, user, view):
@ -150,13 +160,19 @@ class ViewHandler:
if not group.has_user(user):
raise UserNotInGroupError(user, group)
view_id = view.id
view.delete()
def update_grid_view_field_options(self, grid_view, field_options, fields=None):
view_deleted.send(self, view_id=view_id, view=view, user=user)
def update_grid_view_field_options(self, user, grid_view, field_options,
fields=None):
"""
Updates the field options with the provided values if the field id exists in
the table related to the grid view.
:param user: The user on whose behalf the request is made.
:type user: User
:param grid_view: The grid view for which the field options need to be updated.
:type grid_view: Model
:param field_options: A dict with the field ids as the key and a dict
@ -181,6 +197,8 @@ class ViewHandler:
grid_view=grid_view, field_id=field_id, defaults=options
)
grid_view_field_options_updated.send(self, grid_view=grid_view, user=user)
def field_type_changed(self, field):
"""
This method is called by the FieldHandler when the field type of a field has
@ -348,13 +366,17 @@ class ViewHandler:
raise FieldNotInTable(f'The field {field.pk} does not belong to table '
f'{view.table.id}.')
return ViewFilter.objects.create(
view_filter = ViewFilter.objects.create(
view=view,
field=field,
type=view_filter_type.type,
value=value
)
view_filter_created.send(self, view_filter=view_filter, user=user)
return view_filter
def update_filter(self, user, view_filter, **kwargs):
"""
Updates the values of an existing view filter.
@ -405,6 +427,8 @@ class ViewHandler:
view_filter.type = type_name
view_filter.save()
view_filter_updated.send(self, view_filter=view_filter, user=user)
return view_filter
def delete_filter(self, user, view_filter):
@ -422,8 +446,12 @@ class ViewHandler:
if not group.has_user(user):
raise UserNotInGroupError(user, group)
view_filter_id = view_filter.id
view_filter.delete()
view_filter_deleted.send(self, view_filter_id=view_filter_id,
view_filter=view_filter, user=user)
def apply_sorting(self, view, queryset):
"""
Applies the view's sorting to the given queryset. The first sort, which for now
@ -575,12 +603,16 @@ class ViewHandler:
raise ViewSortFieldAlreadyExist(f'A sort with the field {field.pk} '
f'already exists.')
return ViewSort.objects.create(
view_sort = ViewSort.objects.create(
view=view,
field=field,
order=order
)
view_sort_created.send(self, view_sort=view_sort, user=user)
return view_sort
def update_sort(self, user, view_sort, **kwargs):
"""
Updates the values of an existing view sort.
@ -636,6 +668,8 @@ class ViewHandler:
view_sort.order = order
view_sort.save()
view_sort_updated.send(self, view_sort=view_sort, user=user)
return view_sort
def delete_sort(self, user, view_sort):
@ -653,4 +687,8 @@ class ViewHandler:
if not group.has_user(user):
raise UserNotInGroupError(user, group)
view_sort_id = view_sort.id
view_sort.delete()
view_sort_deleted.send(self, view_sort_id=view_sort_id, view_sort=view_sort,
user=user)

View file

@ -0,0 +1,16 @@
from django.dispatch import Signal

# Sent by the ViewHandler after views, view filters, view sorts or grid view
# field options change, so other apps (e.g. the ws app) can broadcast the
# change to everyone subscribed to the related table page.
view_created = Signal()
view_updated = Signal()
view_deleted = Signal()

view_filter_created = Signal()
view_filter_updated = Signal()
view_filter_deleted = Signal()

view_sort_created = Signal()
view_sort_updated = Signal()
view_sort_deleted = Signal()

grid_view_field_options_updated = Signal()

View file

@ -0,0 +1,52 @@
from django.dispatch import receiver
from django.db import transaction
from baserow.ws.registries import page_registry
from baserow.contrib.database.fields import signals as field_signals
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.api.fields.serializers import FieldSerializer
@receiver(field_signals.field_created)
def field_created(sender, field, user, **kwargs):
    # Broadcast only after the transaction commits so subscribers never see a
    # field that could still be rolled back.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'field_created',
            'field': field_type_registry.get_serializer(
                field,
                FieldSerializer
            ).data
        },
        # The originating web socket is excluded so the sender does not
        # receive its own change.
        getattr(user, 'web_socket_id', None),
        table_id=field.table_id
    ))


@receiver(field_signals.field_updated)
def field_updated(sender, field, user, **kwargs):
    # Broadcast the updated field to the table page after commit.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'field_updated',
            'field_id': field.id,
            'field': field_type_registry.get_serializer(field, FieldSerializer).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=field.table_id
    ))


@receiver(field_signals.field_deleted)
def field_deleted(sender, field_id, field, user, **kwargs):
    # The field instance is already deleted; only ids are broadcast so clients
    # can remove it locally.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'field_deleted',
            'table_id': field.table_id,
            'field_id': field_id
        },
        getattr(user, 'web_socket_id', None),
        table_id=field.table_id
    ))

View file

@ -0,0 +1,30 @@
from baserow.ws.registries import PageType
from baserow.core.exceptions import UserNotInGroupError
from baserow.contrib.database.table.handler import TableHandler
from baserow.contrib.database.table.exceptions import TableDoesNotExist
class TablePageType(PageType):
    """Real time page scoped to a single table."""

    type = 'table'
    parameters = ['table_id']

    def can_add(self, user, web_socket_id, table_id, **kwargs):
        """
        Only allows the subscription when the table exists and the user has
        access to it.
        """

        if not table_id:
            return False

        try:
            TableHandler().get_table(user, table_id)
            return True
        except (UserNotInGroupError, TableDoesNotExist):
            return False

    def get_group_name(self, table_id, **kwargs):
        """Returns the channel layer group name for the given table."""

        return f'table-{table_id}'

View file

@ -0,0 +1,60 @@
from django.dispatch import receiver
from django.db import transaction
from baserow.ws.registries import page_registry
from baserow.contrib.database.rows import signals as row_signals
from baserow.contrib.database.api.rows.serializers import (
get_row_serializer_class, RowSerializer
)
@receiver(row_signals.row_created)
def row_created(sender, row, before, user, table, model, **kwargs):
    # Broadcast after commit so subscribers never see a row that could still
    # be rolled back. The row is serialized with the generated model
    # serializer so only the fields of this table are included.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'row_created',
            'table_id': table.id,
            'row': get_row_serializer_class(
                model,
                RowSerializer,
                is_response=True
            )(row).data,
            # Tells the client where to insert the row; None means at the end.
            'before_row_id': before.id if before else None
        },
        # The originating web socket is excluded from the broadcast.
        getattr(user, 'web_socket_id', None),
        table_id=table.id
    ))


@receiver(row_signals.row_updated)
def row_updated(sender, row, user, table, model, **kwargs):
    # Broadcast the fully serialized updated row to the table page.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'row_updated',
            'table_id': table.id,
            'row': get_row_serializer_class(
                model,
                RowSerializer,
                is_response=True
            )(row).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=table.id
    ))


@receiver(row_signals.row_deleted)
def row_deleted(sender, row_id, row, user, table, model, **kwargs):
    # The row is already deleted; only the ids are broadcast so clients can
    # remove it locally.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'row_deleted',
            'table_id': table.id,
            'row_id': row_id
        },
        getattr(user, 'web_socket_id', None),
        table_id=table.id
    ))

View file

@ -0,0 +1,12 @@
# Aggregates all database signals into one module so consumers can simply
# import them from `baserow.contrib.database.signals`.
from .table.signals import table_created, table_updated, table_deleted
from .views.signals import view_created, view_updated, view_deleted
from .rows.signals import row_created, row_updated, row_deleted
from .fields.signals import field_created, field_updated, field_deleted

__all__ = [
    'table_created', 'table_updated', 'table_deleted',
    'view_created', 'view_updated', 'view_deleted',
    'row_created', 'row_updated', 'row_deleted',
    'field_created', 'field_updated', 'field_deleted'
]

View file

@ -0,0 +1,45 @@
from django.dispatch import receiver
from django.db import transaction
from baserow.contrib.database.table import signals as table_signals
from baserow.contrib.database.api.tables.serializers import TableSerializer
from baserow.ws.tasks import broadcast_to_group
@receiver(table_signals.table_created)
def table_created(sender, table, user, **kwargs):
    # Table changes are broadcast to the whole group (not a table page) via a
    # Celery task, and only after the transaction commits.
    transaction.on_commit(lambda: broadcast_to_group.delay(
        table.database.group_id,
        {
            'type': 'table_created',
            'table': TableSerializer(table).data
        },
        # The originating web socket is excluded from the broadcast.
        getattr(user, 'web_socket_id', None)
    ))


@receiver(table_signals.table_updated)
def table_updated(sender, table, user, **kwargs):
    # Broadcast the serialized updated table to all group members.
    transaction.on_commit(lambda: broadcast_to_group.delay(
        table.database.group_id,
        {
            'type': 'table_updated',
            'table_id': table.id,
            'table': TableSerializer(table).data
        },
        getattr(user, 'web_socket_id', None)
    ))


@receiver(table_signals.table_deleted)
def table_deleted(sender, table_id, table, user, **kwargs):
    # The table is already deleted; only ids are broadcast so clients can
    # remove it locally.
    transaction.on_commit(lambda: broadcast_to_group.delay(
        table.database.group_id,
        {
            'type': 'table_deleted',
            'database_id': table.database_id,
            'table_id': table_id
        },
        getattr(user, 'web_socket_id', None)
    ))

View file

@ -0,0 +1,163 @@
from django.dispatch import receiver
from django.db import transaction
from baserow.ws.registries import page_registry
from baserow.contrib.database.views import signals as view_signals
from baserow.contrib.database.views.registries import view_type_registry
from baserow.contrib.database.api.views.serializers import (
ViewSerializer, ViewFilterSerializer, ViewSortSerializer
)
from baserow.contrib.database.api.views.grid.serializers import GridViewSerializer
@receiver(view_signals.view_created)
def view_created(sender, view, user, **kwargs):
    # All view broadcasts go to the table page and only after the transaction
    # commits; the originating web socket id is excluded in every broadcast.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_created',
            'view': view_type_registry.get_serializer(
                view,
                ViewSerializer,
                # A newly created view has its complete state serialized,
                # including filters and sortings.
                filters=True,
                sortings=True
            ).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view.table_id
    ))


@receiver(view_signals.view_updated)
def view_updated(sender, view, user, **kwargs):
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_updated',
            'view_id': view.id,
            'view': view_type_registry.get_serializer(
                view,
                ViewSerializer,
                # We do not want to broadcast the filters and sortings every
                # time the view changes. There are separate views and handlers
                # for them, each will broadcast their own message.
                filters=False,
                sortings=False
            ).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view.table_id
    ))


@receiver(view_signals.view_deleted)
def view_deleted(sender, view_id, view, user, **kwargs):
    # The view is already deleted; only ids are broadcast.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_deleted',
            'table_id': view.table_id,
            'view_id': view_id
        },
        getattr(user, 'web_socket_id', None),
        table_id=view.table_id
    ))


@receiver(view_signals.view_filter_created)
def view_filter_created(sender, view_filter, user, **kwargs):
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_filter_created',
            'view_filter': ViewFilterSerializer(view_filter).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_filter.view.table_id
    ))


@receiver(view_signals.view_filter_updated)
def view_filter_updated(sender, view_filter, user, **kwargs):
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_filter_updated',
            'view_filter_id': view_filter.id,
            'view_filter': ViewFilterSerializer(view_filter).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_filter.view.table_id
    ))


@receiver(view_signals.view_filter_deleted)
def view_filter_deleted(sender, view_filter_id, view_filter, user, **kwargs):
    # The filter is already deleted; only ids are broadcast.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_filter_deleted',
            'view_id': view_filter.view_id,
            'view_filter_id': view_filter_id
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_filter.view.table_id
    ))


@receiver(view_signals.view_sort_created)
def view_sort_created(sender, view_sort, user, **kwargs):
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_sort_created',
            'view_sort': ViewSortSerializer(view_sort).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_sort.view.table_id
    ))


@receiver(view_signals.view_sort_updated)
def view_sort_updated(sender, view_sort, user, **kwargs):
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_sort_updated',
            'view_sort_id': view_sort.id,
            'view_sort': ViewSortSerializer(view_sort).data
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_sort.view.table_id
    ))


@receiver(view_signals.view_sort_deleted)
def view_sort_deleted(sender, view_sort_id, view_sort, user, **kwargs):
    # The sort is already deleted; only ids are broadcast.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'view_sort_deleted',
            'view_id': view_sort.view_id,
            'view_sort_id': view_sort_id
        },
        getattr(user, 'web_socket_id', None),
        table_id=view_sort.view.table_id
    ))


@receiver(view_signals.grid_view_field_options_updated)
def grid_view_field_options_updated(sender, grid_view, user, **kwargs):
    # Only the `field_options` part of the serialized grid view is broadcast.
    table_page_type = page_registry.get('table')
    transaction.on_commit(lambda: table_page_type.broadcast(
        {
            'type': 'grid_view_field_options_updated',
            'grid_view_id': grid_view.id,
            'grid_view_field_options': GridViewSerializer(
                grid_view
            ).data['field_options']
        },
        getattr(user, 'web_socket_id', None),
        table_id=grid_view.table_id
    ))

View file

@ -3,6 +3,10 @@ from .exceptions import UserNotInGroupError
from .utils import extract_allowed, set_allowed_attrs
from .registries import application_type_registry
from .exceptions import GroupDoesNotExist, ApplicationDoesNotExist
from .signals import (
application_created, application_updated, application_deleted, group_created,
group_updated, group_deleted
)
class CoreHandler:
@ -83,6 +87,8 @@ class CoreHandler:
last_order = GroupUser.get_last_order(user)
group_user = GroupUser.objects.create(group=group, user=user, order=last_order)
group_created.send(self, group=group, user=user)
return group_user
def update_group(self, user, group, **kwargs):
@ -108,6 +114,8 @@ class CoreHandler:
group = set_allowed_attrs(kwargs, ['name'], group)
group.save()
group_updated.send(self, group=group, user=user)
return group
def delete_group(self, user, group):
@ -128,6 +136,11 @@ class CoreHandler:
if not group.has_user(user):
raise UserNotInGroupError(user, group)
# Load the group users before the group is deleted so that we can pass those
# along with the signal.
group_id = group.id
group_users = list(group.users.all())
# Select all the applications so we can delete them via the handler which is
# needed in order to call the pre_delete method for each application.
applications = group.application_set.all().select_related('group')
@ -136,6 +149,9 @@ class CoreHandler:
group.delete()
group_deleted.send(self, group_id=group_id, group=group,
group_users=group_users, user=user)
def order_groups(self, user, group_ids):
"""
Changes the order of groups for a user.
@ -217,6 +233,9 @@ class CoreHandler:
instance = model.objects.create(group=group, order=last_order,
**application_values)
application_created.send(self, application=instance, user=user,
type_name=type_name)
return instance
def update_application(self, user, application, **kwargs):
@ -244,6 +263,8 @@ class CoreHandler:
application = set_allowed_attrs(kwargs, ['name'], application)
application.save()
application_updated.send(self, application=application, user=user)
return application
def delete_application(self, user, application):
@ -264,8 +285,12 @@ class CoreHandler:
if not application.group.has_user(user):
raise UserNotInGroupError(user, application.group)
application_id = application.id
application = application.specific
application_type = application_type_registry.get_by_model(application)
application_type.pre_delete(user, application)
application.delete()
application_deleted.send(self, application_id=application_id,
application=application, user=user)

View file

@ -0,0 +1,10 @@
from django.dispatch import Signal

# Sent by the CoreHandler after groups and applications change so other apps
# (e.g. the ws app) can broadcast the change to all group members.
group_created = Signal()
group_updated = Signal()
group_deleted = Signal()

application_created = Signal()
application_updated = Signal()
application_deleted = Signal()

View file

@ -0,0 +1 @@
default_app_config = 'baserow.ws.config.WSConfig'

View file

@ -0,0 +1,72 @@
import jwt
import uuid
from urllib.parse import parse_qs
from django.contrib.auth import get_user_model
from channels.middleware import BaseMiddleware
from channels.db import database_sync_to_async
from rest_framework_jwt.authentication import jwt_decode_handler
from rest_framework_jwt.settings import api_settings
jwt_get_username_from_payload = api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER
@database_sync_to_async
def get_user(token):
    """
    Resolves the user belonging to the provided JWT token. None is returned
    when the token is invalid, the payload contains no username, the user
    does not exist or the user is inactive.

    :param token: The JWT token for which the user must be fetched.
    :type token: str
    :return: The user related to the JWT token.
    :rtype: User or None
    """

    try:
        payload = jwt_decode_handler(token)
    except jwt.InvalidTokenError:
        return None

    username = jwt_get_username_from_payload(payload)
    if not username:
        return None

    user_model = get_user_model()
    try:
        found_user = user_model.objects.get_by_natural_key(username)
    except user_model.DoesNotExist:
        return None

    return found_user if found_user.is_active else None
class JWTTokenAuthMiddleware(BaseMiddleware):
    """
    Channels middleware that authenticates web socket connections. When a
    valid JWT token is provided via the `jwt_token` GET parameter the related
    user object is stored in the scope, together with a freshly generated
    unique web socket id for future identification. Without a token both
    values remain `None`.
    """

    def __init__(self, inner):
        self.inner = inner

    async def __call__(self, scope, receive, send):
        query_parameters = parse_qs(scope['query_string'].decode('utf8'))
        token_values = query_parameters.get('jwt_token')

        scope['user'] = None
        scope['web_socket_id'] = None

        if token_values:
            scope['user'] = await get_user(token_values[0])
            scope['web_socket_id'] = str(uuid.uuid4())

        return await self.inner(scope, receive, send)

View file

@ -0,0 +1,8 @@
from django.apps import AppConfig
class WSConfig(AppConfig):
    # Django application config for the real time web socket app.
    name = 'baserow.ws'

    def ready(self):
        # Importing the signals module connects the receivers that broadcast
        # core changes to connected web socket clients.
        import baserow.ws.signals  # noqa: F403, F401

View file

@ -0,0 +1,143 @@
from channels.db import database_sync_to_async
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from baserow.ws.registries import page_registry
class CoreConsumer(AsyncJsonWebsocketConsumer):
    """
    The main web socket consumer. It reports the authentication state back to
    the client, keeps authenticated connections in the global `users` channel
    group and lets a connection subscribe to exactly one page at a time for
    real time updates.
    """

    async def connect(self):
        """
        Accepts the connection and tells the client whether authentication
        (performed by the auth middleware) succeeded. Unauthenticated
        connections are closed; authenticated ones join the `users` group.
        """

        await self.accept()

        user = self.scope['user']
        web_socket_id = self.scope['web_socket_id']

        await self.send_json({
            'type': 'authentication',
            'success': user is not None,
            'web_socket_id': web_socket_id
        })

        if not user:
            await self.close()
            return

        await self.channel_layer.group_add('users', self.channel_name)

    async def receive_json(self, content, **parameters):
        """
        Handles an incoming JSON message. Currently only page subscription
        requests (messages containing a `page` key) are supported.
        """

        if 'page' in content:
            await self.add_to_page(content)

    async def add_to_page(self, content):
        """
        Subscribes the connection to a page abstraction. Based on the provided
        page type we can figure out to which page the connection wants to
        subscribe. This is for example used when the user visits a page that
        he might want to receive real time updates for.

        :param content: The provided payload by the user. This should contain
            the page type and additional parameters.
        :type content: dict
        """

        user = self.scope['user']
        web_socket_id = self.scope['web_socket_id']

        # If the user has already joined another page we need to discard that
        # page first before we can join a new one.
        await self.discard_current_page()

        try:
            page_type = page_registry.get(content['page'])
        except page_registry.does_not_exist_exception_class:
            return

        parameters = {
            parameter: content.get(parameter)
            for parameter in page_type.parameters
        }

        # `can_add` hits the database, so it must run in a thread.
        can_add = await database_sync_to_async(page_type.can_add)(
            user,
            web_socket_id,
            **parameters
        )

        if not can_add:
            return

        group_name = page_type.get_group_name(**parameters)
        await self.channel_layer.group_add(group_name, self.channel_name)

        # Remember the subscription so it can be discarded later on.
        self.scope['page'] = page_type
        self.scope['page_parameters'] = parameters

        await self.send_json({
            'type': 'page_add',
            'page': page_type.type,
            'parameters': parameters
        })

    async def discard_current_page(self):
        """
        If the user has subscribed to a page then he will be unsubscribed from
        that page. Does nothing when there is no active subscription.
        """

        page = self.scope.get('page')
        if not page:
            return

        page_type = page.type
        page_parameters = self.scope['page_parameters']

        group_name = page.get_group_name(**self.scope['page_parameters'])
        await self.channel_layer.group_discard(group_name, self.channel_name)

        del self.scope['page']
        del self.scope['page_parameters']

        await self.send_json({
            'type': 'page_discard',
            'page': page_type,
            'parameters': page_parameters
        })

    async def broadcast_to_users(self, event):
        """
        Broadcasts a message to all the users that are in the provided
        user_ids list. Optionally the ignore_web_socket_id is ignored because
        that is often the sender.

        :param event: The event containing the payload, user ids and the web
            socket id that must be ignored.
        :type event: dict
        """

        web_socket_id = self.scope['web_socket_id']
        payload = event['payload']
        user_ids = event['user_ids']
        ignore_web_socket_id = event['ignore_web_socket_id']

        if (
            (not ignore_web_socket_id or ignore_web_socket_id != web_socket_id) and
            self.scope['user'].id in user_ids
        ):
            await self.send_json(payload)

    async def broadcast_to_group(self, event):
        """
        Broadcasts a message to all the users that are in the provided channel
        group name.

        :param event: The event containing the payload, group name and the web
            socket id that must be ignored.
        :type event: dict
        """

        web_socket_id = self.scope['web_socket_id']
        payload = event['payload']
        ignore_web_socket_id = event['ignore_web_socket_id']

        if not ignore_web_socket_id or ignore_web_socket_id != web_socket_id:
            await self.send_json(payload)

    async def disconnect(self, message):
        """
        Cleans up the channel group memberships when the connection closes.
        """

        # BUGFIX: `discard_current_page` is a coroutine and was previously
        # called without `await`, so the page group membership was never
        # cleaned up on disconnect (coroutine never awaited).
        await self.discard_current_page()
        await self.channel_layer.group_discard('users', self.channel_name)

View file

@ -0,0 +1,89 @@
from baserow.core.registry import Instance, Registry
from baserow.ws.tasks import broadcast_to_channel_group
class PageType(Instance):
    """
    A page is an abstraction that users can subscribe to over the web socket
    connection in order to receive real time updates related to it. A user
    subscribes by sending a message to the server containing the type as page
    name and the additional parameters. Example:

    {
        'page': 'database',
        'table_id': 1
    }
    """

    parameters = []
    """
    A list of parameter name strings which are required when calling all methods. If
    for example the parameter `test` is included, then you can expect that parameter
    to be passed in the can_add and get_group_name functions. This way you can create
    dynamic groups.
    """

    def can_add(self, user, web_socket_id, **kwargs):
        """
        Indicates whether the user may be added to the page group. This is the
        place to check, for example, whether the user has access to a related
        group.

        :param user: The user requesting access.
        :type user: User
        :param web_socket_id: The unique web socket id of the user.
        :type web_socket_id: str
        :param kwargs: The additional parameters including their provided values.
        :type kwargs: dict
        :return: Should indicate if the user can join the page (yes=True and no=False).
        :rtype: bool
        """

        raise NotImplementedError(
            'Each web socket page must have his own can_add method.'
        )

    def get_group_name(self, **kwargs):
        """
        Generates the channel layer group name of this page. The core consumer
        uses it to add the user to the correct group of the channel_layer, but
        only after the can_add method has determined that the user is allowed
        to be added to the group.

        :param kwargs: The additional parameters including their provided values.
        :type kwargs: dict
        :return: The unique name of the group. This will be used as parameter to the
            channel_layer.group_add.
        :rtype: str
        """

        raise NotImplementedError(
            'Each web socket page must have his own get_group_name method.'
        )

    def broadcast(self, payload, ignore_web_socket_id=None, **kwargs):
        """
        Broadcasts a payload to everyone within the group.

        :param payload: A payload that must be broad casted to all the users in the
            group.
        :type payload: dict
        :param ignore_web_socket_id: If provided then the payload will not be broad
            casted to that web socket id. This is often the sender.
        :type ignore_web_socket_id: str
        :param kwargs: The additional parameters including their provided values.
        :type kwargs: dict
        """

        # Delegated to a Celery task so the broadcast happens asynchronously.
        group_name = self.get_group_name(**kwargs)
        broadcast_to_channel_group.delay(group_name, payload, ignore_web_socket_id)
class PageRegistry(Registry):
    # Registry holding all the page types that users can subscribe to over
    # the web socket connection.
    name = 'ws_page'


# The default registry instance used throughout the ws app.
page_registry = PageRegistry()

View file

@ -0,0 +1,7 @@
from channels.routing import URLRouter
from .auth import JWTTokenAuthMiddleware
from .routing import websocket_urlpatterns
websocket_router = JWTTokenAuthMiddleware(URLRouter(websocket_urlpatterns))

View file

@ -0,0 +1,8 @@
from django.conf.urls import url
from .consumers import CoreConsumer
# All web socket traffic is handled by the single core consumer.
websocket_urlpatterns = [
    url(r'^ws/core/', CoreConsumer.as_asgi())
]

View file

@ -0,0 +1,82 @@
from django.dispatch import receiver
from django.db import transaction
from baserow.api.groups.serializers import GroupSerializer
from baserow.api.applications.serializers import get_application_serializer
from baserow.core import signals
from .tasks import broadcast_to_group, broadcast_to_users
@receiver(signals.group_created)
def group_created(sender, group, user, **kwargs):
    """Broadcasts the newly created group to all members of that group."""

    def send_message():
        payload = {
            'type': 'group_created',
            'group': GroupSerializer(group).data
        }
        broadcast_to_group.delay(
            group.id, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)
@receiver(signals.group_updated)
def group_updated(sender, group, user, **kwargs):
    """Broadcasts the updated group to all members of that group."""

    def send_message():
        payload = {
            'type': 'group_updated',
            'group_id': group.id,
            'group': GroupSerializer(group).data
        }
        broadcast_to_group.delay(
            group.id, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)
@receiver(signals.group_deleted)
def group_deleted(sender, group_id, group, group_users, user, **kwargs):
    """
    Notifies the former group members that the group has been deleted. The
    user ids must be captured from the signal because the memberships no
    longer exist after the delete.
    """

    def send_message():
        payload = {
            'type': 'group_deleted',
            'group_id': group_id
        }
        member_ids = [u.id for u in group_users]
        broadcast_to_users.delay(
            member_ids, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)
@receiver(signals.application_created)
def application_created(sender, application, user, type_name, **kwargs):
    """Broadcasts the newly created application to the members of its group."""

    def send_message():
        payload = {
            'type': 'application_created',
            'application': get_application_serializer(application).data
        }
        broadcast_to_group.delay(
            application.group_id, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)
@receiver(signals.application_updated)
def application_updated(sender, application, user, **kwargs):
    """Broadcasts the updated application to the members of its group."""

    def send_message():
        payload = {
            'type': 'application_updated',
            'application_id': application.id,
            'application': get_application_serializer(application).data
        }
        broadcast_to_group.delay(
            application.group_id, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)
@receiver(signals.application_deleted)
def application_deleted(sender, application_id, application, user, **kwargs):
    """
    Notifies the group members that an application has been deleted. The id
    is taken from the signal because the row itself no longer exists.
    """

    def send_message():
        payload = {
            'type': 'application_deleted',
            'application_id': application_id
        }
        broadcast_to_group.delay(
            application.group_id, payload, getattr(user, 'web_socket_id', None)
        )

    # Wait for the transaction to commit so receivers see the new state.
    transaction.on_commit(send_message)

View file

@ -0,0 +1,90 @@
from baserow.config.celery import app
@app.task(bind=True)
def broadcast_to_users(self, user_ids, payload, ignore_web_socket_id=None):
    """
    Broadcasts a JSON payload to the provided users.

    :param user_ids: A list containing the user ids that should receive the payload.
    :type user_ids: list
    :param payload: A dictionary object containing the payload that must be
        broadcasted.
    :type payload: dict
    :param ignore_web_socket_id: The web socket id to which the message must not be
        send. This is normally the web socket id that has originally made the change
        request.
    :type ignore_web_socket_id: str
    """

    # Imported lazily so the task module can be loaded without channels.
    from asgiref.sync import async_to_sync
    from channels.layers import get_channel_layer

    message = {
        'type': 'broadcast_to_users',
        'user_ids': user_ids,
        'payload': payload,
        'ignore_web_socket_id': ignore_web_socket_id
    }
    group_send = async_to_sync(get_channel_layer().group_send)
    group_send('users', message)
@app.task(bind=True)
def broadcast_to_channel_group(self, group, payload, ignore_web_socket_id=None):
    """
    Broadcasts a JSON payload to all the users within the channel group having
    the provided name.

    :param group: The name of the channel group where the payload must be broad casted
        to.
    :type group: str
    :param payload: A dictionary object containing the payload that must be
        broadcasted.
    :type payload: dict
    :param ignore_web_socket_id: The web socket id to which the message must not be
        send. This is normally the web socket id that has originally made the change
        request.
    :type ignore_web_socket_id: str
    """

    # Imported lazily so the task module can be loaded without channels.
    from asgiref.sync import async_to_sync
    from channels.layers import get_channel_layer

    message = {
        'type': 'broadcast_to_group',
        'payload': payload,
        'ignore_web_socket_id': ignore_web_socket_id
    }
    group_send = async_to_sync(get_channel_layer().group_send)
    group_send(group, message)
@app.task(bind=True)
def broadcast_to_group(self, group_id, payload, ignore_web_socket_id=None):
    """
    Broadcasts a JSON payload to all users that are in provided group (Group model)
    id.

    :param group_id: The message will only be broadcasted to the users within the
        provided group id.
    :type group_id: int
    :param payload: A dictionary object containing the payload that must be
        broadcasted.
    :type payload: dict
    :param ignore_web_socket_id: The web socket id to which the message must not be
        send. This is normally the web socket id that has originally made the change
        request.
    :type ignore_web_socket_id: str
    """

    # Imported lazily to avoid a circular import at module load time.
    from baserow.core.models import GroupUser

    # `values_list(..., flat=True)` fetches just the user ids in one query
    # instead of building dicts and unpacking them in Python.
    user_ids = list(
        GroupUser.objects
        .filter(group_id=group_id)
        .values_list('user_id', flat=True)
    )

    if not user_ids:
        return

    # Called synchronously because we are already running inside a worker.
    broadcast_to_users(user_ids, payload, ignore_web_socket_id)

View file

@ -154,6 +154,7 @@ def test_get_field(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json['id'] == text.id
assert response_json['name'] == text.name
assert response_json['table_id'] == text.table_id
assert not response_json['text_default']

View file

@ -12,8 +12,8 @@ from baserow.contrib.database.api.rows.serializers import (
def test_get_table_serializer(data_fixture):
table = data_fixture.create_database_table(name='Cars')
table_2 = data_fixture.create_database_table()
data_fixture.create_text_field(table=table, order=0, name='Color',
text_default='white')
text_field = data_fixture.create_text_field(table=table, order=0, name='Color',
text_default='white')
data_fixture.create_number_field(table=table, order=1, name='Horsepower')
data_fixture.create_boolean_field(table=table, order=3, name='For sale')
data_fixture.create_number_field(table=table, order=4, name='Price',
@ -156,6 +156,12 @@ def test_get_table_serializer(data_fixture):
assert not serializer_instance.is_valid()
assert len(serializer_instance.errors[f'field_{price_field.id}']) == 1
model = table.get_model(attribute_names=True)
serializer_class = get_row_serializer_class(model=model, field_ids=[text_field.id])
serializer_instance = serializer_class(data={})
assert serializer_instance.is_valid()
assert serializer_instance.data == {'color': 'white'}
@pytest.mark.django_db
def test_get_example_row_serializer_class():

View file

@ -831,8 +831,12 @@ def test_update_row(api_client, data_fixture):
response_json_row_1 = response.json()
assert response.status_code == HTTP_200_OK
assert response_json_row_1[f'field_{text_field.id}'] == 'Purple'
assert response_json_row_1[f'field_{number_field.id}'] == '120'
assert response_json_row_1[f'field_{boolean_field.id}'] is True
row_1.refresh_from_db()
assert getattr(row_1, f'field_{text_field.id}') == 'Purple'
assert getattr(row_1, f'field_{number_field.id}') == Decimal('120')
assert getattr(row_1, f'field_{boolean_field.id}') is True
response = api_client.patch(
url,
@ -843,11 +847,8 @@ def test_update_row(api_client, data_fixture):
response_json_row_1 = response.json()
assert response.status_code == HTTP_200_OK
assert response_json_row_1[f'field_{text_field.id}'] == 'Orange'
# Because the model is generated only for the field we want to change the other
# fields are not included in the serializer.
assert f'field_{number_field.id}' not in response_json_row_1
assert f'field_{boolean_field.id}' not in response_json_row_1
assert response_json_row_1[f'field_{number_field.id}'] == '120'
assert response_json_row_1[f'field_{boolean_field.id}'] is True
row_1.refresh_from_db()
assert getattr(row_1, f'field_{text_field.id}') == 'Orange'
assert getattr(row_1, f'field_{number_field.id}') == 120
@ -926,9 +927,13 @@ def test_update_row(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json[f'field_{decimal_field.id}'] == '10.22'
assert response_json_row_2[f'field_{number_field.id}'] is None
assert response_json_row_2[f'field_{boolean_field.id}'] is False
row_3.refresh_from_db()
assert getattr(row_3, f'field_{decimal_field.id}') == Decimal('10.22')
assert getattr(row_2, f'field_{number_field.id}') is None
assert getattr(row_2, f'field_{boolean_field.id}') is False
@pytest.mark.django_db

View file

@ -287,6 +287,7 @@ def test_get_table(api_client, data_fixture):
assert json_response['id'] == table_1.id
assert json_response['name'] == table_1.name
assert json_response['order'] == table_1.order
assert json_response['database_id'] == table_1.database_id
url = reverse('api:database:tables:item', kwargs={'table_id': table_2.id})
response = api_client.get(url, HTTP_AUTHORIZATION=f'JWT {token}')

View file

@ -308,6 +308,7 @@ def test_get_view(api_client, data_fixture):
response_json = response.json()
assert response.status_code == HTTP_200_OK
assert response_json['id'] == view.id
assert response_json['table_id'] == view.table_id
assert response_json['type'] == 'grid'
assert response_json['table']['id'] == table.id
assert response_json['filter_type'] == 'AND'

View file

@ -50,14 +50,19 @@ def test_get_field(data_fixture):
@pytest.mark.django_db
def test_create_field(data_fixture):
@patch('baserow.contrib.database.fields.signals.field_created.send')
def test_create_field(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
handler = FieldHandler()
handler.create_field(user=user, table=table, type_name='text',
name='Test text field', text_default='Some default')
field = handler.create_field(user=user, table=table, type_name='text',
name='Test text field', text_default='Some default')
send_mock.assert_called_once()
assert send_mock.call_args[1]['field'].id == field.id
assert send_mock.call_args[1]['user'].id == user.id
assert Field.objects.all().count() == 1
assert TextField.objects.all().count() == 1
@ -146,7 +151,8 @@ def test_create_primary_field(data_fixture):
@pytest.mark.django_db
def test_update_field(data_fixture):
@patch('baserow.contrib.database.fields.signals.field_updated.send')
def test_update_field(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
@ -181,6 +187,10 @@ def test_update_field(data_fixture):
assert field.text_default == 'Default value'
assert isinstance(field, TextField)
send_mock.assert_called_once()
assert send_mock.call_args[1]['field'].id == field.id
assert send_mock.call_args[1]['user'].id == user.id
# Insert some rows to the table which should be converted later.
model = table.get_model()
model.objects.create(**{f'field_{field.id}': 'Text value'})
@ -263,7 +273,8 @@ def test_update_field_failing(data_fixture):
@pytest.mark.django_db
def test_delete_field(data_fixture):
@patch('baserow.contrib.database.fields.signals.field_deleted.send')
def test_delete_field(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
@ -279,10 +290,16 @@ def test_delete_field(data_fixture):
assert Field.objects.all().count() == 1
assert TextField.objects.all().count() == 1
field_id = text_field.id
handler.delete_field(user=user, field=text_field)
assert Field.objects.all().count() == 0
assert TextField.objects.all().count() == 0
send_mock.assert_called_once()
assert send_mock.call_args[1]['field_id'] == field_id
assert send_mock.call_args[1]['field'].id == field_id
assert send_mock.call_args[1]['user'].id == user.id
table_model = table.get_model()
field_name = f'field_{text_field.id}'
assert field_name not in [field.name for field in table_model._meta.get_fields()]

View file

@ -1,4 +1,5 @@
import pytest
from unittest.mock import patch
from decimal import Decimal
@ -128,7 +129,8 @@ def test_extract_manytomany_values(data_fixture):
@pytest.mark.django_db
def test_create_row(data_fixture):
@patch('baserow.contrib.database.rows.signals.row_created.send')
def test_create_row(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(name='Car', user=user)
@ -166,6 +168,13 @@ def test_create_row(data_fixture):
assert not getattr(row_1, f'field_9999', None)
assert row_1.order == Decimal('1.00000000000000000000')
send_mock.assert_called_once()
assert send_mock.call_args[1]['row'].id == row_1.id
assert send_mock.call_args[1]['user'].id == user.id
assert send_mock.call_args[1]['table'].id == table.id
assert send_mock.call_args[1]['before'] is None
assert send_mock.call_args[1]['model']._generated_table_model
row_2 = handler.create_row(user=user, table=table)
assert getattr(row_2, f'field_{name_field.id}') == 'Test'
assert not getattr(row_2, f'field_{speed_field.id}')
@ -180,6 +189,7 @@ def test_create_row(data_fixture):
assert row_1.order == Decimal('1.00000000000000000000')
assert row_2.order == Decimal('2.00000000000000000000')
assert row_3.order == Decimal('1.99999999999999999999')
assert send_mock.call_args[1]['before'].id == row_2.id
row_4 = handler.create_row(user=user, table=table, before=row_2)
row_1.refresh_from_db()
@ -289,7 +299,8 @@ def test_get_row(data_fixture):
@pytest.mark.django_db
def test_update_row(data_fixture):
@patch('baserow.contrib.database.rows.signals.row_updated.send')
def test_update_row(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(name='Car', user=user)
@ -328,10 +339,16 @@ def test_update_row(data_fixture):
assert getattr(row, f'field_{name_field.id}') == 'Tesla'
assert getattr(row, f'field_{speed_field.id}') == 240
assert getattr(row, f'field_{price_field.id}') == Decimal('59999.99')
send_mock.assert_called_once()
assert send_mock.call_args[1]['row'].id == row.id
assert send_mock.call_args[1]['user'].id == user.id
assert send_mock.call_args[1]['table'].id == table.id
assert send_mock.call_args[1]['model']._generated_table_model
@pytest.mark.django_db
def test_delete_row(data_fixture):
@patch('baserow.contrib.database.rows.signals.row_deleted.send')
def test_delete_row(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(name='Car', user=user)
@ -348,5 +365,12 @@ def test_delete_row(data_fixture):
with pytest.raises(RowDoesNotExist):
handler.delete_row(user=user, table=table, row_id=99999)
row_id = row.id
handler.delete_row(user=user, table=table, row_id=row.id)
assert model.objects.all().count() == 1
send_mock.assert_called_once()
assert send_mock.call_args[1]['row_id'] == row_id
assert send_mock.call_args[1]['row']
assert send_mock.call_args[1]['user'].id == user.id
assert send_mock.call_args[1]['table'].id == table.id
assert send_mock.call_args[1]['model']._generated_table_model

View file

@ -1,4 +1,5 @@
import pytest
from unittest.mock import patch
from django.db import connection
from django.conf import settings
@ -42,7 +43,8 @@ def test_get_database_table(data_fixture):
@pytest.mark.django_db
def test_create_database_table(data_fixture):
@patch('baserow.contrib.database.table.signals.table_created.send')
def test_create_database_table(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
@ -63,6 +65,10 @@ def test_create_database_table(data_fixture):
assert primary_field.primary
assert primary_field.name == 'Name'
send_mock.assert_called_once()
assert send_mock.call_args[1]['table'].id == table.id
assert send_mock.call_args[1]['user'].id == user.id
with pytest.raises(UserNotInGroupError):
handler.create_table(user=user_2, database=database, name='')
@ -199,7 +205,8 @@ def test_fill_table_with_initial_data(data_fixture):
@pytest.mark.django_db
def test_update_database_table(data_fixture):
@patch('baserow.contrib.database.table.signals.table_updated.send')
def test_update_database_table(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user)
@ -213,13 +220,18 @@ def test_update_database_table(data_fixture):
handler.update_table(user=user, table=table, name='Test 1')
send_mock.assert_called_once()
assert send_mock.call_args[1]['table'].id == table.id
assert send_mock.call_args[1]['user'].id == user.id
table.refresh_from_db()
assert table.name == 'Test 1'
@pytest.mark.django_db
def test_delete_database_table(data_fixture):
@patch('baserow.contrib.database.table.signals.table_deleted.send')
def test_delete_database_table(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user)
@ -234,7 +246,12 @@ def test_delete_database_table(data_fixture):
assert Table.objects.all().count() == 1
assert f'database_table_{table.id}' in connection.introspection.table_names()
table_id = table.id
handler.delete_table(user=user, table=table)
send_mock.assert_called_once()
assert send_mock.call_args[1]['table_id'] == table_id
assert send_mock.call_args[1]['user'].id == user.id
assert Table.objects.all().count() == 0
assert f'database_table_{table.id}' not in connection.introspection.table_names()

View file

@ -1,4 +1,5 @@
import pytest
from unittest.mock import patch
from decimal import Decimal
from baserow.core.exceptions import UserNotInGroupError
@ -57,14 +58,20 @@ def test_get_view(data_fixture):
@pytest.mark.django_db
def test_create_view(data_fixture):
@patch('baserow.contrib.database.views.signals.view_created.send')
def test_create_view(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
table_2 = data_fixture.create_database_table(user=user)
handler = ViewHandler()
handler.create_view(user=user, table=table, type_name='grid', name='Test grid')
view = handler.create_view(user=user, table=table, type_name='grid',
name='Test grid')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view'].id == view.id
assert send_mock.call_args[1]['user'].id == user.id
assert View.objects.all().count() == 1
assert GridView.objects.all().count() == 1
@ -109,7 +116,8 @@ def test_create_view(data_fixture):
@pytest.mark.django_db
def test_update_view(data_fixture):
@patch('baserow.contrib.database.views.signals.view_updated.send')
def test_update_view(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
@ -123,7 +131,11 @@ def test_update_view(data_fixture):
with pytest.raises(ValueError):
handler.update_view(user=user, view=object(), name='Test 1')
handler.update_view(user=user, view=grid, name='Test 1')
view = handler.update_view(user=user, view=grid, name='Test 1')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view'].id == view.id
assert send_mock.call_args[1]['user'].id == user.id
grid.refresh_from_db()
assert grid.name == 'Test 1'
@ -138,7 +150,8 @@ def test_update_view(data_fixture):
@pytest.mark.django_db
def test_delete_view(data_fixture):
@patch('baserow.contrib.database.views.signals.view_deleted.send')
def test_delete_view(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
@ -152,13 +165,21 @@ def test_delete_view(data_fixture):
with pytest.raises(ValueError):
handler.delete_view(user=user_2, view=object())
view_id = grid.id
assert View.objects.all().count() == 1
handler.delete_view(user=user, view=grid)
assert View.objects.all().count() == 0
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_id'] == view_id
assert send_mock.call_args[1]['view'].id == view_id
assert send_mock.call_args[1]['user'].id == user.id
@pytest.mark.django_db
def test_update_grid_view_field_options(data_fixture):
@patch('baserow.contrib.database.views.signals.grid_view_field_options_updated.send')
def test_update_grid_view_field_options(send_mock, data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
grid_view = data_fixture.create_grid_view(table=table)
@ -168,6 +189,7 @@ def test_update_grid_view_field_options(data_fixture):
with pytest.raises(ValueError):
ViewHandler().update_grid_view_field_options(
user=user,
grid_view=grid_view,
field_options={
'strange_format': {'height': 150},
@ -176,6 +198,7 @@ def test_update_grid_view_field_options(data_fixture):
with pytest.raises(UnrelatedFieldError):
ViewHandler().update_grid_view_field_options(
user=user,
grid_view=grid_view,
field_options={
99999: {'width': 150},
@ -184,6 +207,7 @@ def test_update_grid_view_field_options(data_fixture):
with pytest.raises(UnrelatedFieldError):
ViewHandler().update_grid_view_field_options(
user=user,
grid_view=grid_view,
field_options={
field_3.id: {'width': 150},
@ -191,6 +215,7 @@ def test_update_grid_view_field_options(data_fixture):
)
ViewHandler().update_grid_view_field_options(
user=user,
grid_view=grid_view,
field_options={
str(field_1.id): {'width': 150},
@ -199,6 +224,9 @@ def test_update_grid_view_field_options(data_fixture):
)
options_4 = grid_view.get_field_options()
send_mock.assert_called_once()
assert send_mock.call_args[1]['grid_view'].id == grid_view.id
assert send_mock.call_args[1]['user'].id == user.id
assert len(options_4) == 2
assert options_4[0].width == 150
assert options_4[0].field_id == field_1.id
@ -206,10 +234,14 @@ def test_update_grid_view_field_options(data_fixture):
assert options_4[1].field_id == field_2.id
field_4 = data_fixture.create_text_field(table=table)
ViewHandler().update_grid_view_field_options(grid_view=grid_view, field_options={
field_2.id: {'width': 300},
field_4.id: {'width': 50}
})
ViewHandler().update_grid_view_field_options(
user=user,
grid_view=grid_view,
field_options={
field_2.id: {'width': 300},
field_4.id: {'width': 50}
}
)
options_4 = grid_view.get_field_options()
assert len(options_4) == 3
assert options_4[0].width == 150
@ -420,7 +452,8 @@ def test_get_filter(data_fixture):
@pytest.mark.django_db
def test_create_filter(data_fixture):
@patch('baserow.contrib.database.views.signals.view_filter_created.send')
def test_create_filter(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
grid_view = data_fixture.create_grid_view(user=user)
@ -459,6 +492,10 @@ def test_create_filter(data_fixture):
view_filter = handler.create_filter(user=user, view=grid_view, field=text_field,
type_name='equal', value='test')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_filter'].id == view_filter.id
assert send_mock.call_args[1]['user'].id == user.id
assert ViewFilter.objects.all().count() == 1
first = ViewFilter.objects.all().first()
@ -478,7 +515,8 @@ def test_create_filter(data_fixture):
@pytest.mark.django_db
def test_update_filter(data_fixture):
@patch('baserow.contrib.database.views.signals.view_filter_updated.send')
def test_update_filter(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
grid_view = data_fixture.create_grid_view(user=user)
@ -513,6 +551,9 @@ def test_update_filter(data_fixture):
updated_filter = handler.update_filter(user=user, view_filter=equal_filter,
value='test2')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_filter'].id == updated_filter.id
assert send_mock.call_args[1]['user'].id == user.id
assert updated_filter.value == 'test2'
assert updated_filter.field_id == long_text_field.id
assert updated_filter.type == 'equal'
@ -528,7 +569,8 @@ def test_update_filter(data_fixture):
@pytest.mark.django_db
def test_delete_filter(data_fixture):
@patch('baserow.contrib.database.views.signals.view_filter_deleted.send')
def test_delete_filter(send_mock, data_fixture):
user = data_fixture.create_user()
filter_1 = data_fixture.create_view_filter(user=user)
filter_2 = data_fixture.create_view_filter()
@ -540,8 +582,13 @@ def test_delete_filter(data_fixture):
with pytest.raises(UserNotInGroupError):
handler.delete_filter(user=user, view_filter=filter_2)
filter_1_id = filter_1.id
handler.delete_filter(user=user, view_filter=filter_1)
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_filter_id'] == filter_1_id
assert send_mock.call_args[1]['view_filter']
assert send_mock.call_args[1]['user'].id == user.id
assert ViewFilter.objects.all().count() == 1
assert ViewFilter.objects.filter(pk=filter_1.pk).count() == 0
@ -709,7 +756,8 @@ def test_get_sort(data_fixture):
@pytest.mark.django_db
def test_create_sort(data_fixture):
@patch('baserow.contrib.database.views.signals.view_sort_created.send')
def test_create_sort(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
grid_view = data_fixture.create_grid_view(user=user)
@ -742,6 +790,10 @@ def test_create_sort(data_fixture):
view_sort = handler.create_sort(user=user, view=grid_view, field=text_field,
order='ASC')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_sort'].id == view_sort.id
assert send_mock.call_args[1]['user'].id == user.id
assert ViewSort.objects.all().count() == 1
first = ViewSort.objects.all().first()
@ -762,7 +814,8 @@ def test_create_sort(data_fixture):
@pytest.mark.django_db
def test_update_sort(data_fixture):
@patch('baserow.contrib.database.views.signals.view_sort_updated.send')
def test_update_sort(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
grid_view = data_fixture.create_grid_view(user=user)
@ -789,6 +842,9 @@ def test_update_sort(data_fixture):
updated_sort = handler.update_sort(user=user, view_sort=view_sort,
order='DESC')
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_sort'].id == updated_sort.id
assert send_mock.call_args[1]['user'].id == user.id
assert updated_sort.order == 'DESC'
assert updated_sort.field_id == long_text_field.id
assert updated_sort.view_id == grid_view.id
@ -807,7 +863,8 @@ def test_update_sort(data_fixture):
@pytest.mark.django_db
def test_delete_sort(data_fixture):
@patch('baserow.contrib.database.views.signals.view_sort_deleted.send')
def test_delete_sort(send_mock, data_fixture):
user = data_fixture.create_user()
sort_1 = data_fixture.create_view_sort(user=user)
sort_2 = data_fixture.create_view_sort()
@ -819,7 +876,13 @@ def test_delete_sort(data_fixture):
with pytest.raises(UserNotInGroupError):
handler.delete_sort(user=user, view_sort=sort_2)
sort_1_id = sort_1.id
handler.delete_sort(user=user, view_sort=sort_1)
send_mock.assert_called_once()
assert send_mock.call_args[1]['view_sort_id'] == sort_1_id
assert send_mock.call_args[1]['view_sort']
assert send_mock.call_args[1]['user'].id == user.id
assert ViewSort.objects.all().count() == 1
assert ViewSort.objects.filter(pk=sort_1.pk).count() == 0

View file

@ -0,0 +1,52 @@
import pytest
from unittest.mock import patch
from baserow.contrib.database.fields.handler import FieldHandler
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_field_created(mock_broadcast, data_fixture):
    """Creating a field must broadcast a `field_created` payload, containing
    the serialized field, to the channel group of the field's table.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = FieldHandler().create_field(
        user=user, table=table, type_name='text', name='Grid'
    )

    mock_broadcast.delay.assert_called_once()
    positional = mock_broadcast.delay.call_args[0]
    assert positional[0] == f'table-{table.id}'
    payload = positional[1]
    assert payload['type'] == 'field_created'
    assert payload['field']['id'] == field.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_field_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating a field must broadcast a `field_updated` payload, containing
    both the field id and the serialized field, to the table's channel group.
    """

    user = data_fixture.create_user()
    field = data_fixture.create_text_field(user=user)
    FieldHandler().update_field(user=user, field=field, name='field')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    # call_args[0] holds the positional arguments of the delayed task call:
    # (channel_group_name, payload, ...).
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{field.table.id}'
    assert args[0][1]['type'] == 'field_updated'
    assert args[0][1]['field_id'] == field.id
    assert args[0][1]['field']['id'] == field.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_field_deleted(mock_broadcast_to_channel_group, data_fixture):
    """Deleting a field must broadcast a `field_deleted` payload, containing
    the deleted field's id and its table's id, to the table's channel group.
    """

    user = data_fixture.create_user()
    field = data_fixture.create_text_field(user=user)
    # Capture the ids before deletion; the payload is compared against them.
    field_id = field.id
    table_id = field.table_id
    FieldHandler().delete_field(user=user, field=field)

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    # NOTE(review): `field.table` is accessed after the field row was deleted;
    # presumably the in-memory instance still resolves the table — confirm.
    assert args[0][0] == f'table-{field.table.id}'
    assert args[0][1]['type'] == 'field_deleted'
    assert args[0][1]['field_id'] == field_id
    assert args[0][1]['table_id'] == table_id

View file

@ -0,0 +1,90 @@
import pytest
from unittest.mock import patch
from baserow.contrib.database.rows.handler import RowHandler
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_row_created(mock_broadcast_to_channel_group, data_fixture):
    """Creating a row must broadcast a `row_created` payload, containing the
    serialized row and the optional `before_row_id`, to the table's channel
    group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    row = RowHandler().create_row(user=user, table=table, values={
        f'field_{field.id}': 'Test'
    })

    mock_broadcast_to_channel_group.delay.assert_called_once()
    # call_args[0] holds the positional arguments of the delayed task call.
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_created'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    # No `before` row was given, so the payload must not reference one.
    assert args[0][1]['before_row_id'] is None
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test'

    # Creating a row before an existing one must include that row's id.
    row_2 = RowHandler().create_row(user=user, table=table, before=row, values={
        f'field_{field.id}': 'Test2'
    })
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_created'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row_2.id
    assert args[0][1]['before_row_id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test2'
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_row_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating a row must broadcast a `row_updated` payload, containing the
    fully serialized row (all fields, not only the updated ones), to the
    table's channel group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    field_2 = data_fixture.create_text_field(table=table)
    row = table.get_model().objects.create()
    RowHandler().update_row(user=user, table=table, row_id=row.id, values={
        f'field_{field.id}': 'Test'
    })

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_updated'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test'
    # The second field was not part of the update but is still serialized.
    assert args[0][1]['row'][f'field_{field_2.id}'] is None

    # Give the second field a value outside of the handler, then update only
    # the first field; the payload must still contain both current values.
    row.refresh_from_db()
    setattr(row, f'field_{field_2.id}', 'Second')
    row.save()
    RowHandler().update_row(user=user, table=table, row_id=row.id, values={
        f'field_{field.id}': 'First'
    })
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_updated'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'First'
    assert args[0][1]['row'][f'field_{field_2.id}'] == 'Second'
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_row_deleted(mock_broadcast, data_fixture):
    """Deleting a row must broadcast a `row_deleted` payload, containing the
    deleted row's id and its table's id, to the table's channel group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    row = table.get_model().objects.create()
    row_id = row.id

    RowHandler().delete_row(user=user, table=table, row_id=row_id)

    mock_broadcast.delay.assert_called_once()
    positional = mock_broadcast.delay.call_args[0]
    assert positional[0] == f'table-{table.id}'
    payload = positional[1]
    assert payload['type'] == 'row_deleted'
    assert payload['row_id'] == row_id
    assert payload['table_id'] == table.id

View file

@ -0,0 +1,51 @@
import pytest
from unittest.mock import patch
from baserow.contrib.database.table.handler import TableHandler
@pytest.mark.django_db(transaction=True)
@patch('baserow.contrib.database.ws.table.signals.broadcast_to_group')
def test_table_created(mock_broadcast_to_group, data_fixture):
    """Creating a table must broadcast a `table_created` payload, containing
    the serialized table, to the group of the table's database.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    table = TableHandler().create_table(user=user, database=database, name='Test')

    mock_broadcast_to_group.delay.assert_called_once()
    # call_args[0] holds the positional arguments: (group_id, payload, ...).
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == table.database.group_id
    assert args[0][1]['type'] == 'table_created'
    assert args[0][1]['table']['id'] == table.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.contrib.database.ws.table.signals.broadcast_to_group')
def test_table_updated(mock_broadcast_to_group, data_fixture):
    """Updating a table must broadcast a `table_updated` payload, containing
    both the table id and the serialized table, to the database's group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    table = TableHandler().update_table(user=user, table=table, name='Test')

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == table.database.group_id
    assert args[0][1]['type'] == 'table_updated'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['table']['id'] == table.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.contrib.database.ws.table.signals.broadcast_to_group')
def test_table_deleted(mock_broadcast_to_group, data_fixture):
    """Deleting a table must broadcast a `table_deleted` payload, containing
    the deleted table's id and its database's id, to the database's group.

    The mock parameter is named after the patched target `broadcast_to_group`
    (it was previously misnamed `mock_broadcast_to_users`), keeping it
    consistent with the sibling table signal tests.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    # Capture the ids before deletion; the payload is compared against them.
    table_id = table.id
    database_id = table.database_id
    TableHandler().delete_table(user=user, table=table)

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == table.database.group_id
    assert args[0][1]['type'] == 'table_deleted'
    assert args[0][1]['database_id'] == database_id
    assert args[0][1]['table_id'] == table_id

View file

@ -0,0 +1,178 @@
import pytest
from unittest.mock import patch
from baserow.contrib.database.views.handler import ViewHandler
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_created(mock_broadcast_to_channel_group, data_fixture):
    """Creating a view must broadcast a `view_created` payload, containing the
    serialized view including its filters and sortings, to the table's channel
    group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    view = ViewHandler().create_view(user=user, table=table, type_name='grid',
                                     name='Grid')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'view_created'
    assert args[0][1]['view']['id'] == view.id
    # On creation the view is serialized with its nested filters and sortings.
    assert 'filters' in args[0][1]['view']
    assert 'sortings' in args[0][1]['view']
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating a view must broadcast a `view_updated` payload to the table's
    channel group, serialized WITHOUT the nested filters and sortings.
    """

    user = data_fixture.create_user()
    view = data_fixture.create_grid_view(user=user)
    ViewHandler().update_view(user=user, view=view, name='View')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{view.table.id}'
    assert args[0][1]['type'] == 'view_updated'
    assert args[0][1]['view_id'] == view.id
    assert args[0][1]['view']['id'] == view.id
    # Unlike `view_created`, the update payload omits filters and sortings.
    assert 'filters' not in args[0][1]['view']
    assert 'sortings' not in args[0][1]['view']
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_deleted(mock_broadcast, data_fixture):
    """Deleting a view must broadcast a `view_deleted` payload, containing the
    deleted view's id and its table's id, to the table's channel group.
    """

    user = data_fixture.create_user()
    view = data_fixture.create_grid_view(user=user)
    # Remember the ids before the view is removed from the database.
    view_id = view.id
    table_id = view.table_id

    ViewHandler().delete_view(user=user, view=view)

    mock_broadcast.delay.assert_called_once()
    positional = mock_broadcast.delay.call_args[0]
    assert positional[0] == f'table-{view.table.id}'
    payload = positional[1]
    assert payload['type'] == 'view_deleted'
    assert payload['view_id'] == view_id
    assert payload['table_id'] == table_id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_filter_created(mock_broadcast_to_channel_group, data_fixture):
    """Creating a view filter must broadcast a `view_filter_created` payload,
    containing the serialized filter, to the table's channel group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    view = data_fixture.create_grid_view(user=user, table=table)
    view_filter = ViewHandler().create_filter(user=user, view=view, type_name='equal',
                                              value='test', field=field)

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'view_filter_created'
    assert args[0][1]['view_filter']['id'] == view_filter.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_filter_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating a view filter must broadcast a `view_filter_updated` payload,
    containing both the filter id and the serialized filter, to the table's
    channel group.
    """

    user = data_fixture.create_user()
    view_filter = data_fixture.create_view_filter(user=user)
    view_filter = ViewHandler().update_filter(user=user, view_filter=view_filter,
                                              value='test2')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{view_filter.view.table.id}'
    assert args[0][1]['type'] == 'view_filter_updated'
    assert args[0][1]['view_filter_id'] == view_filter.id
    assert args[0][1]['view_filter']['id'] == view_filter.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_filter_deleted(mock_broadcast_to_channel_group, data_fixture):
    """Deleting a view filter must broadcast a `view_filter_deleted` payload,
    containing the filter id and its view's id, to the table's channel group.
    """

    user = data_fixture.create_user()
    view_filter = data_fixture.create_view_filter(user=user)
    # Capture the ids before deletion; the payload is compared against them.
    view_id = view_filter.view.id
    view_filter_id = view_filter.id
    ViewHandler().delete_filter(user=user, view_filter=view_filter)

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{view_filter.view.table.id}'
    assert args[0][1]['type'] == 'view_filter_deleted'
    assert args[0][1]['view_id'] == view_id
    assert args[0][1]['view_filter_id'] == view_filter_id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_sort_created(mock_broadcast_to_channel_group, data_fixture):
    """Creating a view sort must broadcast a `view_sort_created` payload,
    containing the serialized sort, to the table's channel group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    view = data_fixture.create_grid_view(user=user, table=table)
    view_sort = ViewHandler().create_sort(user=user, view=view, field=field,
                                          order='ASC')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'view_sort_created'
    assert args[0][1]['view_sort']['id'] == view_sort.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_sort_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating a view sort must broadcast a `view_sort_updated` payload,
    containing both the sort id and the serialized sort, to the table's
    channel group.
    """

    user = data_fixture.create_user()
    view_sort = data_fixture.create_view_sort(user=user)
    view_sort = ViewHandler().update_sort(user=user, view_sort=view_sort,
                                          order='DESC')

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{view_sort.view.table.id}'
    assert args[0][1]['type'] == 'view_sort_updated'
    assert args[0][1]['view_sort_id'] == view_sort.id
    assert args[0][1]['view_sort']['id'] == view_sort.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_view_sort_deleted(mock_broadcast_to_channel_group, data_fixture):
    """Deleting a view sort must broadcast a `view_sort_deleted` payload,
    containing the sort id and its view's id, to the table's channel group.
    """

    user = data_fixture.create_user()
    view_sort = data_fixture.create_view_sort(user=user)
    # Capture the ids before deletion; the payload is compared against them.
    view_id = view_sort.view.id
    view_sort_id = view_sort.id
    ViewHandler().delete_sort(user=user, view_sort=view_sort)

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{view_sort.view.table.id}'
    assert args[0][1]['type'] == 'view_sort_deleted'
    assert args[0][1]['view_id'] == view_id
    assert args[0][1]['view_sort_id'] == view_sort_id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_grid_view_field_options_updated(mock_broadcast_to_channel_group, data_fixture):
    """Updating the grid view field options must broadcast a
    `grid_view_field_options_updated` payload, containing the grid view id and
    the new per-field options, to the table's channel group.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table)
    grid_view = data_fixture.create_grid_view(table=table)
    ViewHandler().update_grid_view_field_options(
        user=user,
        grid_view=grid_view,
        field_options={str(text_field.id): {'width': 150}}
    )

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'grid_view_field_options_updated'
    assert args[0][1]['grid_view_id'] == grid_view.id
    # The payload is keyed by the integer field id, even though the handler
    # accepted a string key.
    assert args[0][1]['grid_view_field_options'][text_field.id]['width'] == 150

View file

@ -1,4 +1,5 @@
import pytest
from unittest.mock import patch
from django.db import connection
@ -62,11 +63,16 @@ def test_get_group_user(data_fixture):
@pytest.mark.django_db
def test_create_group(data_fixture):
@patch('baserow.core.signals.group_created.send')
def test_create_group(send_mock, data_fixture):
user = data_fixture.create_user()
handler = CoreHandler()
handler.create_group(user=user, name='Test group')
group_user = handler.create_group(user=user, name='Test group')
send_mock.assert_called_once()
assert send_mock.call_args[1]['group'].id == group_user.group.id
assert send_mock.call_args[1]['user'].id == user.id
group = Group.objects.all().first()
user_group = GroupUser.objects.all().first()
@ -83,7 +89,8 @@ def test_create_group(data_fixture):
@pytest.mark.django_db
def test_update_group(data_fixture):
@patch('baserow.core.signals.group_updated.send')
def test_update_group(send_mock, data_fixture):
user_1 = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user_1)
@ -91,6 +98,10 @@ def test_update_group(data_fixture):
handler = CoreHandler()
handler.update_group(user=user_1, group=group, name='New name')
send_mock.assert_called_once()
assert send_mock.call_args[1]['group'].id == group.id
assert send_mock.call_args[1]['user'].id == user_1.id
group.refresh_from_db()
assert group.name == 'New name'
@ -103,7 +114,8 @@ def test_update_group(data_fixture):
@pytest.mark.django_db
def test_delete_group(data_fixture):
@patch('baserow.core.signals.group_deleted.send')
def test_delete_group(send_mock, data_fixture):
user = data_fixture.create_user()
group_1 = data_fixture.create_group(user=user)
database = data_fixture.create_database_application(group=group_1)
@ -115,6 +127,12 @@ def test_delete_group(data_fixture):
handler = CoreHandler()
handler.delete_group(user, group_1)
send_mock.assert_called_once()
assert send_mock.call_args[1]['group'].id == group_1.id
assert send_mock.call_args[1]['user'].id == user.id
assert len(send_mock.call_args[1]['group_users']) == 1
assert send_mock.call_args[1]['group_users'][0].id == user.id
assert Database.objects.all().count() == 0
assert Table.objects.all().count() == 0
assert f'database_table_{table.id}' not in connection.introspection.table_names()
@ -188,7 +206,8 @@ def test_get_application(data_fixture):
@pytest.mark.django_db
def test_create_database_application(data_fixture):
@patch('baserow.core.signals.application_created.send')
def test_create_database_application(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user)
@ -205,6 +224,11 @@ def test_create_database_application(data_fixture):
assert database.order == 1
assert database.group == group
send_mock.assert_called_once()
assert send_mock.call_args[1]['application'].id == database.id
assert send_mock.call_args[1]['user'].id == user.id
assert send_mock.call_args[1]['type_name'] == 'database'
with pytest.raises(UserNotInGroupError):
handler.create_application(user=user_2, group=group, type_name='database',
name='')
@ -215,7 +239,8 @@ def test_create_database_application(data_fixture):
@pytest.mark.django_db
def test_update_database_application(data_fixture):
@patch('baserow.core.signals.application_updated.send')
def test_update_database_application(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user)
@ -231,13 +256,18 @@ def test_update_database_application(data_fixture):
handler.update_application(user=user, application=database, name='Test 1')
send_mock.assert_called_once()
assert send_mock.call_args[1]['application'].id == database.id
assert send_mock.call_args[1]['user'].id == user.id
database.refresh_from_db()
assert database.name == 'Test 1'
@pytest.mark.django_db
def test_delete_database_application(data_fixture):
@patch('baserow.core.signals.application_deleted.send')
def test_delete_database_application(send_mock, data_fixture):
user = data_fixture.create_user()
user_2 = data_fixture.create_user()
group = data_fixture.create_group(user=user)
@ -257,3 +287,8 @@ def test_delete_database_application(data_fixture):
assert Database.objects.all().count() == 0
assert Table.objects.all().count() == 0
assert f'database_table_{table.id}' not in connection.introspection.table_names()
send_mock.assert_called_once()
assert send_mock.call_args[1]['application_id'] == database.id
assert send_mock.call_args[1]['application'].id == database.id
assert send_mock.call_args[1]['user'].id == user.id

View file

@ -0,0 +1,61 @@
import pytest
from channels.testing import WebsocketCommunicator
from baserow.config.asgi import application
from baserow.ws.auth import get_user
@pytest.mark.run(order=1)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_get_user(data_fixture):
    """`get_user` resolves a valid JWT token to its user and returns ``None``
    for an unknown token.
    """

    user, token = data_fixture.create_user_and_token()

    # An arbitrary invalid token must not resolve to any user.
    assert await get_user('random') is None

    resolved = await get_user(token)
    assert resolved.id == user.id
@pytest.mark.run(order=2)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_token_auth_middleware(data_fixture):
    """The websocket auth middleware must always accept the connection and
    reply with an `authentication` message indicating whether the provided
    JWT token was valid.

    Fixes: removed pointless ``f`` prefixes from placeholder-free URL strings
    and renamed the local ``json`` variable, which shadowed the stdlib module
    name, to ``response``.
    """

    user, token = data_fixture.create_user_and_token()

    # Without a token: authentication fails and no web socket id is assigned.
    communicator = WebsocketCommunicator(application, 'ws/core/')
    connected, subprotocol = await communicator.connect()
    assert connected
    response = await communicator.receive_json_from()
    assert response['type'] == 'authentication'
    assert response['success'] is False
    assert response['web_socket_id'] is None
    await communicator.disconnect()

    # With an invalid token: authentication fails, but the connection still
    # gets a web socket id.
    communicator = WebsocketCommunicator(application, 'ws/core/?jwt_token=random')
    connected, subprotocol = await communicator.connect()
    assert connected
    response = await communicator.receive_json_from()
    assert response['type'] == 'authentication'
    assert response['success'] is False
    assert response['web_socket_id'] is not None
    await communicator.disconnect()

    # With a valid token: authentication succeeds.
    communicator = WebsocketCommunicator(application, f'ws/core/?jwt_token={token}')
    connected, subprotocol = await communicator.connect()
    assert connected
    response = await communicator.receive_json_from()
    assert response['type'] == 'authentication'
    assert response['success'] is True
    assert response['web_socket_id'] is not None
    await communicator.disconnect()

    # Reconnecting with the same valid token must also be accepted.
    communicator = WebsocketCommunicator(application, f'ws/core/?jwt_token={token}')
    connected, subprotocol = await communicator.connect()
    assert connected
    response = await communicator.receive_json_from()
    assert response['type'] == 'authentication'
    assert response['web_socket_id'] is not None
    await communicator.disconnect()

View file

@ -0,0 +1,44 @@
import pytest
from channels.testing import WebsocketCommunicator
from baserow.config.asgi import application
@pytest.mark.run(order=3)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_join_page(data_fixture):
    """A client can subscribe to a page over the websocket: joining the table
    page is confirmed with `page_add`, leaving it (by switching pages) with
    `page_discard`, and an unknown page name yields no confirmation at all.
    """

    user_1, token_1 = data_fixture.create_user_and_token()
    table_1 = data_fixture.create_database_table(user=user_1)

    communicator_1 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_1}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_1.connect()
    # Consume the initial `authentication` message.
    await communicator_1.receive_json_from()

    # Join the table page.
    await communicator_1.send_json_to({
        'page': 'table',
        'table_id': table_1.id
    })
    response = await communicator_1.receive_json_from(0.1)
    assert response['type'] == 'page_add'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_1.id

    # Switching to an empty page name leaves the current page, so we expect a
    # discard confirmation for the table page.
    await communicator_1.send_json_to({'page': ''})
    response = await communicator_1.receive_json_from(0.1)
    assert response['type'] == 'page_discard'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_1.id

    # When switching to a not existing page we do not expect the confirmation.
    await communicator_1.send_json_to({'page': 'NOT_EXISTING_PAGE'})
    assert communicator_1.output_queue.qsize() == 0

    await communicator_1.disconnect()

View file

@ -0,0 +1,21 @@
from unittest.mock import patch
from baserow.ws.registries import page_registry
@patch('baserow.ws.registries.broadcast_to_channel_group')
def test_broadcast(mock_broadcast, data_fixture):
    """The table page must proxy its broadcasts to the channel group task,
    building the channel group name from the table id and forwarding the
    optional ignored web socket id.
    """

    page = page_registry.get('table')

    page.broadcast({'message': 'test'}, table_id=1)
    mock_broadcast.delay.assert_called_once()
    channel_group, payload, ignore_id = mock_broadcast.delay.call_args[0]
    assert channel_group == 'table-1'
    assert payload['message'] == 'test'
    assert ignore_id is None

    page.broadcast({'message': 'test2'}, ignore_web_socket_id='123', table_id=2)
    channel_group, payload, ignore_id = mock_broadcast.delay.call_args[0]
    assert channel_group == 'table-2'
    assert payload['message'] == 'test2'
    assert ignore_id == '123'

View file

@ -0,0 +1,95 @@
import pytest
from unittest.mock import patch
from baserow.core.handler import CoreHandler
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_group')
def test_group_created(mock_broadcast_to_group, data_fixture):
    """Creating a group must broadcast a `group_created` payload, containing
    the serialized group, to that group.
    """

    user = data_fixture.create_user()
    group_user = CoreHandler().create_group(user=user, name='Test')

    mock_broadcast_to_group.delay.assert_called_once()
    # call_args[0] holds the positional arguments: (group_id, payload, ...).
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == group_user.group_id
    assert args[0][1]['type'] == 'group_created'
    assert args[0][1]['group']['id'] == group_user.group_id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_group')
def test_group_updated(mock_broadcast_to_group, data_fixture):
    """Updating a group must broadcast a `group_updated` payload to the group,
    excluding the web socket connection that triggered the change.
    """

    user = data_fixture.create_user()
    # Simulate a request that originated from this web socket connection; the
    # broadcast must then skip that connection.
    user.web_socket_id = 'test'
    group = data_fixture.create_group(user=user)
    group = CoreHandler().update_group(user=user, group=group, name='Test')

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == group.id
    assert args[0][1]['type'] == 'group_updated'
    assert args[0][1]['group_id'] == group.id
    assert args[0][1]['group']['id'] == group.id
    # The third positional argument is the ignored web socket id.
    assert args[0][2] == 'test'
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_users')
def test_group_deleted(mock_broadcast_to_users, data_fixture):
    """Deleting a group must send a `group_deleted` payload directly to the
    users of that group, because the group itself no longer exists.
    """

    user = data_fixture.create_user()
    group = data_fixture.create_group(user=user)
    group_id = group.id

    CoreHandler().delete_group(user=user, group=group)

    mock_broadcast_to_users.delay.assert_called_once()
    positional = mock_broadcast_to_users.delay.call_args[0]
    assert positional[0] == [user.id]
    payload = positional[1]
    assert payload['type'] == 'group_deleted'
    assert payload['group_id'] == group_id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_group')
def test_application_created(mock_broadcast_to_group, data_fixture):
    """Creating an application must broadcast an `application_created` payload,
    containing the serialized application, to the application's group.
    """

    user = data_fixture.create_user()
    group = data_fixture.create_group(user=user)
    database = CoreHandler().create_application(user=user, group=group,
                                                type_name='database', name='Database')

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == group.id
    assert args[0][1]['type'] == 'application_created'
    assert args[0][1]['application']['id'] == database.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_group')
def test_application_updated(mock_broadcast_to_group, data_fixture):
    """Updating an application must broadcast an `application_updated` payload,
    containing both the application id and the serialized application, to the
    application's group.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    CoreHandler().update_application(user=user, application=database, name='Database')

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == database.group_id
    assert args[0][1]['type'] == 'application_updated'
    assert args[0][1]['application_id'] == database.id
    assert args[0][1]['application']['id'] == database.id
@pytest.mark.django_db(transaction=True)
@patch('baserow.ws.signals.broadcast_to_group')
def test_application_deleted(mock_broadcast_to_group, data_fixture):
    """Deleting an application must broadcast an `application_deleted` payload,
    containing the deleted application's id, to the application's group.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    # Capture the id before deletion; the payload is compared against it.
    database_id = database.id
    CoreHandler().delete_application(user=user, application=database)

    mock_broadcast_to_group.delay.assert_called_once()
    args = mock_broadcast_to_group.delay.call_args
    assert args[0][0] == database.group_id
    assert args[0][1]['type'] == 'application_deleted'
    assert args[0][1]['application_id'] == database_id

View file

@ -0,0 +1,259 @@
import pytest
from asgiref.sync import sync_to_async
from channels.testing import WebsocketCommunicator
from channels.db import database_sync_to_async
from baserow.config.asgi import application
from baserow.ws.tasks import (
broadcast_to_users, broadcast_to_channel_group, broadcast_to_group
)
@pytest.mark.run(order=4)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_broadcast_to_users(data_fixture):
    """`broadcast_to_users` must deliver a message only to the listed users
    and must skip the connection matching `ignore_web_socket_id`.

    Fixes: removed a dead ``response_2['web_socket_id']`` expression statement
    whose result was discarded.
    """

    user_1, token_1 = data_fixture.create_user_and_token()
    user_2, token_2 = data_fixture.create_user_and_token()

    communicator_1 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_1}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_1.connect()
    response_1 = await communicator_1.receive_json_from()
    web_socket_id_1 = response_1['web_socket_id']

    communicator_2 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_2}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_2.connect()
    # Consume the initial `authentication` message of the second connection.
    await communicator_2.receive_json_from()

    # Only user 1 is addressed, so only communicator 1 receives the message.
    await sync_to_async(broadcast_to_users)([user_1.id], {'message': 'test'})
    response_1 = await communicator_1.receive_json_from(0.1)
    await communicator_2.receive_nothing(0.1)
    assert response_1['message'] == 'test'

    # Both users are addressed, but connection 1 is ignored via its web
    # socket id, so only communicator 2 receives the message.
    await sync_to_async(broadcast_to_users)(
        [user_1.id, user_2.id],
        {'message': 'test'},
        ignore_web_socket_id=web_socket_id_1
    )
    await communicator_1.receive_nothing(0.1)
    response_2 = await communicator_2.receive_json_from(0.1)
    assert response_2['message'] == 'test'

    assert communicator_1.output_queue.qsize() == 0
    assert communicator_2.output_queue.qsize() == 0
    await communicator_1.disconnect()
    await communicator_2.disconnect()
@pytest.mark.run(order=5)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_broadcast_to_channel_group(data_fixture):
    """
    Verifies that `broadcast_to_channel_group` only reaches connections that
    have subscribed to the matching page (`table-{id}` channel group), that
    subscribing to a new page discards the previous one, that users cannot
    join tables they have no access to, and that a connection can be excluded
    from delivery via its web socket id.
    """

    user_1, token_1 = data_fixture.create_user_and_token()
    user_2, token_2 = data_fixture.create_user_and_token()
    group_1 = data_fixture.create_group(users=[user_1, user_2])
    database = data_fixture.create_database_application(group=group_1)
    # table_1 belongs to user 1 only, table_2 to user 2 only and table_3 is
    # inside the shared group, so both users may subscribe to it.
    table_1 = data_fixture.create_database_table(user=user_1)
    table_2 = data_fixture.create_database_table(user=user_2)
    table_3 = data_fixture.create_database_table(database=database)

    # Connect user 1; the first message is the authentication response which
    # contains the connection's web socket id.
    communicator_1 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_1}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_1.connect()
    response_1 = await communicator_1.receive_json_from()
    web_socket_id_1 = response_1['web_socket_id']

    # Connect user 2.
    communicator_2 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_2}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_2.connect()
    response_2 = await communicator_2.receive_json_from()
    # NOTE(review): bare expression, only consumes the authentication message.
    response_2['web_socket_id']

    # We don't expect any communicator to receive anything because they didn't join a
    # group.
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_1.id}',
        {'message': 'nothing2'}
    )
    await communicator_1.receive_nothing(0.1)
    await communicator_2.receive_nothing(0.1)

    # User 1 is not allowed to join table 2 so we don't expect any response.
    await communicator_1.send_json_to({
        'page': 'table',
        'table_id': table_2.id
    })
    await communicator_1.receive_nothing(0.1)

    # Because user 1 did not join table 2 we don't expect anything
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_2.id}',
        {'message': 'nothing'}
    )
    await communicator_1.receive_nothing(0.1)
    await communicator_2.receive_nothing(0.1)

    # Join the table page.
    await communicator_1.send_json_to({
        'page': 'table',
        'table_id': table_1.id
    })
    response = await communicator_1.receive_json_from(0.1)
    assert response['type'] == 'page_add'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_1.id

    # Only user 1 is subscribed to table 1, so only connection 1 receives.
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_1.id}',
        {'message': 'test'}
    )
    response_1 = await communicator_1.receive_json_from(0.1)
    assert response_1['message'] == 'test'
    await communicator_2.receive_nothing(0.1)

    # Subscribing to another page must first discard the old page (table 1)
    # before confirming the new one (table 3).
    await communicator_1.send_json_to({
        'page': 'table',
        'table_id': table_3.id
    })
    response = await communicator_1.receive_json_from(0.1)
    assert response['type'] == 'page_discard'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_1.id
    response = await communicator_1.receive_json_from(0.1)
    assert response['type'] == 'page_add'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_3.id

    # User 2 also subscribes to the shared table 3.
    await communicator_2.send_json_to({
        'page': 'table',
        'table_id': table_3.id
    })
    response = await communicator_2.receive_json_from(0.1)
    assert response['type'] == 'page_add'
    assert response['page'] == 'table'
    assert response['parameters']['table_id'] == table_3.id

    # Both connections are subscribed to table 3 now, so both receive.
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_3.id}',
        {'message': 'test2'}
    )
    response_1 = await communicator_1.receive_json_from(0.1)
    assert response_1['message'] == 'test2'
    response_1 = await communicator_2.receive_json_from(0.1)
    assert response_1['message'] == 'test2'

    # Excluding web socket id 1 must leave only connection 2 receiving.
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_3.id}',
        {'message': 'test3'},
        web_socket_id_1
    )
    await communicator_1.receive_nothing(0.1)
    response_1 = await communicator_2.receive_json_from(0.1)
    assert response_1['message'] == 'test3'

    # Nobody is subscribed to table 2, so nobody receives.
    await sync_to_async(broadcast_to_channel_group)(
        f'table-{table_2.id}',
        {'message': 'test4'}
    )
    await communicator_1.receive_nothing(0.1)
    await communicator_2.receive_nothing(0.1)

    # No unconsumed messages may be left behind.
    assert communicator_1.output_queue.qsize() == 0
    assert communicator_2.output_queue.qsize() == 0
    await communicator_1.disconnect()
    await communicator_2.disconnect()
@pytest.mark.run(order=6)
@pytest.mark.asyncio
@pytest.mark.django_db(transaction=True)
async def test_broadcast_to_group(data_fixture):
    """
    Verifies that `broadcast_to_group` delivers a message to every connected
    member of the given group, and that a specific connection can be excluded
    from delivery via `ignore_web_socket_id`.
    """

    user_1, token_1 = data_fixture.create_user_and_token()
    user_2, token_2 = data_fixture.create_user_and_token()
    user_3, token_3 = data_fixture.create_user_and_token()
    # User 4 is a group member without an open connection; user 3 is only in
    # group 2.
    user_4, token_4 = data_fixture.create_user_and_token()
    group_1 = data_fixture.create_group(users=[user_1, user_2, user_4])
    group_2 = data_fixture.create_group(users=[user_2, user_3])

    # Connect users 1, 2 and 3. The first received message is always the
    # authentication response containing the connection's web socket id.
    communicator_1 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_1}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_1.connect()
    response_1 = await communicator_1.receive_json_from()
    web_socket_id_1 = response_1['web_socket_id']

    communicator_2 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_2}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_2.connect()
    response_2 = await communicator_2.receive_json_from()
    web_socket_id_2 = response_2['web_socket_id']

    communicator_3 = WebsocketCommunicator(
        application,
        f'ws/core/?jwt_token={token_3}',
        headers=[(b"origin", b"http://localhost")]
    )
    await communicator_3.connect()
    await communicator_3.receive_json_from()

    # Group 1 contains users 1 and 2 (and the unconnected user 4), so only
    # connections 1 and 2 receive.
    await database_sync_to_async(broadcast_to_group)(group_1.id, {'message': 'test'})
    response_1 = await communicator_1.receive_json_from(0.1)
    response_2 = await communicator_2.receive_json_from(0.1)
    await communicator_3.receive_nothing(0.1)
    assert response_1['message'] == 'test'
    assert response_2['message'] == 'test'

    # Excluding web socket id 1 must leave only connection 2 receiving.
    await database_sync_to_async(broadcast_to_group)(
        group_1.id,
        {'message': 'test2'},
        ignore_web_socket_id=web_socket_id_1
    )
    await communicator_1.receive_nothing(0.1)
    response_2 = await communicator_2.receive_json_from(0.1)
    await communicator_3.receive_nothing(0.1)
    assert response_2['message'] == 'test2'

    # Group 2 contains users 2 and 3, but connection 2 is excluded, so only
    # connection 3 receives.
    await database_sync_to_async(broadcast_to_group)(
        group_2.id,
        {'message': 'test3'},
        ignore_web_socket_id=web_socket_id_2
    )
    await communicator_1.receive_nothing(0.1)
    await communicator_2.receive_nothing(0.1)
    await communicator_3.receive_json_from(0.1)

    # No unconsumed messages may be left behind.
    assert communicator_1.output_queue.qsize() == 0
    assert communicator_2.output_queue.qsize() == 0
    assert communicator_3.output_queue.qsize() == 0
    await communicator_1.disconnect()
    await communicator_2.disconnect()
    await communicator_3.disconnect()

View file

@ -6,6 +6,7 @@
without select options.
* Fixed bug where it was not possible to create a relation to a table that has a single
select as primary field.
* Implemented real time collaboration.
## Released (2021-01-06)

View file

@ -11,6 +11,12 @@ services:
ports:
- 5432:5432
redis:
container_name: redis
image: redis:6.0
ports:
- 6379:6379
mjml:
container_name: baserow-demo-mjml
image: liminspace/mjml-tcpserver:latest
@ -26,6 +32,7 @@ services:
- 8000:8000
depends_on:
- db
- redis
- mjml
web-frontend:

View file

@ -11,6 +11,12 @@ services:
ports:
- 5432:5432
redis:
container_name: redis
image: redis:6.0
ports:
- 6379:6379
mjml:
container_name: mjml
image: liminspace/mjml-tcpserver:latest

View file

@ -0,0 +1,151 @@
# WebSocket API
The web socket API is used for real time collaboration. When a user makes a change, for
example when creating a new database application, then the backend broadcasts a
message containing that application to all the users within the related group and who
are connected to the web socket. The web-frontend uses the web socket to update already
fetched data in real time when it has changed. This ensures that the user is always
working with the most recent data without reloading the page.
## Connecting
In order to connect to the web socket you first need to authenticate via the REST API
and obtain a JSON Web Token. After that you can connect to the following URL providing
your JWT as query parameter: `wss://api.baserow.io/ws/core/?jwt_token=YOUR_JWT_TOKEN`.
If you self-host you need to replace `api.baserow.io` with your backend URL of course.
The web socket connection only receives messages of the groups that the authenticated
user belongs to. Below an example how to connect to the web socket in JavaScript.
```javascript
const socket = new WebSocket('wss://api.baserow.io/ws/core/?jwt_token=YOUR_JWT_TOKEN')
socket.onopen = () => {
console.log('The connection is made')
}
socket.onmessage = (message) => {
console.log('Received', message)
}
```
## Messages
Broadcast messages containing real time updates are always in JSON format and they
will always contain a key named `type` which indicates what has changed. For example
`create_application` could be the value of the type and in this case an additional key
`application` is provided containing the newly created application in serialized form.
Below you will find an example of a message when another user has created a database
application in a group that the receiver also belongs to. There are of course many
event types, they are described at the bottom of this page.
```json
{
"type": "application_created",
"application": {
"id": 123,
"name": "Test",
"order": 8,
"type": "database",
"group": {
"id": 1,
"name": "Bram's group"
},
"tables": []
}
}
```
## Web Socket ID
After making the connection you will receive an `authentication` message indicating if
the JWT token authentication was successful. If so, the message will also contain a
`web_socket_id`. When making a change via the API, for example creating a new
application, you can provide that id as header `WebSocketId: {YOUR_WEB_SOCKET_ID}` to
exclude yourself from the message containing the change that has already been executed.
Below you will find such an example authentication message including `web_socket_id`
and an example HTTP request containing the `WebSocketId` header.
```json
{
"type": "authentication",
"success": true,
"web_socket_id": "934254ab-0c87-4dbc-9d71-7eeab029296c"
}
```
```
PATCH /api/applications/1/
Host: api.baserow.io
Content-Type: application/json
WebSocketId: 934254ab-0c87-4dbc-9d71-7eeab029296c
{
"name": "Test"
}
```
## Subscribe to a page
A user will receive all the core messages related to groups and applications by default,
but we also have messages related to certain pages, for example to the table page.
Because we don't want to cause an overload of messages you can subscribe to a page. If
successful you will only receive messages related to that page and you will
automatically be unsubscribed as soon as you subscribe to another page.
### Table page
At the moment there is only one page, which is the table page and it expects a
`table_id` parameter. Below you will find an example how to subscribe to that page.
```json
{
"page": "table",
"table_id": 1
}
```
Once successfully subscribed you will receive a confirmation message indicating that
you are subscribed to the page.
```json
{
"type": "page_add",
"page": "table",
"parameters": {
"table_id": 1
}
}
```
## Message types
* `authentication`
* `page_add`
* `page_discard`
* `group_created`
* `group_updated`
* `group_deleted`
* `application_created`
* `application_updated`
* `application_deleted`
### Database message types
* `table_created`
* `table_updated`
* `table_deleted`
* `field_created`
* `field_updated`
* `field_deleted`
* `row_created`
* `row_updated`
* `row_deleted`
* `view_created`
* `view_updated`
* `view_deleted`
* `view_filter_created`
* `view_filter_updated`
* `view_filter_deleted`
* `view_sort_created`
* `view_sort_updated`
* `view_sort_deleted`
* `grid_view_field_options_updated`

View file

@ -21,8 +21,11 @@ server {
location / {
proxy_pass http://127.0.0.1:8000;
proxy_http_version 1.1;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
}
}

View file

@ -1,17 +1,24 @@
[supervisord]
nodaemon = true
[program:gunicorn]
environment =
DJANGO_SETTINGS_MODULE="baserow.config.settings.base",
DATABASE_HOST="localhost",
DATABASE_PASSWORD="yourpassword",
SECRET_KEY="SOMETHING_SECRET",
DJANGO_SETTINGS_MODULE='baserow.config.settings.base',
DATABASE_HOST='localhost',
DATABASE_PASSWORD='yourpassword',
SECRET_KEY='SOMETHING_SECRET',
PRIVATE_BACKEND_URL='http://localhost:8000',
PUBLIC_WEB_FRONTEND_URL='https://FRONTEND_DOMAIN',
PUBLIC_BACKEND_URL='https://BACKEND_DOMAIN',
MEDIA_ROOT='/baserow/media',
MEDIA_URL='https://MEDIA_DOMAIN'
command = /baserow/backend/env/bin/gunicorn -w 5 -b 127.0.0.1:8000 baserow.config.wsgi:application --log-level=debug --chdir=/baserow
REDIS_HOST='localhost'
[program:gunicorn]
command = /baserow/backend/env/bin/gunicorn -w 5 -b 127.0.0.1:8000 -k uvicorn.workers.UvicornWorker baserow.config.asgi:application --log-level=debug --chdir=/baserow
stdout_logfile=/var/log/baserow/backend.log
stderr_logfile=/var/log/baserow/backend.error
[program:worker]
directory=/baserow
command = /baserow/backend/env/bin/celery -A baserow worker -l INFO
stdout_logfile=/var/log/baserow/worker.log
stderr_logfile=/var/log/baserow/worker.error

View file

@ -47,6 +47,22 @@ Make sure that you use a secure password instead of `yourpassword`! Also take ca
you use the password you've chosen in any upcoming commands that need the PostgreSQL
baserow user password.
## Install & Setup Redis
Baserow uses Redis for asynchronous tasks and the real time collaboration. You can
install Redis with the following commands.
```
$ sudo add-apt-repository ppa:chris-lea/redis-server
$ sudo apt update
$ sudo apt install redis-server -y
$ sed -i 's/supervised no/supervised systemd/g' /etc/redis/redis.conf
$ sudo systemctl enable --now redis-server
$ sudo systemctl restart redis.service
```
By default Redis is not publicly accessible, so there is no need to setup a password.
## Install Baserow
In this section, we will install Baserow itself. We will need a new user called
@ -172,7 +188,7 @@ commands:
# Prepare for creating the database schema
$ source backend/env/bin/activate
$ export DJANGO_SETTINGS_MODULE='baserow.config.settings.base'
$ export DATABASE_PASSWORD="yourpassword"
$ export DATABASE_PASSWORD='yourpassword'
$ export DATABASE_HOST="localhost"
# Create database schema
@ -205,13 +221,14 @@ You will need to edit the `baserow-frontend.conf` and `baserow-backend.conf` fil
variables. You will need to change at least the following variables which can be found
in the `environment =` section.
**Web frontend and backend**
- `PUBLIC_WEB_FRONTEND_URL`: The URL under which your frontend can be reached from the
internet (HTTP or HTTPS)
internet.
- `PUBLIC_BACKEND_URL`: The URL under which your backend can be reached from the
internet (HTTP or HTTPS)
internet.
- `MEDIA_URL`: The URL under which your media files can be reached from the internet.
**Backend**
- `SECRET_KEY`: The secret key that is used to generate tokens and other random
strings. You can generate one with the following commands:
```bash
@ -219,6 +236,7 @@ internet (HTTP or HTTPS)
```
- `DATABASE_PASSWORD`: The password of the `baserow` database user
- `DATABASE_HOST`: The host computer that runs the database (usually `localhost`)
- `REDIS_HOST`: The host computer that runs the caching server (usually `localhost`)
After modifying these files you need to make supervisor reread the files and apply the
changes.
@ -272,3 +290,28 @@ $ supervisorctl restart nginx
You now have a full installation of Baserow, which will keep the Front- & Backend
running even if there is an unforeseen termination of them.
## Updating existing installation to the latest version
If you already have Baserow installed on your server and you want to update to the
latest version then you can execute the following commands. This only works if there
aren't any additional instructions in the previous release blog posts.
```
$ cd /baserow
$ git pull
$ source backend/env/bin/activate
$ pip3 install -e ./backend
$ export DJANGO_SETTINGS_MODULE='baserow.config.settings.base'
$ export DATABASE_PASSWORD='yourpassword'
$ export DATABASE_HOST='localhost'
$ baserow migrate
$ deactivate
$ cd web-frontend
$ yarn install
$ ./node_modules/nuxt/bin/nuxt.js build --config-file config/nuxt.config.demo.js
$ supervisorctl reread
$ supervisorctl update
$ supervisorctl restart all
```

View file

@ -16,6 +16,8 @@ New to Baserow? This is the place to start.
concepts before using Baserow.
* [API](./getting-started/api.md): An introduction to the REST API and information
about API resources.
* [WebSocket API](./getting-started/web-socket-api.md): An introduction to the
WebSocket API which is used to broadcast real time updates.
* [Database plugin](./getting-started/database-plugin.md) An introduction to the
database plugin which is installed by default.

View file

@ -23,6 +23,10 @@ new_tab "Backend" \
"docker exec -it backend bash" \
"python src/baserow/manage.py runserver 0.0.0.0:8000"
new_tab "Backend" \
"docker exec -it backend bash" \
"watchmedo auto-restart --directory=./ --pattern=*.py --recursive -- celery -A baserow worker -l INFO"
new_tab "Web frontend" \
"docker exec -it web-frontend bash" \
"yarn run dev"

View file

@ -141,4 +141,10 @@ export class ApplicationType extends Registerable {
*
*/
clearChildrenSelected(application) {}
/**
* Before the application values are updated, they can be modified here. This
* might be needed because providing certain values could break the update.
*/
prepareForStoreUpdate(application, data) {}
}

View file

@ -29,6 +29,10 @@
border-radius: 100%;
}
.alert__icon-loading {
margin: 9px auto 0 auto;
}
.alert__title {
font-size: 14px;
font-weight: 700;

View file

@ -0,0 +1,15 @@
<template>
<div class="alert alert--simple alert--with-shadow alert--has-icon">
<div class="alert__icon">
<div class="loading alert__icon-loading"></div>
</div>
<div class="alert__title">Reconnecting</div>
<p class="alert__content">Reconnecting with server.</p>
</div>
</template>
<script>
export default {
name: 'ConnectingNotification',
}
</script>

View file

@ -1,5 +1,6 @@
<template>
<div class="notifications">
<ConnectingNotification v-if="connecting"></ConnectingNotification>
<Notification
v-for="notification in notifications"
:key="notification.id"
@ -12,12 +13,14 @@
import { mapState } from 'vuex'
import Notification from '@baserow/modules/core/components/notifications/Notification'
import ConnectingNotification from '@baserow/modules/core/components/notifications/ConnectingNotification'
export default {
name: 'Notifications',
components: { Notification },
components: { Notification, ConnectingNotification },
computed: {
...mapState({
connecting: (state) => state.notification.connecting,
notifications: (state) => state.notification.items,
}),
},

View file

@ -115,6 +115,13 @@ export default {
nameAbbreviation: 'auth/getNameAbbreviation',
}),
},
mounted() {
// Connect to the web socket so we can start receiving real time updates.
this.$realtime.connect()
},
beforeDestroy() {
this.$realtime.disconnect()
},
methods: {
logoff() {
this.$store.dispatch('auth/logoff')

View file

@ -84,6 +84,9 @@ export default function DatabaseModule(options) {
this.appendPlugin({
src: path.resolve(__dirname, 'plugins/clientHandler.js'),
})
this.appendPlugin({
src: path.resolve(__dirname, 'plugins/realTimeHandler.js'),
})
this.extendRoutes((configRoutes) => {
// Remove all the routes created by nuxt.

View file

@ -209,6 +209,10 @@ export default function ({ store, app }, inject) {
const token = store.getters['auth/token']
config.headers.Authorization = `JWT ${token}`
}
if (store.getters['auth/webSocketId'] !== null) {
const webSocketId = store.getters['auth/webSocketId']
config.headers.WebSocketId = webSocketId
}
return config
})

View file

@ -0,0 +1,204 @@
import { isSecureURL } from '@baserow/modules/core/utils/string'
export class RealTimeHandler {
  constructor(context) {
    this.context = context
    this.socket = null
    this.connected = false
    this.reconnect = false
    this.reconnectTimeout = null
    this.events = {}
    this.attempts = 0
    this.page = null
    this.pageParameters = {}
    this.subscribedToPage = true
    this.registerCoreEvents()
  }

  /**
   * Creates a new connection to the web socket so that real time updates can be
   * received.
   */
  connect(reconnect = true) {
    this.reconnect = reconnect

    const token = this.context.store.getters['auth/token']

    // The web socket url is the same as the PUBLIC_BACKEND_URL apart from the
    // protocol.
    const rawUrl = this.context.app.$env.PUBLIC_BACKEND_URL
    const url = new URL(rawUrl)
    url.protocol = isSecureURL(rawUrl) ? 'wss:' : 'ws:'
    url.pathname = '/ws/core/'
    this.socket = new WebSocket(`${url}?jwt_token=${token}`)

    this.socket.onopen = () => {
      this.context.store.dispatch('notification/setConnecting', false)
      this.connected = true
      this.attempts = 0

      // If the client needs to be subscribed to a page we can do that directly
      // after connecting.
      if (!this.subscribedToPage) {
        this.subscribeToPage()
      }
    }

    /**
     * The received messages are always JSON so we need to parse it, extract the
     * type and call the correct event.
     */
    this.socket.onmessage = (message) => {
      let data = {}

      try {
        data = JSON.parse(message.data)
      } catch {
        // Silently ignore messages that are not valid JSON.
        return
      }

      if (
        Object.prototype.hasOwnProperty.call(data, 'type') &&
        Object.prototype.hasOwnProperty.call(this.events, data.type)
      ) {
        this.events[data.type](this.context, data)
      }
    }

    /**
     * When the connection closes we want to reconnect immediately because we don't
     * want to miss any important real time updates. After the first attempt we want
     * to delay retry with 5 seconds.
     */
    this.socket.onclose = () => {
      this.connected = false
      // By default the user not subscribed to a page a.k.a `null`, so if the current
      // page is already null we can mark it as subscribed.
      this.subscribedToPage = this.page === null

      // Automatically reconnect if the socket closes.
      if (this.reconnect) {
        this.attempts++
        this.context.store.dispatch('notification/setConnecting', true)

        this.reconnectTimeout = setTimeout(
          () => {
            this.connect(true)
          },
          // `attempts` has already been incremented above, so the very first
          // attempt sees the value 1 and must reconnect immediately. Every
          // attempt after that waits 5 seconds. (Using `> 0` here would make
          // the condition always true and delay the first reconnect as well.)
          this.attempts > 1 ? 5000 : 0
        )
      }
    }
  }

  /**
   * Subscribes the client to a given page. After subscribing the client will
   * receive updates related to that page. This is for example used when a user
   * opens a table page.
   */
  subscribe(page, parameters) {
    this.page = page
    this.pageParameters = parameters
    this.subscribedToPage = false

    // If the client is already connected we can directly subscribe to the page.
    if (this.connected) {
      this.subscribeToPage()
    }
  }

  /**
   * Sends a request to the real time server that updates for a certain page +
   * parameters must be received.
   */
  subscribeToPage() {
    this.socket.send(
      JSON.stringify({
        page: this.page === null ? '' : this.page,
        ...this.pageParameters,
      })
    )
    this.subscribedToPage = true
  }

  /**
   * Disconnects the socket and resets all the variables. This can be used when
   * navigating to another page that doesn't require updates.
   */
  disconnect() {
    if (!this.connected) {
      return
    }

    this.context.store.dispatch('notification/setConnecting', false)
    clearTimeout(this.reconnectTimeout)
    this.reconnect = false
    this.attempts = 0
    this.connected = false
    this.socket.close()
  }

  /**
   * Registers a new event with the event registry.
   */
  registerEvent(type, callback) {
    this.events[type] = callback
  }

  /**
   * Registers all the core event handlers, which is for the groups and applications.
   */
  registerCoreEvents() {
    // When the authentication is successful we want to store the web socket id in
    // auth store. Every AJAX request will include the web socket id as header, this
    // way the backend knows that this client does not have to receive the event
    // because we already know about the change.
    this.registerEvent('authentication', ({ store }, data) => {
      store.dispatch('auth/setWebSocketId', data.web_socket_id)
    })

    this.registerEvent('group_created', ({ store }, data) => {
      store.dispatch('group/forceCreate', data.group)
    })

    this.registerEvent('group_updated', ({ store }, data) => {
      const group = store.getters['group/get'](data.group_id)
      if (group !== undefined) {
        store.dispatch('group/forceUpdate', { group, values: data.group })
      }
    })

    this.registerEvent('group_deleted', ({ store }, data) => {
      const group = store.getters['group/get'](data.group_id)
      if (group !== undefined) {
        store.dispatch('group/forceDelete', group)
      }
    })

    this.registerEvent('application_created', ({ store }, data) => {
      store.dispatch('application/forceCreate', { data: data.application })
    })

    this.registerEvent('application_updated', ({ store }, data) => {
      const application = store.getters['application/get'](data.application_id)
      if (application !== undefined) {
        store.dispatch('application/forceUpdate', {
          application,
          data: data.application,
        })
      }
    })

    this.registerEvent('application_deleted', ({ store }, data) => {
      const application = store.getters['application/get'](data.application_id)
      if (application !== undefined) {
        store.dispatch('application/forceDelete', application)
      }
    })
  }
}
export default function (context, inject) {
inject('realtime', new RealTimeHandler(context))
}

View file

@ -134,6 +134,12 @@ export const actions = {
group.id,
postData
)
dispatch('forceCreate', { data })
},
/**
* Forcefully create an item in the store without making a call to the server.
*/
forceCreate({ commit }, { data }) {
populateApplication(data, this.$registry)
commit('ADD_ITEM', data)
},
@ -145,12 +151,22 @@ export const actions = {
application.id,
values
)
// Create a dict with only the values we want to update.
const update = Object.keys(values).reduce((result, key) => {
result[key] = data[key]
return result
}, {})
commit('UPDATE_ITEM', { id: application.id, values: update })
dispatch('forceUpdate', { application, data: update })
},
/**
* Forcefully update an item in the store without making a call to the server.
*/
forceUpdate({ commit }, { application, data }) {
const type = this.$registry.get('application', application.type)
data = type.prepareForStoreUpdate(application, data)
commit('UPDATE_ITEM', { id: application.id, values: data })
},
/**
* Deletes an existing application.
@ -158,17 +174,23 @@ export const actions = {
async delete({ commit, dispatch, getters }, application) {
try {
await ApplicationService(this.$client).delete(application.id)
const type = this.$registry.get('application', application.type)
type.delete(application, this)
commit('DELETE_ITEM', application.id)
dispatch('forceDelete', application)
} catch (error) {
if (error.response && error.response.status === 404) {
commit('DELETE_ITEM', application.id)
dispatch('forceDelete', application)
} else {
throw error
}
}
},
/**
* Forcefully delete an item in the store without making a call to the server.
*/
forceDelete({ commit }, application) {
const type = this.$registry.get('application', application.type)
type.delete(application, this)
commit('DELETE_ITEM', application.id)
},
/**
* Select an application.
*/

View file

@ -8,6 +8,7 @@ export const state = () => ({
refreshing: false,
token: null,
user: null,
webSocketId: null,
})
export const mutations = {
@ -23,6 +24,9 @@ export const mutations = {
SET_REFRESHING(state, refreshing) {
state.refreshing = refreshing
},
SET_WEB_SOCKET_ID(state, id) {
state.webSocketId = id
},
}
export const actions = {
@ -102,6 +106,14 @@ export const actions = {
commit('SET_REFRESHING', false)
}, (getters.tokenExpireSeconds - 30) * 1000)
},
/**
* The web socket id is generated by the backend when connecting to the real time
* updates web socket. This id will be added to each AJAX request so the backend
* knows not to send any real time changes to the sender.
*/
setWebSocketId({ commit }, webSocketId) {
commit('SET_WEB_SOCKET_ID', webSocketId)
},
}
export const getters = {
@ -114,6 +126,9 @@ export const getters = {
token(state) {
return state.token
},
webSocketId(state) {
return state.webSocketId
},
getName(state) {
return state.user ? state.user.first_name : ''
},

View file

@ -107,9 +107,15 @@ export const actions = {
/**
* Creates a new group with the given values.
*/
async create({ commit }, values) {
async create({ commit, dispatch }, values) {
const { data } = await GroupService(this.$client).create(values)
commit('ADD_ITEM', data)
dispatch('forceCreate', data)
},
/**
* Forcefully create an item in the store without making a call to the server.
*/
forceCreate({ commit }, values) {
commit('ADD_ITEM', values)
},
/**
* Updates the values of the group with the provided id.
@ -121,7 +127,13 @@ export const actions = {
result[key] = data[key]
return result
}, {})
commit('UPDATE_ITEM', { id: group.id, values: update })
dispatch('forceUpdate', { group, values: update })
},
/**
* Forcefully update the item in the store without making a call to the server.
*/
forceUpdate({ commit }, { group, values }) {
commit('UPDATE_ITEM', { id: group.id, values })
},
/**
* Deletes an existing group with the provided id.
@ -141,7 +153,7 @@ export const actions = {
}
},
/**
* Forcibly remove the group from the items without calling the server. The
* Forcefully remove the group from the items without calling the server. The
* delete event is also called for all the applications that are in the
* group. This is needed so that we can redirect the user to another page if for
* example a Table is open that has been deleted because the group has been deleted.

View file

@ -1,6 +1,7 @@
import { uuid } from '@baserow/modules/core/utils/string'
export const state = () => ({
connecting: false,
items: [],
})
@ -12,6 +13,9 @@ export const mutations = {
const index = state.items.indexOf(notification)
state.items.splice(index, 1)
},
SET_CONNECTING(state, value) {
state.connecting = value
},
}
export const actions = {
@ -41,6 +45,9 @@ export const actions = {
remove({ commit }, notification) {
commit('REMOVE', notification)
},
setConnecting({ commit }, value) {
commit('SET_CONNECTING', value)
},
}
export const getters = {}

View file

@ -85,4 +85,16 @@ export class DatabaseApplicationType extends ApplicationType {
}
})
}
/**
* It is not possible to update the tables by updating the application. In fact,
* providing the tables as value could break the current table state. That is why
* we remove it here.
*/
prepareForStoreUpdate(application, data) {
if (Object.prototype.hasOwnProperty.call(data, 'tables')) {
delete data.tables
}
return data
}
}

View file

@ -76,6 +76,11 @@ export default {
: this.fields.find((f) => f.id === this.fieldId)
},
},
watch: {
value(value) {
this.setCopy(value)
},
},
created() {
this.setCopy(this.value)
},

View file

@ -18,6 +18,12 @@ export default {
copy: null,
}
},
watch: {
value(value) {
this.copy = value
clearTimeout(delayTimeout)
},
},
created() {
this.copy = this.value
},

View file

@ -211,6 +211,16 @@ export default {
this.$store.dispatch('table/unselect')
next()
},
beforeMount() {
this.$bus.$on('table-refresh', this.refresh)
},
mounted() {
this.$realtime.subscribe('table', { table_id: this.table.id })
},
beforeDestroy() {
this.$bus.$off('table-refresh', this.refresh)
this.$realtime.subscribe(null)
},
methods: {
getViewComponent(view) {
const type = this.$registry.get('view', view.type)

View file

@ -38,6 +38,8 @@ import viewStore from '@baserow/modules/database/store/view'
import fieldStore from '@baserow/modules/database/store/field'
import gridStore from '@baserow/modules/database/store/view/grid'
import { registerRealtimeEvents } from '@baserow/modules/database/realtime'
export default ({ store, app }) => {
store.registerModule('table', tableStore)
store.registerModule('view', viewStore)
@ -72,4 +74,6 @@ export default ({ store, app }) => {
app.$registry.register('importer', new CSVImporterType())
app.$registry.register('importer', new PasteImporterType())
app.$registry.register('settings', new APITokenSettingsType())
registerRealtimeEvents(app.$realtime)
}

View file

@ -0,0 +1,222 @@
import { clone } from '@baserow/modules/core/utils/object'
/**
* Registers the real time events related to the database module. When a message comes
* in, the state of the stores will be updated to match the latest update. In some
* cases some other events like refreshing all the data needs to be triggered.
*/
/**
 * Registers the real time events related to the database module. When a message
 * comes in, the state of the stores will be updated to match the latest update.
 * In some cases another event, like refreshing all the table data, needs to be
 * triggered via the `table-refresh` event on the event bus.
 *
 * @param realtime  The realtime service exposing `registerEvent(name, handler)`.
 *                  Each handler receives a `{ store, app }` context and the
 *                  event payload sent by the backend.
 */
export const registerRealtimeEvents = (realtime) => {
  realtime.registerEvent('table_created', ({ store }, data) => {
    const database = store.getters['application/get'](data.table.database_id)
    if (database !== undefined) {
      store.dispatch('table/forceCreate', { database, data: data.table })
    }
  })

  realtime.registerEvent('table_updated', ({ store }, data) => {
    const database = store.getters['application/get'](data.table.database_id)
    if (database !== undefined) {
      const table = database.tables.find((table) => table.id === data.table.id)
      if (table !== undefined) {
        store.dispatch('table/forceUpdate', {
          database,
          table,
          values: data.table,
        })
      }
    }
  })

  realtime.registerEvent('table_deleted', ({ store }, data) => {
    const database = store.getters['application/get'](data.database_id)
    if (database !== undefined) {
      const table = database.tables.find((table) => table.id === data.table_id)
      if (table !== undefined) {
        store.dispatch('table/forceDelete', { database, table })
      }
    }
  })

  realtime.registerEvent('field_created', ({ store }, data) => {
    const table = store.getters['table/getSelected']
    if (table !== undefined && table.id === data.field.table_id) {
      store.dispatch('field/forceCreate', { table, values: data.field })
    }
  })

  realtime.registerEvent('field_updated', ({ store, app }, data) => {
    const field = store.getters['field/get'](data.field.id)
    if (field !== undefined) {
      // Keep a copy of the old field values because the view types need them to
      // clean up view specific state (e.g. filters) related to the old type.
      const oldField = clone(field)
      store.dispatch('field/forceUpdate', {
        field,
        oldField,
        data: data.field,
      })
      // A changed field can influence cell rendering, filters and sortings, so
      // the currently open table has to reload its rows.
      if (store.getters['table/getSelectedId'] === data.field.table_id) {
        app.$bus.$emit('table-refresh')
      }
    }
  })

  realtime.registerEvent('field_deleted', ({ store, app }, data) => {
    const field = store.getters['field/get'](data.field_id)
    if (field !== undefined) {
      store.dispatch('field/forceDelete', field)
      if (store.getters['table/getSelectedId'] === data.table_id) {
        app.$bus.$emit('table-refresh')
      }
    }
  })

  // Row events are delegated to every registered view type because each view
  // stores its row data differently.
  realtime.registerEvent('row_created', (context, data) => {
    const { app } = context
    for (const viewType of Object.values(app.$registry.getAll('view'))) {
      viewType.rowCreated(context, data.table_id, data.row, data.before_row_id)
    }
  })

  realtime.registerEvent('row_updated', (context, data) => {
    const { app } = context
    for (const viewType of Object.values(app.$registry.getAll('view'))) {
      viewType.rowUpdated(context, data.table_id, data.row)
    }
  })

  realtime.registerEvent('row_deleted', (context, data) => {
    const { app } = context
    for (const viewType of Object.values(app.$registry.getAll('view'))) {
      viewType.rowDeleted(context, data.table_id, data.row_id)
    }
  })

  realtime.registerEvent('view_created', ({ store }, data) => {
    if (store.getters['table/getSelectedId'] === data.view.table_id) {
      store.dispatch('view/forceCreate', { data: data.view })
    }
  })

  realtime.registerEvent('view_updated', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view.id)
    if (view !== undefined) {
      // Remember the filter related values before the update so we can detect
      // whether the visible rows could have changed.
      const filterType = view.filter_type
      const filtersDisabled = view.filters_disabled
      store.dispatch('view/forceUpdate', { view, values: data.view })
      if (
        store.getters['view/getSelectedId'] === view.id &&
        (filterType !== data.view.filter_type ||
          filtersDisabled !== data.view.filters_disabled)
      ) {
        app.$bus.$emit('table-refresh')
      }
    }
  })

  realtime.registerEvent('view_deleted', ({ store }, data) => {
    const view = store.getters['view/get'](data.view_id)
    if (view !== undefined) {
      store.dispatch('view/forceDelete', view)
    }
  })

  realtime.registerEvent('view_filter_created', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_filter.view)
    if (view !== undefined) {
      store.dispatch('view/forceCreateFilter', {
        view,
        values: data.view_filter,
      })
      if (store.getters['view/getSelectedId'] === view.id) {
        app.$bus.$emit('table-refresh')
      }
    }
  })

  realtime.registerEvent('view_filter_updated', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_filter.view)
    if (view !== undefined) {
      const filter = view.filters.find(
        (filter) => filter.id === data.view_filter.id
      )
      if (filter !== undefined) {
        store.dispatch('view/forceUpdateFilter', {
          filter,
          values: data.view_filter,
        })
        if (store.getters['view/getSelectedId'] === view.id) {
          app.$bus.$emit('table-refresh')
        }
      }
    }
  })

  realtime.registerEvent('view_filter_deleted', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_id)
    if (view !== undefined) {
      const filter = view.filters.find(
        (filter) => filter.id === data.view_filter_id
      )
      if (filter !== undefined) {
        store.dispatch('view/forceDeleteFilter', { view, filter })
        if (store.getters['view/getSelectedId'] === view.id) {
          app.$bus.$emit('table-refresh')
        }
      }
    }
  })

  realtime.registerEvent('view_sort_created', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_sort.view)
    if (view !== undefined) {
      store.dispatch('view/forceCreateSort', {
        view,
        values: data.view_sort,
      })
      if (store.getters['view/getSelectedId'] === view.id) {
        app.$bus.$emit('table-refresh')
      }
    }
  })

  realtime.registerEvent('view_sort_updated', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_sort.view)
    if (view !== undefined) {
      // The update payload contains the full `view_sort` object, so the id must
      // be read from it. Using `data.view_sort_id` (only sent for the deleted
      // event) would never find the sort and made this handler a no-op.
      const sort = view.sortings.find((sort) => sort.id === data.view_sort.id)
      if (sort !== undefined) {
        store.dispatch('view/forceUpdateSort', {
          sort,
          values: data.view_sort,
        })
        if (store.getters['view/getSelectedId'] === view.id) {
          app.$bus.$emit('table-refresh')
        }
      }
    }
  })

  realtime.registerEvent('view_sort_deleted', ({ store, app }, data) => {
    const view = store.getters['view/get'](data.view_id)
    if (view !== undefined) {
      const sort = view.sortings.find((sort) => sort.id === data.view_sort_id)
      if (sort !== undefined) {
        store.dispatch('view/forceDeleteSort', { view, sort })
        if (store.getters['view/getSelectedId'] === view.id) {
          app.$bus.$emit('table-refresh')
        }
      }
    }
  })

  realtime.registerEvent(
    'grid_view_field_options_updated',
    ({ store }, data) => {
      const view = store.getters['view/get'](data.grid_view_id)
      // The getter returns `undefined` (not `null`) when the view is unknown,
      // so checking against undefined prevents a TypeError on `view.id` below.
      if (view !== undefined && view.id === store.getters['view/getSelectedId']) {
        store.dispatch(
          'view/grid/forceUpdateAllFieldOptions',
          data.grid_view_field_options
        )
      }
    }
  )
}

View file

@ -107,7 +107,7 @@ export const actions = {
* Creates a new field with the provided type for the given table.
*/
async create(context, { type, table, values }) {
const { commit } = context
const { dispatch } = context
if (Object.prototype.hasOwnProperty.call(values, 'type')) {
throw new Error(
@ -120,13 +120,19 @@ export const actions = {
throw new Error(`A field with type "${type}" doesn't exist.`)
}
const fieldType = this.$registry.get('field', type)
const postData = clone(values)
postData.type = type
let { data } = await FieldService(this.$client).create(table.id, postData)
data = populateField(data, this.$registry)
const { data } = await FieldService(this.$client).create(table.id, postData)
dispatch('forceCreate', { table, values: data })
},
/**
* Forcefully create a new field without making a call to the backend.
*/
async forceCreate(context, { table, values }) {
const { commit } = context
const fieldType = this.$registry.get('field', values.type)
const data = populateField(values, this.$registry)
commit('ADD_ITEM', data)
// Call the field created event on all the registered views because they might
@ -140,7 +146,7 @@ export const actions = {
* Updates the values of the provided field.
*/
async update(context, { field, type, values }) {
const { dispatch, commit } = context
const { dispatch } = context
if (Object.prototype.hasOwnProperty.call(values, 'type')) {
throw new Error(
@ -153,13 +159,19 @@ export const actions = {
throw new Error(`A field with type "${type}" doesn't exist.`)
}
const fieldType = this.$registry.get('field', type)
const oldField = clone(field)
const postData = clone(values)
postData.type = type
let { data } = await FieldService(this.$client).update(field.id, postData)
const { data } = await FieldService(this.$client).update(field.id, postData)
await dispatch('forceUpdate', { field, oldField, data })
},
/**
* Forcefully update an existing field without making a request to the backend.
*/
async forceUpdate(context, { field, oldField, data }) {
const { commit, dispatch } = context
const fieldType = this.$registry.get('field', data.type)
data = populateField(data, this.$registry)
if (field.primary) {

View file

@ -77,10 +77,16 @@ export const actions = {
initialData,
firstRowHeader
)
commit('ADD_ITEM', { database, table: data })
dispatch('forceCreate', { database, data })
return data
},
/**
* Forcefully create an item in the store without making a call to the server.
*/
forceCreate({ commit }, { database, data }) {
commit('ADD_ITEM', { database, table: data })
},
/**
* Update an existing table of the provided database with the provided tables.
*/
@ -91,7 +97,14 @@ export const actions = {
result[key] = data[key]
return result
}, {})
commit('UPDATE_ITEM', { database, table, values: update })
dispatch('forceUpdate', { database, table, values: update })
},
/**
* Forcefully update an item in the store without making a call to the server.
*/
forceUpdate({ commit }, { database, table, values }) {
commit('UPDATE_ITEM', { database, table, values })
},
/**
* Deletes an existing application.

View file

@ -216,6 +216,12 @@ export const actions = {
postData.type = type
const { data } = await ViewService(this.$client).create(table.id, postData)
dispatch('forceCreate', { data })
},
/**
* Forcefully create a new view without making a request to the server.
*/
forceCreate({ commit }, { data }) {
populateView(data, this.$registry)
commit('ADD_ITEM', data)
},
@ -232,16 +238,22 @@ export const actions = {
}
})
commit('UPDATE_ITEM', { id: view.id, values: newValues })
dispatch('forceUpdate', { view, values: newValues })
try {
await ViewService(this.$client).update(view.id, values)
commit('SET_ITEM_LOADING', { view, value: false })
} catch (error) {
commit('UPDATE_ITEM', { id: view.id, values: oldValues })
dispatch('forceUpdate', { view, values: oldValues })
throw error
}
},
/**
* Forcefully update an existing view without making a request to the backend.
*/
forceUpdate({ commit }, { view, values }) {
commit('UPDATE_ITEM', { id: view.id, values })
},
/**
* Deletes an existing view with the provided id. A request to the server is first
* made and after that it will be deleted from the store.
@ -361,11 +373,19 @@ export const actions = {
return { filter }
},
/**
* Forcefully create a new view filter without making a request to the backend.
*/
forceCreateFilter({ commit }, { view, values }) {
const filter = _.assign({}, values)
populateFilter(filter)
commit('ADD_FILTER', { view, filter })
},
/**
* Updates the filter values in the store right away. If the API call fails the
* changes will be undone.
*/
async updateFilter({ commit }, { filter, values }) {
async updateFilter({ dispatch, commit }, { filter, values }) {
commit('SET_FILTER_LOADING', { filter, value: true })
const oldValues = {}
@ -377,32 +397,44 @@ export const actions = {
}
})
commit('UPDATE_FILTER', { filter, values: newValues })
dispatch('forceUpdateFilter', { filter, values: newValues })
try {
await FilterService(this.$client).update(filter.id, values)
commit('SET_FILTER_LOADING', { filter, value: false })
} catch (error) {
commit('UPDATE_FILTER', { filter, values: oldValues })
dispatch('forceUpdateFilter', { filter, values: oldValues })
commit('SET_FILTER_LOADING', { filter, value: false })
throw error
}
},
/**
* Forcefully update an existing view filter without making a request to the backend.
*/
forceUpdateFilter({ commit }, { filter, values }) {
commit('UPDATE_FILTER', { filter, values })
},
/**
* Deletes an existing filter. A request to the server will be made first and
* after that it will be deleted.
*/
async deleteFilter({ commit }, { view, filter }) {
async deleteFilter({ dispatch, commit }, { view, filter }) {
commit('SET_FILTER_LOADING', { filter, value: true })
try {
await FilterService(this.$client).delete(filter.id)
commit('DELETE_FILTER', { view, id: filter.id })
dispatch('forceDeleteFilter', { view, filter })
} catch (error) {
commit('SET_FILTER_LOADING', { filter, value: false })
throw error
}
},
/**
* Forcefully delete an existing view filter without making a request to the backend.
*/
forceDeleteFilter({ commit }, { view, filter }) {
commit('DELETE_FILTER', { view, id: filter.id })
},
/**
* When a field is deleted the related filters are also automatically deleted in the
* backend so they need to be removed here.
@ -445,11 +477,19 @@ export const actions = {
return { sort }
},
/**
* Forcefully create a new view sorting without making a request to the backend.
*/
forceCreateSort({ commit }, { view, values }) {
const sort = _.assign({}, values)
populateSort(sort)
commit('ADD_SORT', { view, sort })
},
/**
* Updates the sort values in the store right away. If the API call fails the
* changes will be undone.
*/
async updateSort({ commit }, { sort, values }) {
async updateSort({ dispatch, commit }, { sort, values }) {
commit('SET_SORT_LOADING', { sort, value: true })
const oldValues = {}
@ -461,32 +501,44 @@ export const actions = {
}
})
commit('UPDATE_SORT', { sort, values: newValues })
dispatch('forceUpdateSort', { sort, values: newValues })
try {
await SortService(this.$client).update(sort.id, values)
commit('SET_SORT_LOADING', { sort, value: false })
} catch (error) {
commit('UPDATE_SORT', { sort, values: oldValues })
dispatch('forceUpdateSort', { sort, values: oldValues })
commit('SET_SORT_LOADING', { sort, value: false })
throw error
}
},
/**
* Forcefully update an existing view sort without making a request to the backend.
*/
forceUpdateSort({ commit }, { sort, values }) {
commit('UPDATE_SORT', { sort, values })
},
/**
* Deletes an existing sort. A request to the server will be made first and
* after that it will be deleted.
*/
async deleteSort({ commit }, { view, sort }) {
async deleteSort({ dispatch, commit }, { view, sort }) {
commit('SET_SORT_LOADING', { sort, value: true })
try {
await SortService(this.$client).delete(sort.id)
commit('DELETE_SORT', { view, id: sort.id })
dispatch('forceDeleteSort', { view, sort })
} catch (error) {
commit('SET_SORT_LOADING', { sort, value: false })
throw error
}
},
/**
* Forcefully delete an existing view sort without making a request to the backend.
*/
forceDeleteSort({ commit }, { view, sort }) {
commit('DELETE_SORT', { view, id: sort.id })
},
/**
* When a field is deleted the related sortings are also automatically deleted in the
* backend so they need to be removed here.
@ -536,6 +588,12 @@ export const getters = {
hasSelected(state) {
return Object.prototype.hasOwnProperty.call(state.selected, '_')
},
getSelected(state) {
return state.selected
},
getSelectedId(state) {
return state.selected.id || 0
},
isLoaded(state) {
return state.loaded
},

View file

@ -6,7 +6,10 @@ import BigNumber from 'bignumber.js'
import { uuid } from '@baserow/modules/core/utils/string'
import GridService from '@baserow/modules/database/services/view/grid'
import RowService from '@baserow/modules/database/services/row'
import { getRowSortFunction } from '@baserow/modules/database/utils/view'
import {
getRowSortFunction,
rowMatchesFilters,
} from '@baserow/modules/database/utils/view'
export function populateRow(row) {
row._ = {
@ -172,6 +175,9 @@ export const mutations = {
SET_VALUE(state, { row, field, value }) {
row[`field_${field.id}`] = value
},
UPDATE_ROW(state, { row, values }) {
_.assign(row, values)
},
UPDATE_ROWS(state, { rows }) {
rows.forEach((newRow) => {
const row = state.rows.find((row) => row.id === newRow.id)
@ -561,45 +567,19 @@ export const actions = {
* override values that not actually belong to the row to do some preliminary checks.
*/
updateMatchFilters({ commit }, { view, row, overrides = {} }) {
const isValid = (filters, values) => {
// If there aren't any filters then it is not possible to check if the row
// matches any of the filters, so we can mark it as valid.
if (filters.length === 0) {
return true
}
for (const i in filters) {
const filterType = this.$registry.get('viewFilter', filters[i].type)
const filterValue = filters[i].value
const rowValue = values[`field_${filters[i].field}`]
const matches = filterType.matches(rowValue, filterValue)
if (view.filter_type === 'AND' && !matches) {
// With an `AND` filter type, the row must match all the filters, so if
// one of the filters doesn't match we can mark it as invalid.
return false
} else if (view.filter_type === 'OR' && matches) {
// With an 'OR' filter type, the row only has to match one of the filters,
// that is the case here so we can mark it as valid.
return true
}
}
if (view.filter_type === 'AND') {
// When this point has been reached with an `AND` filter type it means that
// the row matches all the filters and therefore we can mark it as valid.
return true
} else if (view.filter_type === 'OR') {
// When this point has been reached with an `OR` filter type it means that
// the row matches none of the filters and therefore we can mark it as invalid.
return false
}
}
const values = JSON.parse(JSON.stringify(row))
Object.keys(overrides).forEach((key) => {
values[key] = overrides[key]
})
// The value is always valid if the filters are disabled.
const matches = view.filters_disabled ? true : isValid(view.filters, values)
const matches = view.filters_disabled
? true
: rowMatchesFilters(
this.$registry,
view.filter_type,
view.filters,
values
)
commit('SET_ROW_MATCH_FILTERS', { row, value: matches })
},
/**
@ -722,6 +702,60 @@ export const actions = {
throw error
}
},
/**
* Forcefully create a new row without making a call to the backend. It also
* checks if the row matches the filters and sortings and if not it will be
* removed from the buffer.
*/
forceCreate(
{ commit, dispatch, getters },
{ view, fields, primary, values, getScrollTop }
) {
const row = _.assign({}, values)
populateRow(row)
commit('ADD_ROWS', {
rows: [row],
prependToRows: 0,
appendToRows: 1,
count: getters.getCount + 1,
bufferStartIndex: getters.getBufferStartIndex,
bufferLimit: getters.getBufferLimit + 1,
})
dispatch('visibleByScrollTop', {
scrollTop: null,
windowHeight: null,
})
dispatch('updateMatchFilters', { view, row })
dispatch('updateMatchSortings', { view, fields, primary, row })
dispatch('refreshRow', { grid: view, row, fields, primary, getScrollTop })
},
/**
* Forcefully update an existing row without making a call to the backend. It
* could be that the row does not exist in the buffer, but actually belongs in
* there. So after creating or updating the row we can check if it belongs
* there and if not it will be deleted.
*/
forceUpdate(
{ dispatch, commit, getters },
{ view, fields, primary, values, getScrollTop }
) {
const row = getters.getRow(values.id)
if (row === undefined) {
return dispatch('forceCreate', {
view,
fields,
primary,
values,
getScrollTop,
})
} else {
commit('UPDATE_ROW', { row, values })
}
dispatch('updateMatchFilters', { view, row })
dispatch('updateMatchSortings', { view, fields, primary, row })
dispatch('refreshRow', { grid: view, row, fields, primary, getScrollTop })
},
/**
* Deletes an existing row of the provided table. After deleting, the visible rows
* range and the buffer are recalculated because we might need to show different
@ -812,6 +846,12 @@ export const actions = {
values,
})
},
/**
* Forcefully updates all field options without making a call to the backend.
*/
forceUpdateAllFieldOptions({ commit }, fieldOptions) {
commit('REPLACE_ALL_FIELD_OPTIONS', fieldOptions)
},
setRowHover({ commit }, { row, value }) {
commit('SET_ROW_HOVER', { row, value })
},
@ -903,6 +943,9 @@ export const getters = {
getAllRows(state) {
return state.rows
},
getRow: (state) => (id) => {
return state.rows.find((row) => row.id === id)
},
getRows(state) {
return state.rows.slice(state.rowsStartIndex, state.rowsEndIndex)
},

View file

@ -33,3 +33,43 @@ export function getRowSortFunction(
sortFunction = sortFunction.thenBy((a, b) => a.id - b.id)
return sortFunction
}
/**
* A helper function that checks if the provided row values match the provided view
* filters. Returning false indicates that the row should not be visible for that
* view.
*/
/**
 * A helper function that checks if the provided row values match the provided
 * view filters. Returning false indicates that the row should not be visible
 * for that view.
 */
export const rowMatchesFilters = ($registry, filterType, filters, values) => {
  // Without any filters there is nothing to check against, so the row is
  // considered valid.
  if (filters.length === 0) {
    return true
  }

  // Evaluates a single filter against the row value of the related field.
  const filterMatches = (filter) =>
    $registry
      .get('viewFilter', filter.type)
      .matches(values[`field_${filter.field}`], filter.value)

  if (filterType === 'AND') {
    // With an `AND` filter type the row must match every filter to be valid.
    return filters.every(filterMatches)
  } else if (filterType === 'OR') {
    // With an `OR` filter type matching a single filter is enough.
    return filters.some(filterMatches)
  }
}

View file

@ -117,6 +117,27 @@ export class ViewType extends Registerable {
*/
fieldUpdated(context, field, oldField, fieldType) {}
/**
 * Event that is called when a row is created from an outside source, for
 * example via a real time event by another user. It can be used to check if
 * data in a store needs to be updated. Note that the realtime `row_created`
 * handler also passes the id of the row the new row was placed before as a
 * fourth argument, which overrides may accept.
 */
rowCreated(context, tableId, rowValues) {}
/**
 * Event that is called when a row is updated from an outside source, for
 * example via a real time event by another user. It can be used to check if
 * data in a store needs to be updated.
 */
rowUpdated(context, tableId, rowValues) {}
/**
 * Event that is called when a row is deleted from an outside source, for
 * example via a real time event by another user. It can be used to check if
 * data in a store needs to be updated.
 */
rowDeleted(context, tableId, rowId) {}
/**
* @return object
*/
@ -170,4 +191,49 @@ export class GridViewType extends ViewType {
{ root: true }
)
}
/**
 * Returns true when the given table is the currently selected one and the
 * currently selected view is a grid view. Used by the row event handlers below
 * to decide whether an incoming real time row event applies to the grid store.
 */
isCurrentView(store, tableId) {
  const table = store.getters['table/getSelected']
  const grid = store.getters['view/getSelected']
  return (
    table.id === tableId &&
    Object.prototype.hasOwnProperty.call(grid, 'type') &&
    grid.type === GridViewType.getType()
  )
}
/**
 * When another user creates a row in the table whose grid view is currently
 * open, forcefully add the row to the grid store so it shows up without a
 * refresh.
 */
rowCreated({ store }, tableId, rowValues) {
  if (this.isCurrentView(store, tableId)) {
    store.dispatch('view/grid/forceCreate', {
      view: store.getters['view/getSelected'],
      fields: store.getters['field/getAll'],
      primary: store.getters['field/getPrimary'],
      values: rowValues,
      getScrollTop: () => store.getters['view/grid/getScrollTop'],
    })
  }
}
/**
 * When another user updates a row in the table whose grid view is currently
 * open, forcefully update the row in the grid store so the change is visible
 * without a refresh.
 */
rowUpdated({ store }, tableId, rowValues) {
  if (this.isCurrentView(store, tableId)) {
    store.dispatch('view/grid/forceUpdate', {
      view: store.getters['view/getSelected'],
      fields: store.getters['field/getAll'],
      primary: store.getters['field/getPrimary'],
      values: rowValues,
      getScrollTop: () => store.getters['view/grid/getScrollTop'],
    })
  }
}
/**
 * When another user deletes a row in the table whose grid view is currently
 * open, forcefully remove the row from the grid store. Only the id is known,
 * so a minimal row object is passed to the delete action.
 */
rowDeleted({ store }, tableId, rowId) {
  if (this.isCurrentView(store, tableId)) {
    const row = { id: rowId }
    store.dispatch('view/grid/forceDelete', {
      grid: store.getters['view/getSelected'],
      row,
      getScrollTop: () => store.getters['view/grid/getScrollTop'],
    })
  }
}
}