mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-14 17:18:33 +00:00

Merge branch '3009-row-enters-view-webhook-event-type' into 'develop'

Resolve "Row enters view webhook event type"

Closes 

See merge request 
This commit is contained in:
Davide Silvestri 2025-03-14 15:06:52 +01:00
commit 39292cec85
56 changed files with 2102 additions and 142 deletions

View file

@ -93,6 +93,7 @@ DATABASE_NAME=baserow
# BASEROW_WEBHOOKS_MAX_PER_TABLE=
# BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES=
# BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS=
# BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE=
# BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT=
# HOURS_UNTIL_TRASH_PERMANENTLY_DELETED=
@ -137,6 +138,7 @@ DATABASE_NAME=baserow
# BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES=
# BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS=
# BASEROW_ALLOW_MULTIPLE_SSO_PROVIDERS_FOR_SAME_ACCOUNT=
# BASEROW_ENTERPRISE_GROUPED_AGGREGATE_SERVICE_MAX_SERIES=
# BASEROW_PERIODIC_FIELD_UPDATE_CRONTAB=
# BASEROW_PERIODIC_FIELD_UPDATE_UNUSED_WORKSPACE_INTERVAL_MIN=

View file

@ -981,6 +981,9 @@ BASEROW_MAX_WEBHOOK_CALLS_IN_QUEUE_PER_WEBHOOK = (
int(os.getenv("BASEROW_MAX_WEBHOOK_CALLS_IN_QUEUE_PER_WEBHOOK", "0")) or None
)
BASEROW_WEBHOOKS_BATCH_LIMIT = int(os.getenv("BASEROW_WEBHOOKS_BATCH_LIMIT", 5))
BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE = int(
os.getenv("BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE", BATCH_ROWS_SIZE_LIMIT)
)
# ======== WARNING ========
# Please read and understand everything at:
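The new BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE setting caps how many row IDs a single rows-enter-view webhook call carries. A minimal sketch of the chunking this implies, assuming a hypothetical chunk_row_ids helper that is not part of this MR:

def chunk_row_ids(row_ids: list[int], batch_size: int) -> list[list[int]]:
    # Split the row ids into consecutive batches of at most batch_size.
    return [
        row_ids[i : i + batch_size] for i in range(0, len(row_ids), batch_size)
    ]

assert chunk_row_ids([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]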

View file

@ -26,8 +26,15 @@ class TableWebhookEventConfig(serializers.Serializer):
choices=webhook_event_type_registry.get_types(),
)
fields = serializers.ListField(
required=False,
child=serializers.IntegerField(),
help_text="A list of field IDs that are related to the event.",
allow_empty=True,
)
views = serializers.ListField(
required=False,
child=serializers.IntegerField(),
help_text="A list of view IDs that are related to the event.",
)
@ -186,13 +193,15 @@ class TableWebhookSerializer(serializers.ModelSerializer):
@extend_schema_field(TableWebhookEventConfig(many=True))
def get_event_config(self, instance):
events = [
{
"event_type": event.event_type,
"fields": [f.id for f in event.fields.all()],
}
for event in instance.events.all()
]
events = []
for event in instance.events.all():
evt = {"event_type": event.event_type}
if fields := [f.id for f in event.fields.all()]:
evt["fields"] = fields
if views := [v.id for v in event.views.all()]:
evt["views"] = views
events.append(evt)
return [TableWebhookEventConfig(event).data for event in events]
@extend_schema_field(OpenApiTypes.OBJECT)
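With this change, empty fields and views lists are omitted from the serialized event config instead of being rendered as empty arrays. For illustration (the IDs below are made up), a webhook with one filtered event and one bare event would serialize as:

event_config = [
    {"event_type": "rows.updated", "fields": [41, 42], "views": [7]},
    # events without configured fields/views omit those keys entirely
    {"event_type": "rows.deleted"},
]

This matches the updated expectations in the webhook API tests further down in this diff.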

View file

@ -1023,6 +1023,7 @@ class DatabaseConfig(AppConfig):
import baserow.contrib.database.rows.tasks # noqa: F401
import baserow.contrib.database.search.tasks # noqa: F401
import baserow.contrib.database.table.receivers # noqa: F401
import baserow.contrib.database.views.receivers # noqa: F401
import baserow.contrib.database.views.tasks # noqa: F401

View file

@ -6395,13 +6395,17 @@ class MultipleCollaboratorsFieldType(
JSONBAgg(
get_collaborator_extractor(db_column, model_field),
filter=Q(**{f"{db_column}__isnull": False}),
order=f"{db_column}__id",
),
Value([], output_field=JSONField()),
)
else:
return Coalesce(
wrap_in_subquery(
JSONBAgg(get_collaborator_extractor(db_column, model_field)),
JSONBAgg(
get_collaborator_extractor(db_column, model_field),
order=f"{db_column}__id",
),
db_column,
model_field.model,
),

View file

@ -95,10 +95,14 @@ def _run_periodic_field_type_update_per_workspace(
all_updated_fields = []
fields = qs.filter(
table__database__workspace_id=workspace.id,
table__trashed=False,
table__database__trashed=False,
fields = (
qs.filter(
table__database__workspace_id=workspace.id,
table__trashed=False,
table__database__trashed=False,
)
.select_related("table")
.prefetch_related("table__view_set")
)
# noinspection PyBroadException
try:
@ -114,6 +118,8 @@ def _run_periodic_field_type_update_per_workspace(
tb=tb,
)
from baserow.contrib.database.views.handler import ViewSubscriptionHandler
# After a successful periodic update of all fields, we need to update the
# search index for all of them in one call per table to avoid ending up in a
# deadlock because rows are updated simultaneously.
@ -123,6 +129,11 @@ def _run_periodic_field_type_update_per_workspace(
for _, fields in fields_per_table.items():
SearchHandler().entire_field_values_changed_or_created(fields[0].table, fields)
with transaction.atomic():
ViewSubscriptionHandler().notify_table_views_updates(
fields[0].table.view_set.all()
)
@app.task(bind=True)
def delete_mentions_marked_for_deletion(self):

View file

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-03-05 11:01+0000\n"
"POT-Creation-Date: 2025-03-12 16:34+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@ -492,111 +492,111 @@ msgstr ""
msgid "ViewFieldOptions updated"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1200
#: src/baserow/contrib/database/views/actions.py:1201
msgid "View slug URL updated"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1201
#: src/baserow/contrib/database/views/actions.py:1202
msgid "View changed public slug URL"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1274
#: src/baserow/contrib/database/views/actions.py:1275
msgid "Update view"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1275
#: src/baserow/contrib/database/views/actions.py:1276
#, python-format
msgid "View \"%(view_name)s\" (%(view_id)s) updated"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1356
#: src/baserow/contrib/database/views/actions.py:1357
msgid "Create view"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1357
#: src/baserow/contrib/database/views/actions.py:1358
#, python-format
msgid "View \"%(view_name)s\" (%(view_id)s) created"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1431
#: src/baserow/contrib/database/views/actions.py:1432
msgid "Duplicate view"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1433
#: src/baserow/contrib/database/views/actions.py:1434
#, python-format
msgid ""
"View \"%(view_name)s\" (%(view_id)s) duplicated from view "
"\"%(original_view_name)s\" (%(original_view_id)s)"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1507
#: src/baserow/contrib/database/views/actions.py:1508
msgid "Delete view"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1508
#: src/baserow/contrib/database/views/actions.py:1509
#, python-format
msgid "View \"%(view_name)s\" (%(view_id)s) deleted"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1570
#: src/baserow/contrib/database/views/actions.py:1571
msgid "Create decoration"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1571
#: src/baserow/contrib/database/views/actions.py:1572
#, python-format
msgid "View decoration %(decorator_id)s created"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1675
#: src/baserow/contrib/database/views/actions.py:1676
msgid "Update decoration"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1676
#: src/baserow/contrib/database/views/actions.py:1677
#, python-format
msgid "View decoration %(decorator_id)s updated"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1813
#: src/baserow/contrib/database/views/actions.py:1814
msgid "Delete decoration"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1814
#: src/baserow/contrib/database/views/actions.py:1815
#, python-format
msgid "View decoration %(decorator_id)s deleted"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1910
#: src/baserow/contrib/database/views/actions.py:1911
msgid "Create a view group"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:1911
#: src/baserow/contrib/database/views/actions.py:1912
#, python-format
msgid "View grouped on field \"%(field_name)s\" (%(field_id)s)"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2011
#: src/baserow/contrib/database/views/actions.py:2012
msgid "Update a view group"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2012
#: src/baserow/contrib/database/views/actions.py:2013
#, python-format
msgid "View group by updated on field \"%(field_name)s\" (%(field_id)s)"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2140
#: src/baserow/contrib/database/views/actions.py:2141
msgid "Delete a view group"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2141
#: src/baserow/contrib/database/views/actions.py:2142
#, python-format
msgid "View group by deleted from field \"%(field_name)s\" (%(field_id)s)"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2235
#: src/baserow/contrib/database/views/actions.py:2236
msgid "Submit form"
msgstr ""
#: src/baserow/contrib/database/views/actions.py:2236
#: src/baserow/contrib/database/views/actions.py:2237
#, python-format
msgid "Row (%(row_id)s) created via form submission"
msgstr ""
@ -646,26 +646,27 @@ msgid ""
"to %(webhook_url)s\" updated"
msgstr ""
#: src/baserow/contrib/database/webhooks/notification_types.py:90
#: src/baserow/contrib/database/webhooks/notification_types.py:92
#, python-format
msgid "%(name)s webhook has been deactivated."
msgstr ""
#: src/baserow/contrib/database/webhooks/notification_types.py:97
#: src/baserow/contrib/database/webhooks/notification_types.py:99
#, python-format
msgid ""
"The webhook failed more than %(max_failures)s consecutive times and was "
"therefore deactivated."
msgstr ""
#: src/baserow/contrib/database/webhooks/notification_types.py:147
#: src/baserow/contrib/database/webhooks/notification_types.py:155
#, python-format
msgid "%(name)s webhook payload too large."
msgstr ""
#: src/baserow/contrib/database/webhooks/notification_types.py:154
#: src/baserow/contrib/database/webhooks/notification_types.py:162
#, python-format
msgid ""
"The webhook couldn't send all the data because it reaches the maximum number "
"of batches of %(batch_limit)s."
"The payload for the %(name)s webhook with event ID %(event_id)s was too "
"large. The content has been split into multiple batches, but data above the "
"batch limit of %(batch_limit)s was discarded."
msgstr ""

View file

@ -0,0 +1,93 @@
# Generated by Django 5.0.9 on 2025-03-07 15:43
import django.contrib.postgres.fields
import django.db.models.deletion
from django.db import migrations, models
import baserow.core.fields
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0002_remove_content_type_name"),
("database", "0181_tablewebhookcall_batch_id_and_more"),
]
operations = [
migrations.AddField(
model_name="tablewebhookevent",
name="views",
field=models.ManyToManyField(to="database.view"),
),
migrations.CreateModel(
name="ViewRows",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_on", models.DateTimeField(auto_now_add=True)),
("updated_on", baserow.core.fields.SyncedDateTimeField(auto_now=True)),
(
"row_ids",
django.contrib.postgres.fields.ArrayField(
base_field=models.PositiveIntegerField(),
default=list,
help_text="The rows that are shown in the view. This list can be used by webhooks to determine which rows have been changed since the last check.",
size=None,
),
),
(
"view",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
related_name="rows",
to="database.view",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="ViewSubscription",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("subscriber_id", models.PositiveIntegerField()),
(
"subscriber_content_type",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="contenttypes.contenttype",
),
),
(
"view",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscribers",
to="database.view",
),
),
],
options={
"unique_together": {
("view", "subscriber_content_type", "subscriber_id")
},
},
),
]

View file

@ -816,6 +816,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_webhook_events=send_webhook_events,
rows_values_refreshed_from_db=False,
m2m_change_tracker=m2m_change_tracker,
fields=fields,
dependant_fields=dependant_fields,
)
return instance
@ -1005,6 +1007,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
before_return=before_return,
updated_field_ids=updated_field_ids,
m2m_change_tracker=m2m_change_tracker,
fields=[f for f in updated_fields if f.id in updated_field_ids],
dependant_fields=dependant_fields,
)
return row
@ -1215,6 +1219,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
send_webhook_events=send_webhook_events,
prepared_rows_values=prepared_rows_values,
m2m_change_tracker=m2m_change_tracker,
fields=updated_fields,
dependant_fields=dependant_fields,
)
if generate_error_report:
@ -1895,6 +1901,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
if not skip_search_update:
SearchHandler.field_value_updated_or_created(table)
# Reload rows from the database to get the updated values for formulas
updated_rows_to_return = list(
model.objects.all().enhance_by_fields().filter(id__in=row_ids)
)
@ -1910,6 +1917,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
m2m_change_tracker=m2m_change_tracker,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
fields=[f for f in updated_fields if f.id in updated_field_ids],
dependant_fields=dependant_fields,
)
fields_metadata_by_row_id = self.get_fields_metadata_for_rows(
@ -2103,6 +2112,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
updated_field_ids=[],
prepared_rows_values=None,
send_webhook_events=send_webhook_events,
fields=[],
dependant_fields=dependant_fields,
)
return row
@ -2204,6 +2215,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
before_return=before_return,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
fields=updated_fields,
dependant_fields=dependant_fields,
)
def update_dependencies_of_rows_deleted(self, table, row, model):
@ -2265,7 +2278,6 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
triggered. Defaults to true.
:param permanently_delete: If `true` the rows will be permanently deleted
instead of trashed.
:raises RowDoesNotExist: When the row with the provided id does not exist.
"""
workspace = table.database.workspace
@ -2275,8 +2287,46 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
workspace=workspace,
context=table,
)
return self.force_delete_rows(
user,
table,
row_ids,
model=model,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
permanently_delete=permanently_delete,
)
if not model:
def force_delete_rows(
self,
user: AbstractUser,
table: Table,
row_ids: List[int],
model: Optional[Type[GeneratedTableModel]] = None,
send_realtime_update: bool = True,
send_webhook_events: bool = True,
permanently_delete: bool = False,
) -> TrashedRows:
"""
Trashes existing rows of the given table based on row_ids, without checking
user permissions.
:param user: The user on whose behalf the change is made.
:param table: The table for which the row must be deleted.
:param row_ids: The ids of the rows that must be deleted.
:param model: If the correct model has already been generated, it can be
provided so that it does not have to be generated for a second time.
:param send_realtime_update: If set to false then it is up to the caller to
send the rows_created or similar signal. Defaults to True.
:param send_webhook_events: If set to false then the webhooks will not be
triggered. Defaults to true.
:param permanently_delete: If `true` the rows will be permanently deleted
instead of trashed.
:raises RowDoesNotExist: When the row with the provided id does not exist.
"""
workspace = table.database.workspace
if model is None:
model = table.get_model()
non_unique_ids = get_non_unique_values(row_ids)
@ -2310,9 +2360,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
TrashHandler.trash(user, workspace, table.database, trashed_rows)
rows_deleted_counter.add(
len(row_ids),
)
rows_deleted_counter.add(len(row_ids))
updated_field_ids = []
updated_fields = []
@ -2359,6 +2407,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
before_return=before_return,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
fields=updated_fields,
dependant_fields=dependant_fields,
)
return trashed_rows

View file

@ -309,6 +309,8 @@ class RowTrashableItemType(TrashableItemType):
model=model,
before=None,
user=None,
fields=updated_fields,
dependant_fields=dependant_fields,
)
def permanently_delete_item(self, row, trash_item_lookup_cache=None):
@ -432,6 +434,8 @@ class RowsTrashableItemType(TrashableItemType):
model=model,
before=None,
user=None,
fields=updated_fields,
dependant_fields=dependant_fields,
)
else:
# Use table signal here instead of row signal because we don't want

View file

@ -1,4 +1,5 @@
import dataclasses
import itertools
import re
import traceback
from collections import defaultdict, namedtuple
@ -131,7 +132,9 @@ from .models import (
ViewFilter,
ViewFilterGroup,
ViewGroupBy,
ViewRows,
ViewSort,
ViewSubscription,
)
from .registries import (
decorator_type_registry,
@ -142,6 +145,8 @@ from .registries import (
)
from .signals import (
form_submitted,
rows_entered_view,
rows_exited_view,
view_created,
view_decoration_created,
view_decoration_deleted,
@ -3209,7 +3214,12 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
return view
def submit_form_view(
self, user, form, values, model=None, enabled_field_options=None
self,
user,
form,
values,
model: GeneratedTableModel | None = None,
enabled_field_options=None,
):
"""
Handles when a form is submitted. It will validate the data by checking if
@ -3756,3 +3766,127 @@ class CachingPublicViewRowChecker:
# filters and so the result of the first check will be still
# valid for any subsequent checks.
return True
class ViewSubscriptionHandler:
@classmethod
def subscribe_to_views(cls, subscriber: django_models.Model, views: list[View]):
"""
Subscribes a subscriber to the provided views. If ViewRows entries already
exist, any pending changes are notified to current subscribers first, so that
the new subscriber is only notified of changes that happen after the
subscription.
:param subscriber: The subscriber to subscribe to the views.
:param views: The views to subscribe to.
"""
cls.notify_table_views_updates(views)
ViewRows.create_missing_for_views(views)
new_subscriptions = []
for view in views:
new_subscriptions.append(ViewSubscription(subscriber=subscriber, view=view))
ViewSubscription.objects.bulk_create(new_subscriptions, ignore_conflicts=True)
@classmethod
def unsubscribe_from_views(
cls, subscriber: django_models.Model, views: list[View] | None = None
):
"""
Unsubscribes a subscriber from the provided views. If the views are not
provided, it unsubscribes the subscriber from all views. Make sure to use a
table-specific model for the subscriber to avoid unsubscribing from views that
are not related to the subscriber.
:param subscriber: The subscriber to unsubscribe from the views.
:param views: The views to unsubscribe from. If not provided, the subscriber
will be unsubscribed from all views.
"""
q = Q(
subscriber_content_type=ContentType.objects.get_for_model(subscriber),
subscriber_id=subscriber.pk,
)
if views is not None:
q &= Q(view__in=views)
ViewSubscription.objects.filter(q).delete()
@classmethod
def check_views_with_time_sensitive_filters(cls):
"""
Checks for views that have time-sensitive filters. If a view has a
time-sensitive filter, calling this method periodically ensures that the proper
signals are emitted to notify subscribers that the view results have changed.
"""
views = View.objects.filter(
id__in=ViewFilter.objects.filter(
type__in=view_filter_type_registry.get_time_sensitive_filter_types(),
view__in=ViewSubscription.objects.values("view"),
).values("view_id")
).order_by("table", "id")
for _, view_group in itertools.groupby(views, key=lambda f: f.table):
view_ids = [v.id for v in view_group]
if view_ids:
cls._notify_table_views_updates(view_ids)
@classmethod
def notify_table_views_updates(
cls, views: list[View], model: GeneratedTableModel | None = None
):
"""
Verifies whether the views have subscribers and notifies them of any changes
in the view results.
:param views: The views to notify subscribers of.
:param model: The table model to use for the views. If not provided, the model
will be generated automatically.
"""
view_ids_with_subscribers = ViewSubscription.objects.filter(
view__in=views
).values_list("view_id", flat=True)
if view_ids_with_subscribers:
cls._notify_table_views_updates(view_ids_with_subscribers, model)
@classmethod
def _notify_table_views_updates(
cls, view_ids: list[int], model: GeneratedTableModel | None = None
):
"""
Notify subscribers of any changes in the view results, emitting the appropriate
signals and updating the ViewRows state.
:param view_ids: The view ids to notify subscribers of.
:param model: The table model to use for the views. If not provided, the model
will be generated automatically.
"""
view_rows = list(
ViewRows.objects.select_related("view__table")
.filter(view_id__in=view_ids)
.select_for_update(of=("self",))
.order_by("view_id")
)
if model is None:
model = view_rows[0].view.table.get_model()
for view_state in view_rows:
view = view_state.view
new_row_ids, row_ids_entered, row_ids_exited = view_state.get_diff(model)
changed = False
if row_ids_entered:
rows_entered_view.send(
sender=cls, view=view, row_ids=row_ids_entered, model=model
)
changed = True
if row_ids_exited:
rows_exited_view.send(
sender=cls, view=view, row_ids=row_ids_exited, model=model
)
changed = True
if changed:
view_state.row_ids = new_row_ids
view_state.save()
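A usage sketch of the new handler, assuming an existing subscriber instance (in this MR a TableWebhookEvent) named webhook_event and a list of View objects named views from the same table; subscribing and notifying run inside a transaction because _notify_table_views_updates locks the ViewRows rows with select_for_update:

from django.db import transaction

from baserow.contrib.database.views.handler import ViewSubscriptionHandler

with transaction.atomic():
    # Snapshot the current row ids per view and register the subscription,
    # so only later changes emit rows_entered_view / rows_exited_view.
    ViewSubscriptionHandler.subscribe_to_views(webhook_event, views)

# Later, e.g. after rows or view filters changed:
with transaction.atomic():
    ViewSubscriptionHandler.notify_table_views_updates(views)

# When the subscriber goes away:
ViewSubscriptionHandler.unsubscribe_from_views(webhook_event)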

View file

@ -4,7 +4,9 @@ from typing import Iterable, Optional, Union
from django.contrib.auth.hashers import check_password, make_password
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models import Q
from django.db.models.query import Prefetch
@ -936,3 +938,68 @@ class FormViewFieldOptionsCondition(HierarchicalModelMixin, models.Model):
class Meta:
ordering = ("id",)
class ViewRows(CreatedAndUpdatedOnMixin, models.Model):
view = models.OneToOneField(View, on_delete=models.CASCADE, related_name="rows")
row_ids = ArrayField(
models.PositiveIntegerField(),
default=list,
help_text="The rows that are shown in the view. This list can be used by webhooks "
"to determine which rows have been changed since the last check.",
)
@classmethod
def create_missing_for_views(cls, views: list[View], model=None):
"""
Creates ViewRows objects for the given views if they don't already exist.
:param views: The views for which to create ViewRows objects.
:param model: The table model to use when fetching the row ids. If not
    provided, it will be generated automatically.
:return: The newly created ViewRows objects.
"""
from baserow.contrib.database.views.handler import ViewHandler
existing_view_ids = ViewRows.objects.filter(view__in=views).values_list(
"view_id", flat=True
)
view_map = {view.id: view for view in views}
missing_view_ids = list(set(view_map.keys()) - set(existing_view_ids))
view_rows = []
for view_id in missing_view_ids:
view = view_map[view_id]
row_ids = (
ViewHandler()
.get_queryset(view, model=model, apply_sorts=False)
.values_list("id", flat=True)
)
view_rows.append(ViewRows(view=view, row_ids=list(row_ids)))
return ViewRows.objects.bulk_create(view_rows, ignore_conflicts=True)
def get_diff(self, model=None):
"""
Executes the view query and returns the current row IDs in the view,
along with the differences between the current state and the last saved state.
"""
from baserow.contrib.database.views.handler import ViewHandler
rows = ViewHandler().get_queryset(self.view, model=model, apply_sorts=False)
previous_row_ids = set(self.row_ids)
new_row_ids = set(rows.order_by().values_list("id", flat=True))
row_ids_entered = new_row_ids - previous_row_ids
row_ids_exited = previous_row_ids - new_row_ids
return list(new_row_ids), list(row_ids_entered), list(row_ids_exited)
class ViewSubscription(models.Model):
view = models.ForeignKey(View, on_delete=models.CASCADE, related_name="subscribers")
subscriber_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
subscriber_id = models.PositiveIntegerField()
subscriber = GenericForeignKey("subscriber_content_type", "subscriber_id")
class Meta:
unique_together = ("view", "subscriber_content_type", "subscriber_id")

View file

@ -0,0 +1,98 @@
from django.dispatch import receiver
from baserow.contrib.database.fields.signals import (
field_deleted,
field_restored,
field_updated,
)
from baserow.contrib.database.rows.signals import (
rows_created,
rows_deleted,
rows_updated,
)
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.contrib.database.views.models import View
from baserow.contrib.database.views.signals import (
view_filter_created,
view_filter_deleted,
view_filter_group_created,
view_filter_group_deleted,
view_filter_group_updated,
view_filter_updated,
view_updated,
)
from .handler import ViewSubscriptionHandler
def _notify_table_data_updated(table: Table, model: GeneratedTableModel | None = None):
"""
Notifies the table views that the table data has been updated. This will result
in the table views being updated and the subscribers being notified.
:param table: The table for which the data has been updated.
:param model: The model that was updated if available.
"""
ViewSubscriptionHandler.notify_table_views_updates(
table.view_set.all(), model=model
)
def _notify_view_results_updated(view: View):
"""
Notifies the view that its results have been updated. This will result in the
subscribers being notified.
:param view: The view for which the results have been updated.
"""
ViewSubscriptionHandler.notify_table_views_updates([view])
@receiver([rows_updated, rows_created, rows_deleted])
def notify_rows_signals(sender, rows, user, table, model, dependant_fields, **kwargs):
_notify_table_data_updated(table, model)
updated_tables = set()
for field in dependant_fields:
updated_tables.add(field.table)
for updated_table in updated_tables:
_notify_table_data_updated(updated_table)
@receiver(view_updated)
def notify_view_updated(sender, view, user, old_view, **kwargs):
_notify_view_results_updated(view)
@receiver([view_filter_created, view_filter_updated, view_filter_deleted])
def notify_view_filter_created_or_updated(sender, view_filter, user, **kwargs):
_notify_view_results_updated(view_filter.view)
@receiver(
[view_filter_group_created, view_filter_group_updated, view_filter_group_deleted]
)
def notify_view_filter_group_created_or_updated(
sender, view_filter_group, user, **kwargs
):
_notify_view_results_updated(view_filter_group.view)
def _notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs):
tables_to_notify = set([field.table])
for updated_field in related_fields:
tables_to_notify.add(updated_field.table)
for table in tables_to_notify:
_notify_table_data_updated(table)
@receiver([field_restored, field_updated])
def notify_field_updated(sender, field, related_fields, user, **kwargs):
_notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs)
@receiver(field_deleted)
def notify_field_deleted(sender, field_id, field, related_fields, user, **kwargs):
_notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs)

View file

@ -1007,6 +1007,19 @@ class ViewFilterType(Instance):
for t in self.compatible_field_types
)
@property
def time_sensitive(self) -> bool:
"""
Indicates if the filter results depend on the current time.
For example, filters like 'date_is' with operators like `today` or `yesterday`
will return different results as time passes, even if the underlying
data hasn't changed.
:returns: True if the filter results change based on current time
"""
return False
class ViewFilterTypeRegistry(Registry):
"""
@ -1020,6 +1033,21 @@ class ViewFilterTypeRegistry(Registry):
does_not_exist_exception_class = ViewFilterTypeDoesNotExist
already_registered_exception_class = ViewFilterTypeAlreadyRegistered
def get_time_sensitive_filter_types(self) -> List[str]:
"""
Returns a list of filter types that are time-dependent. For example, filters
like `date_is` with operators like `today` or `yesterday` will return different
results as time passes, even if the underlying data hasn't changed.
:returns: A list of filter types that are time-sensitive.
"""
return [
filter_type.type
for filter_type in self.registry.values()
if filter_type.time_sensitive
]
class ViewAggregationType(Instance):
"""

View file

@ -31,6 +31,9 @@ view_decoration_deleted = Signal()
view_field_options_updated = Signal()
rows_entered_view = Signal()
rows_exited_view = Signal()
@receiver(field_signals.field_deleted)
def field_deleted(sender, field, **kwargs):

View file

@ -1,4 +1,5 @@
import traceback
from datetime import timedelta
from django.conf import settings
from django.core.cache import cache
@ -9,7 +10,10 @@ from loguru import logger
from baserow.config.celery import app
from baserow.contrib.database.views.exceptions import ViewDoesNotExist
from baserow.contrib.database.views.handler import ViewIndexingHandler
from baserow.contrib.database.views.handler import (
ViewIndexingHandler,
ViewSubscriptionHandler,
)
AUTO_INDEX_CACHE_KEY = "auto_index_view_cache_key"
@ -105,3 +109,23 @@ def schedule_view_index_update(view_id: int):
return
transaction.on_commit(lambda: _schedule_view_index_update(view_id))
@app.task(queue="export")
def periodic_check_for_views_with_time_sensitive_filters():
"""
Periodically checks for views that have time-sensitive filters. If a view has a
time-sensitive filter, this task ensures that the proper signals are emitted to notify
subscribers that the view results have changed.
"""
with transaction.atomic():
ViewSubscriptionHandler.check_views_with_time_sensitive_filters()
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(
timedelta(minutes=30),
periodic_check_for_views_with_time_sensitive_filters.s(),
)

View file

@ -437,6 +437,10 @@ class HigherThanOrEqualViewFilterType(NumericComparisonViewFilterType):
class TimezoneAwareDateViewFilterType(ViewFilterType):
@property
def time_sensitive(self) -> bool:
return True
compatible_field_types = [
DateFieldType.type,
LastModifiedFieldType.type,
@ -1765,6 +1769,10 @@ DATE_FILTER_OPERATOR_DELTA_MAP = {
class BaseDateMultiStepViewFilterType(ViewFilterType):
incompatible_operators = []
@property
def time_sensitive(self) -> bool:
return True
def get_filter_date(
self,
operator: str,

View file

@ -18,6 +18,18 @@ class TableWebhookEventConfigFieldNotInTable(Exception):
)
class TableWebhookEventConfigViewNotInTable(Exception):
"""Raised when trying to update the"""
def __init__(self, view_id=None, *args, **kwargs):
self.view_id = view_id
super().__init__(
f"The view {view_id} does not belong to the table.",
*args,
**kwargs,
)
class SkipWebhookCall(Exception):
"""Raised when the webhook call must be skipped"""

View file

@ -11,12 +11,14 @@ from requests import PreparedRequest, Response
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.models import View
from baserow.core.handler import CoreHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs
from .exceptions import (
TableWebhookDoesNotExist,
TableWebhookEventConfigFieldNotInTable,
TableWebhookEventConfigViewNotInTable,
TableWebhookMaxAllowedCountExceeded,
)
from .models import (
@ -39,18 +41,21 @@ from .validators import get_webhook_request_function
class WebhookHandler:
def find_webhooks_to_call(self, table_id: int, event_type: str) -> QuerySet:
def find_webhooks_to_call(
self, table_id: int, event_type: str, additional_filters: Q | None = None
) -> QuerySet[TableWebhook]:
"""
This function is responsible for finding all the webhooks related to a table
that must be triggered on a specific event.
"""
q = Q()
q.add(Q(events__event_type__in=[event_type]), Q.OR)
q = Q(events__event_type__in=[event_type])
if additional_filters is not None:
q &= additional_filters
event_type_object = webhook_event_type_registry.get(event_type)
if event_type_object.should_trigger_when_all_event_types_selected:
q.add(Q(include_all_events=True), Q.OR)
q |= Q(include_all_events=True)
return (
TableWebhook.objects.filter(
@ -170,14 +175,25 @@ class WebhookHandler:
# could have been deleted.
if not event_object:
continue
event_fields = Field.objects.filter(
table_id=webhook.table_id, id__in=event["fields"]
)
for field_id in event["fields"]:
if not next((f for f in event_fields if field_id == f.id), None):
# Set fields
field_ids = event.get("fields", [])
fields = Field.objects.filter(table_id=webhook.table_id, id__in=field_ids)
for field_id in field_ids:
if not next((f for f in fields if field_id == f.id), None):
raise TableWebhookEventConfigFieldNotInTable(field_id)
event_object.fields.set(event_fields)
event_object.fields.set(fields)
# Set views
view_ids = event.get("views", [])
views = View.objects.filter(id__in=view_ids, table_id=webhook.table_id)
for view_id in view_ids:
if not next((v for v in views if view_id == v.id), None):
raise TableWebhookEventConfigViewNotInTable(view_id)
event_object.views.set(views)
event_object.get_type().after_update(event_object)
def create_table_webhook(
self,
@ -223,20 +239,24 @@ class WebhookHandler:
values = extract_allowed(kwargs, allowed_fields)
webhook = TableWebhook.objects.create(table_id=table.id, **values)
webhook_events = []
if events is not None and not values.get("include_all_events"):
event_headers = []
for event in events:
event_object = TableWebhookEvent(
event_type=event, webhook_id=webhook.id
for event_type in events:
webhook_event = TableWebhookEvent(
event_type=event_type, webhook=webhook
)
event_object.full_clean()
event_headers.append(event_object)
webhook_event.full_clean()
webhook_events.append(webhook_event)
webhook_events = TableWebhookEvent.objects.bulk_create(event_headers)
webhook_events = TableWebhookEvent.objects.bulk_create(webhook_events)
if event_config is not None and not values.get("include_all_events"):
self._update_webhook_event_config(webhook, event_config, webhook_events)
for webhook_event in webhook_events:
webhook_event_type = webhook_event.get_type()
webhook_event_type.after_create(webhook_event)
if headers is not None:
header_objects = []
for key, value in headers.items():
@ -247,7 +267,6 @@ class WebhookHandler:
header_objects.append(header)
TableWebhookHeader.objects.bulk_create(header_objects)
return webhook
def update_table_webhook(
@ -303,6 +322,7 @@ class WebhookHandler:
kwargs.get("include_all_events", False) and not old_include_all_events
)
created_events = []
if not should_update_events:
TableWebhookEvent.objects.filter(webhook=webhook).delete()
elif events is not None:
@ -327,11 +347,15 @@ class WebhookHandler:
]
if len(events_to_create) > 0:
TableWebhookEvent.objects.bulk_create(events_to_create)
created_events = TableWebhookEvent.objects.bulk_create(events_to_create)
if event_config is not None and should_update_events:
self._update_webhook_event_config(webhook, event_config)
for webhook_event in created_events:
webhook_event_type = webhook_event.get_type()
webhook_event_type.after_create(webhook_event)
if headers is not None:
existing_headers = webhook.headers.all()

View file

@ -1,11 +1,13 @@
import uuid
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.core.validators import MaxLengthValidator
from django.db import models
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.models import View
from baserow.core.models import CreatedAndUpdatedOnMixin
from .validators import header_name_validator, header_value_validator, url_validator
@ -81,6 +83,17 @@ class TableWebhookEvent(CreatedAndUpdatedOnMixin, models.Model):
)
event_type = models.CharField(max_length=50)
fields = models.ManyToManyField(Field)
views = models.ManyToManyField(View)
view_subscriptions = GenericRelation(
"ViewSubscription",
content_type_field="subscriber_content_type",
object_id_field="subscriber_id",
)
def get_type(self):
from .registries import webhook_event_type_registry
return webhook_event_type_registry.get(self.event_type)
class Meta:
ordering = ("id",)

View file

@ -1,10 +1,13 @@
import uuid
from typing import Any, Optional
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.db.models import Q
from django.dispatch.dispatcher import Signal
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.webhooks.models import TableWebhook, TableWebhookEvent
from baserow.core.registry import Instance, ModelRegistryMixin, Registry
from .exceptions import SkipWebhookCall, WebhookPayloadTooLarge
@ -97,6 +100,17 @@ class WebhookEventType(Instance):
return table
def get_additional_filters_for_webhooks_to_call(
self, **kwargs: dict
) -> Optional[Q]:
"""
Filters to pass to WebhookHandler.find_webhooks_to_call. By default, no
additional filters are applied.
:param kwargs: The arguments of the signal.
:return: An optional Q expression with the additional filters, or None.
"""
def listener(self, **kwargs: dict):
"""
The method that is called when the signal is triggered. By default it will
@ -107,7 +121,9 @@ class WebhookEventType(Instance):
transaction.on_commit(lambda: self.listener_after_commit(**kwargs))
def _paginate_payload(self, payload) -> tuple[dict, dict | None]:
def _paginate_payload(
self, webhook: TableWebhook, event_id: str, payload: dict[str, Any]
) -> tuple[dict, dict | None]:
"""
This method is called in the celery task and can be overwritten to paginate the
payload, if it's too large to send all the data at once. The default
@ -144,7 +160,9 @@ class WebhookEventType(Instance):
f"the batch limit of ({webhook.batch_limit} batches)."
)
prepared_payload, remaining_payload = self._paginate_payload(payload)
prepared_payload, remaining_payload = self._paginate_payload(
webhook, event_id, payload
)
if remaining_payload is not None:
prepared_payload["batch_id"] = batch_id
@ -168,7 +186,8 @@ class WebhookEventType(Instance):
table = self.get_table_object(**kwargs)
webhook_handler = WebhookHandler()
webhooks = webhook_handler.find_webhooks_to_call(table.id, self.type)
filters = self.get_additional_filters_for_webhooks_to_call(**kwargs)
webhooks = webhook_handler.find_webhooks_to_call(table.id, self.type, filters)
event_id = uuid.uuid4()
for webhook in webhooks:
try:
@ -189,6 +208,22 @@ class WebhookEventType(Instance):
except SkipWebhookCall:
pass
def after_create(self, webhook_event: TableWebhookEvent):
"""
This method is called after a webhook event has been created. By default it
does nothing, but can be overwritten to add additional functionality.
:param webhook_event: The created webhook event.
"""
def after_update(self, webhook_event: TableWebhookEvent):
"""
This method is called after a webhook event has been updated. By default it
does nothing, but can be overwritten to add additional functionality.
:param webhook_event: The updated webhook event.
"""
class WebhookEventTypeRegistry(ModelRegistryMixin, Registry[WebhookEventType]):
name = "webhook_event"

View file

@ -139,7 +139,7 @@ def call_webhook(
webhook, event_id, event_type, method, url, headers, payload
)
# enqueue the next call if there is still remaining payload
if success and remaining is not None:
if success and remaining:
args = (
webhook_id,
event_id,

View file

@ -1,4 +1,5 @@
import json
from collections import defaultdict
from typing import Any, Optional
from redis.client import Redis
@ -99,3 +100,20 @@ class RedisQueue:
"""
self.redis_connection.delete(self.queue_key)
class WebhookRedisQueue(RedisQueue):
queues = defaultdict(list)
def enqueue_task(self, task_object):
self.queues[self.queue_key].append(task_object)
return True
def get_and_pop_next(self):
try:
return self.queues[self.queue_key].pop(0)
except IndexError:
return None
def clear(self):
self.queues[self.queue_key] = []

View file

@ -462,9 +462,7 @@ def test_update_webhook(api_client, data_fixture):
assert response_json["include_all_events"] is False
assert response_json["failed_triggers"] == 0
assert response_json["events"] == ["rows.created"]
assert response_json["event_config"] == [
{"event_type": "rows.created", "fields": []}
]
assert response_json["event_config"] == [{"event_type": "rows.created"}]
assert response_json["headers"] == {"Baserow-add-1": "Value 1"}
assert response_json["calls"] == []
@ -530,7 +528,7 @@ def test_update_webhook_with_event_config(api_client, data_fixture):
assert response_json["events"] == ["rows.updated", "rows.deleted"]
assert response_json["event_config"] == [
{"event_type": "rows.updated", "fields": [field_1.id, field_2.id]},
{"event_type": "rows.deleted", "fields": []},
{"event_type": "rows.deleted"},
]
response = api_client.patch(
@ -545,7 +543,7 @@ def test_update_webhook_with_event_config(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["events"] == ["rows.deleted"]
assert response_json["event_config"] == [
{"event_type": "rows.deleted", "fields": []},
{"event_type": "rows.deleted"},
]
response = api_client.patch(
@ -558,7 +556,7 @@ def test_update_webhook_with_event_config(api_client, data_fixture):
assert response.status_code == HTTP_200_OK
assert response_json["events"] == ["rows.deleted"]
assert response_json["event_config"] == [
{"event_type": "rows.deleted", "fields": []},
{"event_type": "rows.deleted"},
]

View file

@ -1,10 +1,18 @@
from unittest.mock import patch
import pytest
from django.db import transaction
import pytest
from freezegun import freeze_time
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.tasks import run_periodic_fields_updates
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.views.handler import ViewHandler, ViewSubscriptionHandler
from baserow.contrib.database.views.signals import (
view_loaded_create_indexes_and_columns,
)
from baserow.core.trash.handler import TrashHandler
@patch("baserow.contrib.database.views.handler.ViewIndexingHandler")
@ -36,3 +44,491 @@ def test_view_loaded_creates_last_modified_by_column(indexing_handler, data_fixt
None, view, table_model, table=table, user=user
)
setup.delay.assert_called_once_with(table_id=view.table.id)
@pytest.mark.django_db
def test_rows_enter_and_exit_view_are_called_when_rows_updated(data_fixture):
user = data_fixture.create_user()
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables()
# Create a view per table and assert that the signal is called for both tables
# when changing the link_a_to_b data
row_handler = RowHandler()
model_a = table_a.get_model()
model_b = table_b.get_model()
row_a = model_a.objects.create()
row_b = model_b.objects.create()
view_a = data_fixture.create_grid_view(table=table_a)
view_a_has_row_b = data_fixture.create_view_filter(
view=view_a, field=link_a_to_b, type="link_row_has", value=row_b.id
)
view_b = data_fixture.create_grid_view(table=table_b)
view_b_has_row_a = data_fixture.create_view_filter(
view=view_b,
field=link_a_to_b.link_row_related_field,
type="link_row_has",
value=row_a.id,
)
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
row_handler.force_update_rows(
user,
table_a,
[{"id": row_a.id, link_a_to_b.db_column: [row_b.id]}],
model_a,
)
p.assert_not_called()
# let's subscribe to the view in table_a first
ViewSubscriptionHandler.subscribe_to_views(user, [view_a])
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_update_rows(
user, table_a, [{"id": row_a.id, link_a_to_b.db_column: []}]
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_a.id]
# Now let's subscribe also to the view in table_b and assert that the signal is
# called for both views
ViewSubscriptionHandler.subscribe_to_views(user, [view_b])
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
row_handler.force_update_rows(
user,
table_a,
[{"id": row_a.id, link_a_to_b.db_column: [row_b.id]}],
model_a,
)
assert p.call_count == 2
assert p.call_args_list[0][1]["view"].id == view_a.id
assert p.call_args_list[1][1]["row_ids"] == [row_a.id]
assert p.call_args_list[1][1]["view"].id == view_b.id
assert p.call_args_list[1][1]["row_ids"] == [row_b.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_update_rows(
user, table_a, [{"id": row_a.id, link_a_to_b.db_column: []}]
)
assert p.call_count == 2
assert p.call_args_list[0][1]["view"].id == view_a.id
assert p.call_args_list[1][1]["row_ids"] == [row_a.id]
assert p.call_args_list[1][1]["view"].id == view_b.id
assert p.call_args_list[1][1]["row_ids"] == [row_b.id]
# Once unsubscribed, the signal should not be sent anymore
ViewSubscriptionHandler.unsubscribe_from_views(user)
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
row_handler.force_update_rows(
user,
table_a,
[{"id": row_a.id, link_a_to_b.db_column: [row_b.id]}],
model_a,
)
p.assert_not_called()
@pytest.mark.django_db
def test_rows_enter_and_exit_view_are_called_when_rows_created_or_deleted(
data_fixture,
):
user = data_fixture.create_user()
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables()
row_handler = RowHandler()
model_a = table_a.get_model()
model_b = table_b.get_model()
row_b = model_b.objects.create()
view_a = data_fixture.create_grid_view(table=table_a)
view_a_has_row_b = data_fixture.create_view_filter(
view=view_a, field=link_a_to_b, type="link_row_has", value=row_b.id
)
view_b = data_fixture.create_grid_view(table=table_b)
view_b_has_row_a = data_fixture.create_view_filter(
view=view_b, field=link_a_to_b.link_row_related_field, type="not_empty"
)
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
)
p.assert_not_called()
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_delete_rows(user, table_a, [new_row.id])
p.assert_not_called()
# let's subscribe to the view in table_a first
ViewSubscriptionHandler.subscribe_to_views(user, [view_a])
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [new_row.id]
# Deleting the row should also trigger the signal
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_delete_rows(user, table_a, [new_row.id])
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [new_row.id]
# Now let's subscribe also to the view in table_b and assert that the signal is
# called for both views
ViewSubscriptionHandler.subscribe_to_views(user, [view_b])
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
)
assert p.call_count == 2
assert p.call_args_list[0][1]["view"].id == view_a.id
assert p.call_args_list[0][1]["row_ids"] == [new_row.id]
assert p.call_args_list[1][1]["view"].id == view_b.id
assert p.call_args_list[1][1]["row_ids"] == [row_b.id]
# Deleting the row should also trigger the signal for both views
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_delete_rows(user, table_a, [new_row.id])
assert p.call_count == 2
assert p.call_args_list[0][1]["view"].id == view_a.id
assert p.call_args_list[0][1]["row_ids"] == [new_row.id]
assert p.call_args_list[1][1]["view"].id == view_b.id
assert p.call_args_list[1][1]["row_ids"] == [row_b.id]
# Once unsubscribed, the signal should not be sent anymore
ViewSubscriptionHandler.unsubscribe_from_views(user)
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
(new_row,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b.id]}], model=model_a
)
p.assert_not_called()
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_delete_rows(user, table_a, [new_row.id])
p.assert_not_called()
@pytest.mark.django_db
def test_rows_enter_and_exit_view_are_called_when_view_filters_change(
data_fixture,
):
user = data_fixture.create_user()
table_a = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(table=table_a)
model_a = table_a.get_model()
view_a = data_fixture.create_grid_view(table=table_a)
row_1 = model_a.objects.create()
row_2 = model_a.objects.create(**{text_field.db_column: "bbb"})
ViewSubscriptionHandler.subscribe_to_views(user, [view_a])
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
view_filter_1 = ViewHandler().create_filter(
user, view_a, text_field, "equal", "aaa"
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
view_filter_1 = ViewHandler().delete_filter(user, view_filter_1)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
view_filter_2 = ViewHandler().create_filter(
user, view_a, text_field, "equal", "bbb"
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch(
"baserow.contrib.database.views.signals.rows_entered_view.send"
) as entered, patch(
"baserow.contrib.database.views.signals.rows_exited_view.send"
) as exited:
view_filter_2 = ViewHandler().update_filter(
user, view_filter_2, type_name="empty"
)
entered.assert_called_once()
assert entered.call_args[1]["view"].id == view_a.id
assert entered.call_args[1]["row_ids"] == [row_1.id]
exited.assert_called_once()
assert exited.call_args[1]["view"].id == view_a.id
assert exited.call_args[1]["row_ids"] == [row_2.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
view_filter_3 = ViewHandler().create_filter(
user, view_a, text_field, "equal", "bbb"
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
ViewHandler().update_view(user, view_a, filters_disabled=True)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
@pytest.mark.django_db
def test_rows_enter_and_exit_view_are_called_when_view_filter_groups_change(
data_fixture,
):
user = data_fixture.create_user()
table_a = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(table=table_a)
model_a = table_a.get_model()
view = data_fixture.create_grid_view(table=table_a)
row_1 = model_a.objects.create()
row_2 = model_a.objects.create(**{text_field.db_column: "bbb"})
ViewSubscriptionHandler.subscribe_to_views(user, [view])
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
filter_group_1 = ViewHandler().create_filter_group(user, view)
p.assert_not_called()
view_filter_1 = ViewHandler().create_filter(
user, view, text_field, "empty", "", filter_group_id=filter_group_1.id
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_2.id]
view_filter_2 = ViewHandler().create_filter(
user, view, text_field, "equal", "bbb", filter_group_id=filter_group_1.id
)
assert p.call_count == 2
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
ViewHandler().update_filter_group(user, filter_group_1, filter_type="OR")
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
filter_group_2 = ViewHandler().create_filter_group(user, view)
p.assert_not_called()
view_filter_3 = ViewHandler().create_filter(
user, view, text_field, "equal", "aaa", filter_group_id=filter_group_2.id
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
ViewHandler().delete_filter_group(user, filter_group_2)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id, row_2.id]
@pytest.mark.django_db
def test_rows_enter_and_exit_view_are_called_when_fields_change(
data_fixture,
):
user = data_fixture.create_user()
table_a = data_fixture.create_database_table(user=user)
primary_field = data_fixture.create_text_field(table=table_a, primary=True)
text_field = data_fixture.create_text_field(table=table_a)
text_field_id = text_field.id
model_a = table_a.get_model()
view = data_fixture.create_grid_view(table=table_a)
row_1 = model_a.objects.create()
row_2 = model_a.objects.create(**{text_field.db_column: "bbb"})
view_filter_1 = data_fixture.create_view_filter(
view=view, field=text_field, type="equal", value="bbb"
)
ViewSubscriptionHandler.subscribe_to_views(user, [view])
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
FieldHandler().delete_field(user, text_field)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
TrashHandler.restore_item(user, "field", text_field_id)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
FieldHandler().update_field(user, text_field, new_type_name="number")
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
@pytest.mark.django_db
def test_rows_enter_and_exit_view_with_periodic_fields_updates(data_fixture):
user = data_fixture.create_user()
with freeze_time("2021-01-01"):
table = data_fixture.create_database_table(user=user)
now_field = data_fixture.create_formula_field(
table=table, formula="today()", name="today"
)
year_field = data_fixture.create_formula_field(
table=table, formula="tonumber(datetime_format(field('today'), 'YYYY'))"
)
model = table.get_model()
view = data_fixture.create_grid_view(table=table)
row_1 = model.objects.create()
view_filter_1 = data_fixture.create_view_filter(
view=view, field=year_field, type="equal", value="2022"
)
ViewSubscriptionHandler.subscribe_to_views(user, [view])
with patch(
"baserow.contrib.database.views.signals.rows_entered_view.send"
) as p, freeze_time("2022-01-01"):
run_periodic_fields_updates(table.database.workspace_id)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch(
"baserow.contrib.database.views.signals.rows_exited_view.send"
) as p, freeze_time("2023-01-01"):
run_periodic_fields_updates(table.database.workspace_id)
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
@pytest.mark.django_db(transaction=True)
def test_rows_enter_and_exit_view_when_time_sensitive_filters_are_used(
data_fixture,
):
user = data_fixture.create_user()
with freeze_time("2021-01-01"):
table = data_fixture.create_database_table(user=user)
date_field = data_fixture.create_date_field(table=table)
model = table.get_model()
view = data_fixture.create_grid_view(table=table)
row_1 = model.objects.create(**{date_field.db_column: "2022-01-01"})
view_filter_1 = data_fixture.create_view_filter(
view=view, field=date_field, type="date_is", value="Europe/Rome??today"
)
ViewSubscriptionHandler.subscribe_to_views(user, [view])
with patch(
"baserow.contrib.database.views.signals.rows_entered_view.send"
) as p, freeze_time("2022-01-01"):
with transaction.atomic():
ViewSubscriptionHandler.check_views_with_time_sensitive_filters()
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
with patch(
"baserow.contrib.database.views.signals.rows_exited_view.send"
) as p, freeze_time("2022-01-02"):
with transaction.atomic():
ViewSubscriptionHandler.check_views_with_time_sensitive_filters()
p.assert_called_once()
assert p.call_args[1]["view"].id == view.id
assert p.call_args[1]["row_ids"] == [row_1.id]
@pytest.mark.django_db
def test_rows_enter_and_exit_view_when_data_changes_in_looked_up_tables(
data_fixture,
):
user = data_fixture.create_user()
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables()
text_field_b = data_fixture.create_text_field(table=table_b)
lookup_field = data_fixture.create_lookup_field(
name="lookup",
table=table_a,
through_field=link_a_to_b,
target_field=text_field_b,
through_field_name=link_a_to_b.name,
target_field_name=text_field_b.name,
)
# Create a view on table_a and assert that the signal is called
# when the link_a_to_b data changes
row_handler = RowHandler()
model_a = table_a.get_model()
model_b = table_b.get_model()
(row_b1,) = row_handler.force_create_rows(
user, table_b, [{text_field_b.db_column: ""}], model=model_b
)
_, row_a2 = row_handler.force_create_rows(
user, table_a, [{}, {link_a_to_b.db_column: [row_b1.id]}], model=model_a
)
view_a = data_fixture.create_grid_view(table=table_a)
view_filter = data_fixture.create_view_filter(
view=view_a, field=lookup_field, type="has_not_empty_value", value=""
)
ViewSubscriptionHandler.subscribe_to_views(user, [view_a])
with patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p:
row_handler.force_update_rows(
user, table_b, [{"id": row_b1.id, text_field_b.db_column: "a"}], model_b
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_a2.id]
(row_a3,) = row_handler.force_create_rows(
user, table_a, [{link_a_to_b.db_column: [row_b1.id]}], model=model_a
)
assert p.call_count == 2
assert p.call_args_list[1][1]["view"].id == view_a.id
assert p.call_args_list[1][1]["row_ids"] == [row_a3.id]
with patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p:
row_handler.force_update_rows(
user, table_b, [{"id": row_b1.id, text_field_b.db_column: ""}], model_b
)
p.assert_called_once()
assert p.call_args[1]["view"].id == view_a.id
assert p.call_args[1]["row_ids"] == [row_a2.id, row_a3.id]

View file

@ -1,4 +1,3 @@
from collections import defaultdict
from unittest.mock import MagicMock, patch
from django.db import transaction
@ -22,34 +21,17 @@ from baserow.contrib.database.webhooks.tasks import (
)
from baserow.core.models import WorkspaceUser
from baserow.core.notifications.models import Notification
from baserow.core.redis import RedisQueue
from baserow.core.redis import WebhookRedisQueue
from baserow.test_utils.helpers import stub_getaddrinfo
class MemoryQueue(RedisQueue):
queues = defaultdict(list)
def enqueue_task(self, task_object):
self.queues[self.queue_key].append(task_object)
return True
def get_and_pop_next(self):
try:
return self.queues[self.queue_key].pop(0)
except IndexError:
return None
def clear(self):
self.queues[self.queue_key] = []
@pytest.mark.django_db(transaction=True)
@responses.activate
@override_settings(
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch("baserow.contrib.database.webhooks.tasks.clear_webhook_queue")
def test_call_webhook_webhook_does_not_exist(mock_clear_queue):
@ -73,7 +55,7 @@ def test_call_webhook_webhook_does_not_exist(mock_clear_queue):
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook_webhook_url_cannot_be_reached(data_fixture):
webhook = data_fixture.create_table_webhook()
@ -113,7 +95,7 @@ def test_call_webhook_webhook_url_cannot_be_reached(data_fixture):
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook_becomes_inactive_max_failed_reached(data_fixture):
webhook = data_fixture.create_table_webhook(active=True, failed_triggers=1)
@ -141,7 +123,7 @@ def test_call_webhook_becomes_inactive_max_failed_reached(data_fixture):
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook_skipped_because_not_active(data_fixture):
webhook = data_fixture.create_table_webhook(active=False, failed_triggers=1)
@ -168,7 +150,7 @@ def test_call_webhook_skipped_because_not_active(data_fixture):
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook_reset_after_success_call(data_fixture):
webhook = data_fixture.create_table_webhook(failed_triggers=1)
@ -196,7 +178,7 @@ def test_call_webhook_reset_after_success_call(data_fixture):
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook(data_fixture):
webhook = data_fixture.create_table_webhook()
@ -256,7 +238,7 @@ def test_call_webhook(data_fixture):
@pytest.mark.django_db(transaction=True, databases=["default", "default-copy"])
@responses.activate
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_call_webhook_concurrent_task_moved_to_queue(data_fixture):
from baserow.contrib.database.webhooks.tasks import get_queue
@ -285,7 +267,7 @@ def test_call_webhook_concurrent_task_moved_to_queue(data_fixture):
@pytest.mark.django_db(transaction=True, databases=["default"])
@responses.activate
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch("baserow.contrib.database.webhooks.tasks.schedule_next_task_in_queue")
def test_call_webhook_next_item_scheduled(mock_schedule, data_fixture):
@ -312,7 +294,7 @@ def test_call_webhook_next_item_scheduled(mock_schedule, data_fixture):
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=0,
)
@httpretty.activate(verbose=True, allow_net_connect=False)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_cant_call_webhook_to_localhost_when_private_addresses_not_allowed(
@ -345,7 +327,7 @@ def test_cant_call_webhook_to_localhost_when_private_addresses_not_allowed(
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=0,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=0,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_can_call_webhook_to_localhost_when_private_addresses_allowed(
data_fixture,
@ -380,7 +362,7 @@ def test_can_call_webhook_to_localhost_when_private_addresses_allowed(
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch("baserow.ws.tasks.broadcast_to_users.apply")
def test_call_webhook_failed_reached_notification_send(
@ -448,7 +430,7 @@ class PaginatedWebhookEventType(WebhookEventType):
def __init__(self):
self.i = 1
def _paginate_payload(self, payload) -> tuple[dict, dict | None]:
def _paginate_payload(self, webhook, event_id, payload) -> tuple[dict, dict | None]:
payload["data"] = f"part {self.i}"
self.i += 1
return payload, {"data": f"part {self.i}"}
@ -456,7 +438,7 @@ class PaginatedWebhookEventType(WebhookEventType):
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
def test_webhook_with_paginated_payload(
mutable_webhook_event_type_registry, data_fixture
@ -523,7 +505,7 @@ def test_webhook_with_paginated_payload(
@pytest.mark.django_db(transaction=True)
@responses.activate
@override_settings(BASEROW_WEBHOOKS_BATCH_LIMIT=1)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", MemoryQueue)
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch("baserow.ws.tasks.broadcast_to_users.apply")
def test_call_webhook_payload_too_large_send_notification(

View file

@ -0,0 +1,8 @@
{
"type": "feature",
"message": "Add rows enter view webhook event type.",
"domain": "database",
"issue_number": 3009,
"bullet_points": [],
"created_at": "2025-03-07"
}

View file

@ -156,6 +156,8 @@ x-backend-variables: &backend-variables
BASEROW_BUILDER_DOMAINS:
BASEROW_FRONTEND_SAME_SITE_COOKIE:
BASEROW_ICAL_VIEW_MAX_EVENTS: ${BASEROW_ICAL_VIEW_MAX_EVENTS:-}
BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE:
BASEROW_ENTERPRISE_GROUPED_AGGREGATE_SERVICE_MAX_SERIES:
services:
# A caddy reverse proxy sitting in-front of all the services. Responsible for routing

View file

@ -174,6 +174,8 @@ x-backend-variables: &backend-variables
BASEROW_AUTO_VACUUM:
BASEROW_BUILDER_DOMAINS:
BASEROW_ICAL_VIEW_MAX_EVENTS: ${BASEROW_ICAL_VIEW_MAX_EVENTS:-}
BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE:
BASEROW_ENTERPRISE_GROUPED_AGGREGATE_SERVICE_MAX_SERIES:
services:
backend:

View file

@ -201,6 +201,7 @@ x-backend-variables: &backend-variables
BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS:
BASEROW_USE_LOCAL_CACHE:
BASEROW_WEBHOOKS_BATCH_LIMIT:
BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE:
BASEROW_ENTERPRISE_GROUPED_AGGREGATE_SERVICE_MAX_SERIES:
services:

View file

@ -121,6 +121,7 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_WEBHOOKS\_REQUEST\_TIMEOUT\_SECONDS | How long to wait on making the webhook request before timing out. | 5 |
| BASEROW\_MAX\_WEBHOOK\_CALLS\_IN\_QUEUE\_PER\_WEBHOOK | Maximum number of calls that can be in the webhook's queue. Can be useful to limit the damage when massive numbers of webhooks are triggered due to an automation loop. If not set or set to `0`, then there is no limit. | 0 |
| BASEROW\_WEBHOOKS\_BATCH\_LIMIT | This limit applies to all webhook_event_types that split large payloads into multiple batches. Each batch is a separate request with the same event_id and an incremental batch_id. This parameter sets the maximum number of batches per event. Set to 0 for unlimited batches. | 5 |
| BASEROW\_WEBHOOK\_ROWS\_ENTER\_VIEW\_BATCH\_SIZE | The maximum number of rows sent in a single webhook call of type `view.rows_entered`. This prevents the webhook call from becoming too large and causing issues while serializing the data or sending it over the network to the webhook endpoint. | 200 |
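For example, a self-hosted deployment that wants smaller `view.rows_entered` requests could lower the batch size via the environment variable (the value below is only an illustration):

```bash
# Split view.rows_entered payloads into batches of at most 50 rows.
export BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE=50
```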
### Generative AI configuration

View file

@ -296,6 +296,13 @@ class BaserowEnterpriseConfig(AppConfig):
action_type_registry.register(UpdatePeriodicDataSyncIntervalActionType())
from baserow.contrib.database.webhooks.registries import (
webhook_event_type_registry,
)
from baserow_enterprise.webhook_event_types import RowsEnterViewEventType
webhook_event_type_registry.register(RowsEnterViewEventType())
# Create default roles
post_migrate.connect(sync_default_roles_after_migrate, sender=self)

View file

@ -8,5 +8,6 @@ SECURE_FILE_SERVE = "secure_file_serve"
ENTERPRISE_SETTINGS = "ENTERPRISE_SETTINGS"
DATA_SYNC = "data_sync"
CHART_WIDGET = "chart_widget"
ADVANCED_WEBHOOKS = "advanced_webhooks"
BUILDER_SSO = "application_user_sso"

View file

@ -6,6 +6,7 @@ from baserow_premium.license.registries import LicenseType, SeatUsageSummary
from baserow.core.models import Workspace
from baserow_enterprise.features import (
ADVANCED_WEBHOOKS,
AUDIT_LOG,
BUILDER_SSO,
CHART_WIDGET,
@ -36,6 +37,7 @@ class EnterpriseWithoutSupportLicenseType(LicenseType):
DATA_SYNC,
CHART_WIDGET,
BUILDER_SSO,
ADVANCED_WEBHOOKS,
]
instance_wide = True
seats_manually_assigned = False

View file

@ -0,0 +1,117 @@
from django.conf import settings
from django.db.models import Q
from baserow_premium.license.handler import LicenseHandler
from baserow.contrib.database.api.rows.serializers import (
RowSerializer,
get_row_serializer_class,
)
from baserow.contrib.database.api.views.serializers import ViewSerializer
from baserow.contrib.database.views.handler import ViewSubscriptionHandler
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.views.signals import rows_entered_view
from baserow.contrib.database.webhooks.registries import WebhookEventType
from baserow_enterprise.features import ADVANCED_WEBHOOKS
class EnterpriseWebhookEventType(WebhookEventType):
def listener(self, **kwargs: dict):
"""
Only calls the super listener if the workspace has a valid license.
"""
table = self.get_table_object(**kwargs)
if LicenseHandler.workspace_has_feature(
ADVANCED_WEBHOOKS, table.database.workspace
):
super().listener(**kwargs)
class RowsEnterViewEventType(EnterpriseWebhookEventType):
type = "view.rows_entered"
signal = rows_entered_view
should_trigger_when_all_event_types_selected = False
def get_table_object(self, model, **kwargs):
return model.baserow_table
def get_additional_filters_for_webhooks_to_call(self, view, **kwargs):
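"""
Restricts the webhooks to call to those whose event config is subscribed
to the view that emitted the signal.
"""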
return Q(events__views=view)
def serialize_rows(self, model, rows, use_user_field_names):
rows_serializer = get_row_serializer_class(
model,
RowSerializer,
is_response=True,
user_field_names=use_user_field_names,
)
return rows_serializer(rows, many=True).data
def serialize_view(self, view):
return ViewSerializer(view).data
def _paginate_payload(self, webhook, event_id, payload):
"""
Paginates the payload when it contains more rows than the configured batch
size. The first batch is returned together with a remaining payload for the
next batch, or None when done. It also replaces the row_ids with the
serialized row data.
"""
row_ids = payload.pop("row_ids")
batch_size = settings.BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE
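# Only the first batch_size row ids are serialized in this call; the
# offset/batch_size metadata below is only attached when the rows span
# more than one batch.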
current_batch_row_ids = row_ids[:batch_size]
if len(current_batch_row_ids) < payload["total_count"]:
payload["offset"] = (payload.get("batch_id", 1) - 1) * batch_size
payload["batch_size"] = len(current_batch_row_ids)
# prepare the remaining payload with the other row ids for the next batch
remaining = None
if len(row_ids) > batch_size:
remaining = payload.copy()
remaining["row_ids"] = row_ids[batch_size:]
# get_payload only serializes the row_ids, but since this method runs in the
# celery worker, there's more time here to fully serialize the field data.
table = webhook.table
model = table.get_model()
row_fields = [
field.name if webhook.use_user_field_names else field.db_column
for field in model.get_fields()
]
payload["fields"] = ["id", "order", *row_fields]
rows = model.objects_and_trash.filter(
id__in=current_batch_row_ids
).enhance_by_fields()
payload["rows"] = self.serialize_rows(model, rows, webhook.use_user_field_names)
return payload, remaining
def get_test_call_payload(self, table, model, event_id, webhook):
view = GridView(id=0, name="View", table=table, order=1)
row = model(id=0, order=0)
payload = self.get_payload(
event_id=event_id, webhook=webhook, view=view, row_ids=[row.id]
)
return payload
def get_payload(self, event_id, webhook, view, row_ids, **kwargs):
payload = super().get_payload(event_id, webhook, **kwargs)
payload["view"] = self.serialize_view(view)
payload["row_ids"] = row_ids
payload["total_count"] = len(row_ids)
return payload
def after_create(self, webhook_event):
views = webhook_event.views.all()
if views:
ViewSubscriptionHandler.subscribe_to_views(webhook_event, views)
def after_update(self, webhook_event):
ViewSubscriptionHandler.unsubscribe_from_views(webhook_event)
views = webhook_event.views.all()
if views:
ViewSubscriptionHandler.subscribe_to_views(webhook_event, views)
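# Illustrative sketch only: a minimal dispatch loop showing how the
# (payload, remaining) contract of paginate_payload could be consumed. The
# real delivery loop lives in baserow.contrib.database.webhooks.tasks and
# additionally enforces BASEROW_WEBHOOKS_BATCH_LIMIT and retries;
# send_to_endpoint below is a hypothetical stand-in for the HTTP call.
def _example_dispatch(event_type, webhook, event_id, view, row_ids, send_to_endpoint):
    payload = event_type.get_payload(event_id, webhook, view=view, row_ids=row_ids)
    batch_id = 1
    while payload is not None:
        payload["batch_id"] = batch_id
        page, payload = event_type.paginate_payload(webhook, event_id, payload)
        send_to_endpoint(webhook.url, page)
        batch_id += 1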

View file

@ -0,0 +1,443 @@
from unittest.mock import MagicMock, patch
from django.db import transaction
from django.test.utils import override_settings
import pytest
import responses
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.webhooks.handler import WebhookHandler
from baserow.contrib.database.webhooks.registries import webhook_event_type_registry
from baserow.core.redis import WebhookRedisQueue
@pytest.mark.django_db()
def test_rows_enter_view_event_type(enterprise_data_fixture):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
view = enterprise_data_fixture.create_grid_view(table=table)
field = enterprise_data_fixture.create_text_field(
table=table, primary=True, name="Test 1"
)
model = table.get_model()
row = model.objects.create()
webhook = enterprise_data_fixture.create_table_webhook(
table=table,
request_method="POST",
url="http://localhost",
use_user_field_names=False,
)
payload = webhook_event_type_registry.get("view.rows_entered").get_payload(
event_id="1", webhook=webhook, view=view, row_ids=[row.id]
)
serialized_view = {
"id": view.id,
"table_id": table.id,
"order": 0,
"type": "grid",
"name": view.name,
"table": {
"id": table.id,
"order": 0,
"name": table.name,
"database_id": table.database_id,
},
"type": "grid",
"filters_disabled": False,
"show_logo": True,
"allow_public_export": False,
"public_view_has_password": False,
"filter_type": "AND",
"ownership_type": "collaborative",
"owned_by_id": None,
}
expected_payload = {
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_id": "1",
"event_type": "view.rows_entered",
"total_count": 1,
"view": serialized_view,
"row_ids": [row.id],
}
assert payload == expected_payload
paginated_payload, _ = webhook_event_type_registry.get(
"view.rows_entered"
).paginate_payload(webhook, "1", payload)
assert paginated_payload == {
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_id": "1",
"event_type": "view.rows_entered",
"total_count": 1,
"view": serialized_view,
"fields": ["id", "order", field.db_column],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
field.db_column: None,
}
],
}
webhook.use_user_field_names = True
webhook.save()
payload = webhook_event_type_registry.get("view.rows_entered").get_payload(
event_id="1", webhook=webhook, view=view, row_ids=[row.id]
)
assert payload == expected_payload
paginated_payload, _ = webhook_event_type_registry.get(
"view.rows_entered"
).paginate_payload(webhook, "1", payload)
assert paginated_payload == {
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_id": "1",
"event_type": "view.rows_entered",
"total_count": 1,
"view": serialized_view,
"fields": ["id", "order", "Test 1"],
"rows": [
{
"id": 1,
"order": "1.00000000000000000000",
"Test 1": None,
}
],
}
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.webhooks.registries.call_webhook")
def test_rows_enter_view_event_type_require_enterprise_license(
mock_call_webhook, enterprise_data_fixture
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
view = enterprise_data_fixture.create_grid_view(table=table)
with transaction.atomic():
webhook = WebhookHandler().create_table_webhook(
user=user,
table=table,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": [view.id]}],
headers={"Baserow-header-1": "Value 1"},
)
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
mock_call_webhook.delay.assert_not_called()
# From now on, the webhook should be called.
enterprise_data_fixture.enable_enterprise()
with transaction.atomic():
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
mock_call_webhook.delay.assert_called_once()
mock_call_webhook.reset_mock()
enterprise_data_fixture.delete_all_licenses()
with transaction.atomic():
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
mock_call_webhook.delay.assert_not_called()
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.webhooks.registries.call_webhook")
def test_rows_enter_view_event_type_not_triggered_with_include_all_events(
mock_call_webhook, enterprise_data_fixture, enable_enterprise
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
view = enterprise_data_fixture.create_grid_view(table=table)
with transaction.atomic():
webhook = WebhookHandler().create_table_webhook(
user=user, table=table, url="http://localhost/", include_all_events=True
)
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
assert mock_call_webhook.delay.call_count == 1
assert mock_call_webhook.delay.call_args[1]["event_type"] == "rows.created"
@pytest.mark.django_db()
def test_rows_enter_view_event_type_test_payload(enterprise_data_fixture):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
field = enterprise_data_fixture.create_text_field(
table=table, primary=True, name="Test 1"
)
model = table.get_model()
webhook = enterprise_data_fixture.create_table_webhook(
table=table,
request_method="POST",
url="http://localhost",
use_user_field_names=False,
)
webhook_event_type = webhook_event_type_registry.get("view.rows_entered")
payload = webhook_event_type.get_test_call_payload(table, model, "1", webhook)
serialized_view = {
"id": 0,
"table_id": table.id,
"order": 1,
"type": "grid",
"name": "View",
"table": {
"id": table.id,
"order": 0,
"name": table.name,
"database_id": table.database_id,
},
"type": "grid",
"filters_disabled": False,
"show_logo": True,
"allow_public_export": False,
"public_view_has_password": False,
"filter_type": "AND",
"ownership_type": "collaborative",
"owned_by_id": None,
}
assert payload == {
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_id": "1",
"event_type": "view.rows_entered",
"total_count": 1,
"view": serialized_view,
"row_ids": [0],
}
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.webhooks.registries.call_webhook")
def test_rows_enter_view_event_type_not_called_without_view(
mock_call_webhook, enterprise_data_fixture, enable_enterprise
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
view = enterprise_data_fixture.create_grid_view(table=table)
with transaction.atomic(): # No views
webhook = WebhookHandler().create_table_webhook(
user=user,
table=table,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": []}],
headers={"Baserow-header-1": "Value 1"},
)
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
mock_call_webhook.delay.assert_not_called()
with transaction.atomic(): # Now with a view
WebhookHandler().update_table_webhook(
user=user,
webhook=webhook,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": [view.id]}],
headers={"Baserow-header-1": "Value 1"},
)
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
mock_call_webhook.delay.assert_called_once()
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.webhooks.registries.call_webhook")
def test_rows_enter_view_event_type_called_once_per_view(
mock_call_webhook, enterprise_data_fixture, enable_enterprise
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
view_a = enterprise_data_fixture.create_grid_view(table=table)
view_b = enterprise_data_fixture.create_grid_view(table=table)
with transaction.atomic():
WebhookHandler().create_table_webhook(
user=user,
table=table,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[
{"event_type": "view.rows_entered", "views": [view_a.id, view_b.id]}
],
headers={"Baserow-header-1": "Value 1"},
)
RowHandler().force_create_rows(user=user, table=table, rows_values=[{}])
assert mock_call_webhook.delay.call_count == 2
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.webhooks.registries.call_webhook")
def test_rows_enter_view_event_type_only_right_webhook_is_called(
mock_call_webhook, enterprise_data_fixture, enable_enterprise
):
user = enterprise_data_fixture.create_user()
table_a = enterprise_data_fixture.create_database_table(user=user)
view_a = enterprise_data_fixture.create_grid_view(table=table_a)
table_b = enterprise_data_fixture.create_database_table(user=user)
view_b = enterprise_data_fixture.create_grid_view(table=table_b)
with transaction.atomic():
WebhookHandler().create_table_webhook(
user=user,
table=table_a,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": [view_a.id]}],
headers={"Baserow-header-1": "Value 1"},
)
WebhookHandler().create_table_webhook(
user=user,
table=table_b,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": [view_b.id]}],
headers={"Baserow-header-1": "Value 1"},
)
RowHandler().force_create_rows(user=user, table=table_a, rows_values=[{}])
assert mock_call_webhook.delay.call_count == 1
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch("baserow.contrib.database.webhooks.tasks.RedisQueue", WebhookRedisQueue)
@patch("baserow.contrib.database.webhooks.tasks.cache", MagicMock())
@patch(
"baserow.contrib.database.webhooks.tasks.make_request_and_save_result",
side_effect=lambda *args, **kwargs: True,
)
@override_settings(DEBUG=True, BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE=2)
def test_rows_enter_view_event_type_paginate_data(
mock_make_request, enterprise_data_fixture, enable_enterprise
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
text_field = enterprise_data_fixture.create_text_field(table=table, name="text")
view = enterprise_data_fixture.create_grid_view(table=table)
responses.add(responses.POST, "http://localhost/", json={}, status=200)
serialized_view = {
"id": view.id,
"table_id": table.id,
"order": 0,
"type": "grid",
"name": view.name,
"table": {
"id": table.id,
"order": 0,
"name": table.name,
"database_id": table.database_id,
},
"type": "grid",
"filters_disabled": False,
"show_logo": True,
"allow_public_export": False,
"public_view_has_password": False,
"filter_type": "AND",
"ownership_type": "collaborative",
"owned_by_id": None,
}
expected_first_page_payload = {
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_type": "view.rows_entered",
"offset": 0,
"total_count": 3,
"batch_id": 1,
"batch_size": 2,
"view": serialized_view,
"fields": ["id", "order", "text"],
"rows": [
{"id": 1, "order": "1.00000000000000000000", "text": "a"},
{"id": 2, "order": "2.00000000000000000000", "text": "b"},
],
}
with transaction.atomic():
webhook = WebhookHandler().create_table_webhook(
user=user,
table=table,
url="http://localhost/",
include_all_events=False,
events=["view.rows_entered"],
event_config=[{"event_type": "view.rows_entered", "views": [view.id]}],
headers={"Baserow-header-1": "Value 1"},
use_user_field_names=True,
)
rows = RowHandler().force_create_rows(
user=user,
table=table,
rows_values=[
{text_field.db_column: "a"},
{text_field.db_column: "b"},
{text_field.db_column: "c"},
],
)
assert mock_make_request.call_count == 2
first_call_args = mock_make_request.call_args_list[0][0]
event_id = first_call_args[1]
first_page_payload = first_call_args[6]
# first batch
expected_first_page_payload["event_id"] = event_id
assert first_page_payload == expected_first_page_payload
# second batch
second_call_args = mock_make_request.call_args_list[1][0]
second_page_payload = second_call_args[6]
assert second_page_payload == {
"event_id": event_id,
"table_id": table.id,
"database_id": table.database_id,
"workspace_id": table.database.workspace_id,
"event_type": "view.rows_entered",
"offset": 2,
"total_count": 3,
"batch_id": 2,
"batch_size": 1,
"view": serialized_view,
"fields": ["id", "order", "text"],
"rows": [
{"id": 3, "order": "3.00000000000000000000", "text": "c"},
],
}

View file

@ -35,6 +35,13 @@
<i class="iconoir-check premium-features__feature-icon"></i>
{{ $t('enterpriseFeatures.coBranding') }}
</li>
<li
v-if="!hiddenFeatures.includes(enterpriseFeatures.ADVANCED_WEBHOOKS)"
class="premium-features__feature"
>
<i class="iconoir-check premium-features__feature-icon"></i>
{{ $t('enterpriseFeatures.advancedWebhooks') }}
</li>
<li
v-if="!hiddenFeatures.includes(enterpriseFeatures.SUPPORT)"
class="premium-features__feature"

View file

@ -8,6 +8,7 @@ const EnterpriseFeatures = {
DATA_SYNC: 'DATA_SYNC',
CHART_WIDGET: 'CHART_WIDGET',
BUILDER_SSO: 'BUILDER_SSO',
ADVANCED_WEBHOOKS: 'ADVANCED_WEBHOOKS',
}
export default EnterpriseFeatures

View file

@ -46,6 +46,7 @@ export class EnterpriseWithoutSupportLicenseType extends LicenseType {
EnterpriseFeaturesObject.DATA_SYNC,
EnterpriseFeaturesObject.CHART_WIDGET,
EnterpriseFeaturesObject.BUILDER_SSO,
EnterpriseFeaturesObject.ADVANCED_WEBHOOKS,
]
}

View file

@ -9,7 +9,8 @@
"rbac": "RBAC",
"sso": "SSO",
"licenseDescription": "Viewers are free with Baserow Enterprise. If a user has any other role, in any workspace then they will use a paid seat automatically.",
"overflowWarning": "You have too many non-viewer users and have used up all of your paid seats. Change users to become viewers on each workspaces members page."
"overflowWarning": "You have too many non-viewer users and have used up all of your paid seats. Change users to become viewers on each workspaces members page.",
"deactivated": "Available in the advanced/enterprise version"
},
"trashType": {
"team": "team"
@ -286,7 +287,8 @@
"dataSync": "Data sync",
"coBranding": "Co-branding logo replacement",
"support": "Direct support",
"chartWidget": "Chart widget"
"chartWidget": "Chart widget",
"advancedWebhooks": "Advanced webhooks"
},
"chatwootSupportSidebarWorkspace": {
"directSupport": "Direct support"
@ -470,5 +472,12 @@
},
"periodicDataSyncDeactivatedNotification": {
"body": "{name} periodic data sync has been deactivated because it failed too many times consecutively."
},
"webhook": {
"rowsEnterVieweventType": "Rows enter view"
},
"webhookForm": {
"triggerWhenRowsEnterView": "Triggered when rows enter the view.",
"helpTriggerWhenRowsEnterView": "Triggers only when `Rows enter view` is selected and rows that now match the filters enter the view."
}
}

View file

@ -73,6 +73,7 @@ import {
MedianViewAggregationType,
} from '@baserow/modules/database/viewAggregationTypes'
import { PeriodicDataSyncDeactivatedNotificationType } from '@baserow_enterprise/notificationTypes'
import { RowsEnterViewWebhookEventType } from '@baserow_enterprise/webhookEventTypes'
import {
TextFieldType,
LongTextFieldType,
@ -301,6 +302,11 @@ export default (context) => {
new PeriodicIntervalFieldsConfigureDataSyncType(context)
)
app.$registry.register(
'webhookEvent',
new RowsEnterViewWebhookEventType(context)
)
app.$registry.register('dashboardWidget', new ChartWidgetType(context))
app.$registry.register(
'chartFieldFormatting',

View file

@ -0,0 +1,61 @@
import {
WebhookEventType,
viewExample,
} from '@baserow/modules/database/webhookEventTypes'
import EnterpriseModal from '@baserow_enterprise/components/EnterpriseModal'
import EnterpriseFeatures from '@baserow_enterprise/features'
class EnterpriseWebhookEventType extends WebhookEventType {
getDeactivatedText() {
return this.app.i18n.t('enterprise.deactivated')
}
getDeactivatedClickModal() {
return EnterpriseModal
}
isDeactivated(workspaceId) {
return !this.app.$hasFeature(
EnterpriseFeatures.ADVANCED_WEBHOOKS,
workspaceId
)
}
getFeatureName() {
return this.app.i18n.t('enterpriseFeatures.advancedWebhooks')
}
}
export class RowsEnterViewWebhookEventType extends EnterpriseWebhookEventType {
static getType() {
return 'view.rows_entered'
}
getName() {
const { i18n } = this.app
return i18n.t('webhook.rowsEnterVieweventType')
}
getExamplePayload(database, table, rowExample) {
const payload = super.getExamplePayload(database, table, rowExample)
payload.view = viewExample
payload.fields = Object.keys(rowExample)
payload.rows = [rowExample]
payload.total_count = 1
return payload
}
getHasRelatedView() {
return true
}
getRelatedViewPlaceholder() {
const { i18n } = this.app
return i18n.t('webhookForm.triggerWhenRowsEnterView')
}
getRelatedViewHelpText() {
const { i18n } = this.app
return i18n.t('webhookForm.helpTriggerWhenRowsEnterView')
}
}

View file

@ -184,7 +184,6 @@ export default {
* doesn't duplicate the name of an existing custom color.
*/
addCustomColor() {
console.log('addCustomColor')
// To avoid duplicating names, newColorId is incremented until an unused
// value is found.
const existingNames = this.values.custom_colors.map((color) => color.name)

View file

@ -78,7 +78,6 @@ export const registerRealtimeEvents = (realtime) => {
realtime.registerEvent('element_updated', ({ store }, { element }) => {
const selectedPage = store.getters['page/getSelected']
if (selectedPage.id === element.page_id) {
console.log('it is')
const builder = store.getters['application/get'](selectedPage.builder_id)
store.dispatch('element/forceUpdate', {
builder,

View file

@ -117,6 +117,7 @@
width: 262px;
display: flex;
align-items: center;
z-index: 2;
}
.webhook__type-dropdown {

View file

@ -76,6 +76,7 @@
:database="database"
:view="view"
:table="table"
:views="views"
@enable-rename="$refs.rename.edit()"
>
</ViewContext>

View file

@ -126,12 +126,7 @@
:table="table"
:fields="fields"
/>
<WebhookModal
ref="webhookModal"
:database="database"
:table="table"
:fields="fields"
/>
<WebhookModal ref="webhookModal" :database="database" :table="table" />
</Context>
</template>

View file

@ -6,6 +6,7 @@
:database="database"
:table="table"
:fields="fields"
:views="views"
@submitted="submit"
>
<div class="actions">
@ -42,6 +43,10 @@ export default {
type: Array,
required: true,
},
views: {
type: Array,
required: true,
},
},
data() {
return {

View file

@ -6,6 +6,7 @@
:database="database"
:table="table"
:fields="fields"
:views="views"
:default-values="webhook"
@submitted="submit"
@formchange="handleFormChange"
@ -66,6 +67,10 @@ export default {
type: Array,
required: true,
},
views: {
type: Array,
required: true,
},
webhook: {
type: Object,
required: true,

View file

@ -52,6 +52,7 @@
:database="database"
:table="table"
:fields="fields"
:views="views"
@updated="$emit('updated', $event)"
@deleted="$emit('deleted', $event)"
/>
@ -94,6 +95,10 @@ export default {
type: Array,
required: true,
},
views: {
type: Array,
required: true,
},
},
data() {
return {

View file

@ -113,20 +113,32 @@
<div
v-for="webhookEvent in webhookEventTypes"
:key="webhookEvent.type"
v-tooltip="
webhookEvent.isDeactivated()
? webhookEvent.getDeactivatedText()
: null
"
class="webhook__type"
tooltip-position="bottom-cursor"
@mousedown="
webhookEvent.isDeactivated() &&
!values.events.includes(webhookEvent.type) &&
$refs[`${webhookEvent.getName()}DeactivatedClickModal`][0].show()
"
>
<Checkbox
:checked="values.events.includes(webhookEvent.type)"
@input="
$event
? values.events.push(webhookEvent.type)
: values.events.splice(
values.events.indexOf(webhookEvent.type),
1
)
:disabled="
!values.events.includes(webhookEvent.type) &&
webhookEvent.isDeactivated()
"
>{{ webhookEvent.getName() }}</Checkbox
@input="toggleEventType(webhookEvent, $event)"
>
{{ webhookEvent.getName() }}
<div v-if="webhookEvent.isDeactivated()" class="deactivated-label">
<i class="iconoir-lock"></i>
</div>
</Checkbox>
<div
v-if="webhookEvent.getHasRelatedFields()"
class="webhook__type-dropdown-container"
@ -157,6 +169,42 @@
:tooltip="webhookEvent.getRelatedFieldsHelpText()"
/>
</div>
<div
v-if="webhookEvent.getHasRelatedView()"
class="webhook__type-dropdown-container"
>
<Dropdown
:value="
values.events.includes(webhookEvent.type)
? getEventView(webhookEvent)
: null
"
:placeholder="webhookEvent.getRelatedViewPlaceholder()"
:disabled="!values.events.includes(webhookEvent.type)"
class="dropdown--tiny webhook__type-dropdown"
@input="setEventView(webhookEvent, $event)"
>
<DropdownItem
v-for="view in filterableViews"
:key="view.id"
:name="view.name"
:value="view.id"
>
</DropdownItem>
</Dropdown>
<HelpIcon
v-if="webhookEvent.getRelatedViewHelpText()"
class="margin-left-1"
:tooltip="webhookEvent.getRelatedViewHelpText()"
/>
</div>
<component
:is="webhookEvent.getDeactivatedClickModal()"
v-if="webhookEvent.isDeactivated()"
:ref="`${webhookEvent.getName()}DeactivatedClickModal`"
:workspace="database.workspace"
:name="webhookEvent.getFeatureName()"
></component>
</div>
</div>
@ -275,6 +323,10 @@ export default {
type: Array,
required: true,
},
views: {
type: Array,
required: true,
},
},
setup() {
const values = reactive({
@ -351,6 +403,11 @@ export default {
webhookEventTypes() {
return this.$registry.getAll('webhookEvent')
},
filterableViews() {
return this.views.filter(
(view) => this.$registry.get('view', view.type).canFilter
)
},
/**
* Generates an example payload of the webhook event based on the chosen webhook
* event type.
@ -424,6 +481,45 @@ export default {
eventConfig.fields = fields
},
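/**
 * Returns the id of the view currently configured for the given event
 * type, or null when the event has no view selected yet.
 */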
getEventView(event) {
const eventConfig = this.values.event_config.find(
(e) => e.event_type === event.type
)
if (eventConfig === undefined) {
return null
}
const viewId = eventConfig.views?.[0]
const view =
viewId && this.filterableViews.find((view) => view.id === viewId)
return view?.id || null
},
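/**
 * Stores the selected view for the given event type, creating the
 * event_config entry first if it doesn't exist yet.
 */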
setEventView(event, view) {
const eventConfig = this.values.event_config.find(
(e) => e.event_type === event.type
)
if (eventConfig === undefined) {
this.values.event_config.push({
event_type: event.type,
views: [],
})
return this.setEventView(event, view)
}
this.$set(eventConfig, 'views', [view])
},
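/**
 * Adds or removes the event type from the selected events. When an event
 * type is unchecked, its event_config entry is removed as well.
 */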
toggleEventType(webhookEvent, event) {
if (event) {
this.values.events.push(webhookEvent.type)
} else {
this.values.events.splice(
this.values.events.indexOf(webhookEvent.type),
1
)
const configIndex = this.values.event_config.findIndex(
  (e) => e.event_type === webhookEvent.type
)
if (configIndex !== -1) {
  this.values.event_config.splice(configIndex, 1)
}
}
},
prepareHeaders(headers) {
const preparedHeaders = {}
headers.forEach((header) => {

View file

@ -10,6 +10,7 @@
:database="database"
:table="table"
:fields="fields"
:views="views"
@updated="$emit('updated', $event)"
@deleted="$emit('deleted', $event)"
/>
@ -37,6 +38,10 @@ export default {
type: Array,
required: true,
},
views: {
type: Array,
required: true,
},
webhooks: {
type: Array,
required: true,

View file

@ -21,6 +21,7 @@
:database="database"
:table="table"
:fields="tableFields"
:views="tableViews"
:webhooks="webhooks"
@updated="updated"
@deleted="deleted"
@ -30,6 +31,7 @@
:database="database"
:table="table"
:fields="tableFields"
:views="tableViews"
@created="created"
/>
</template>
@ -43,6 +45,7 @@ import WebhookList from '@baserow/modules/database/components/webhook/WebhookLis
import CreateWebhook from '@baserow/modules/database/components/webhook/CreateWebhook'
import WebhookService from '@baserow/modules/database/services/webhook'
import FieldService from '@baserow/modules/database/services/field'
import ViewService from '@baserow/modules/database/services/view'
export default {
name: 'WebhookModal',
@ -65,6 +68,11 @@ export default {
required: false,
default: null,
},
views: {
type: [Array, null],
required: false,
default: null,
},
},
data() {
return {
@ -72,6 +80,7 @@ export default {
state: 'list',
webhooks: [],
tableFields: [],
tableViews: [],
}
},
methods: {
@ -92,21 +101,35 @@ export default {
this.handleError(e)
}
const selectedTableId = this.$store.getters['table/getSelected']?.id
const isSelectedTable =
selectedTableId && selectedTableId === this.table.id
// The parent component can provide the fields, but if it doesn't we need to
// resolve them ourselves: if the table is the selected one we can read them
// from the store, otherwise we fetch them from the backend.
if (Array.isArray(this.fields)) {
this.tableFields = this.fields
} else if (isSelectedTable) {
this.tableFields = this.$store.getters['field/getAll']
} else {
const selectedTable = this.$store.getters['table/getSelected']
if (selectedTable && selectedTable.id === this.table.id) {
this.tableFields = this.$store.getters['field/getAll']
} else {
const { data: fields } = await FieldService(this.$client).fetchAll(
this.table.id
)
this.tableFields = fields
}
const { data: fields } = await FieldService(this.$client).fetchAll(
this.table.id
)
this.tableFields = fields
}
// The parent component can provide the views, but if it doesn't we need to
// resolve them ourselves: if the table is the selected one we can read them
// from the store, otherwise we fetch them from the backend.
if (Array.isArray(this.views)) {
this.tableViews = this.views
} else if (isSelectedTable) {
this.tableViews = this.$store.getters['view/getAll']
} else {
const { data: views } = await ViewService(this.$client).fetchAll(
this.table.id
)
this.tableViews = views
}
this.loading = false

View file

@ -49,6 +49,39 @@ export class WebhookEventType extends Registerable {
getRelatedFieldsHelpText() {
return null
}
/**
* If `true`, then a dropdown will be shown next to the webhook type allowing the user
* to choose a related view. This can be useful, for example, for an event
* that's restricted to certain views.
*/
getHasRelatedView() {
return false
}
getRelatedViewPlaceholder() {
return null
}
getRelatedViewHelpText() {
return null
}
getDeactivatedText() {
return ''
}
getDeactivatedClickModal() {
return null
}
isDeactivated(workspaceId) {
return false
}
getFeatureName() {
return ''
}
}
export class RowsCreatedWebhookEventType extends WebhookEventType {
@ -183,13 +216,18 @@ export class FieldDeletedWebhookEventType extends WebhookEventType {
// Unfortunately, we don't have an example of the field object in the web-frontend, so
// we would need to hardcode it here.
const viewExample = {
export const viewExample = {
id: 0,
table_id: 0,
name: 'View',
order: 1,
type: 'grid',
table: null,
table: {
id: 0,
order: 1,
name: 'Table',
database_id: 0,
},
filter_type: 'AND',
filters_disabled: false,
public_view_has_password: false,

View file

@ -18,6 +18,7 @@ describe('Webhook form Input Tests', () => {
table: { id: 1 },
database: { id: 2, workspace: { id: 3 } },
fields: [{ id: 1, name: 'Name', type: 'text' }],
views: [],
},
})
}