1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-14 17:18:33 +00:00

Merge branch 'chart-widget-type' into 'develop'

Introduce new chart widget type

Closes 

See merge request 
This commit is contained in:
Petr Stribny 2025-02-07 18:11:54 +00:00
commit c454cf61bf
13 changed files with 698 additions and 65 deletions
backend/src/baserow/test_utils
enterprise/backend
src/baserow_enterprise
api/integrations/local_baserow
apps.py
dashboard/widgets
integrations/local_baserow
migrations
tests/baserow_enterprise_tests

View file

@ -591,6 +591,16 @@ class AnyStr(str):
return isinstance(other, str)
class AnyDict(dict):
"""
A class that can be used to check if a value is a dict. Useful in tests when
you don't care about the contents.
"""
def __eq__(self, other):
return isinstance(other, dict)
def load_test_cases(name: str) -> Union[List, Dict]:
"""
Load test data from the global cases directory. These cases are used to run the

View file

@ -7,16 +7,18 @@ from baserow_enterprise.integrations.local_baserow.models import (
class LocalBaserowTableServiceAggregationSeriesSerializer(serializers.ModelSerializer):
field_id = serializers.IntegerField()
order = serializers.IntegerField(read_only=True)
class Meta:
model = LocalBaserowTableServiceAggregationSeries
fields = ("id", "order", "field", "aggregation_type")
fields = ("order", "aggregation_type", "field_id")
class LocalBaserowTableServiceAggregationGroupBySerializer(serializers.ModelSerializer):
field_id = serializers.IntegerField()
order = serializers.IntegerField(read_only=True)
class Meta:
model = LocalBaserowTableServiceAggregationGroupBy
fields = ("id", "order", "field")
fields = ("order", "field_id")

View file

@ -187,6 +187,8 @@ class BaserowEnterpriseConfig(AppConfig):
app_auth_provider_type_registry.register(SamlAppAuthProviderType())
app_auth_provider_type_registry.register(OpenIdConnectAppAuthProviderType())
from baserow.contrib.dashboard.widgets.registries import widget_type_registry
from baserow_enterprise.dashboard.widgets.widget_types import ChartWidgetType
from baserow_enterprise.integrations.local_baserow.service_types import (
LocalBaserowGroupedAggregateRowsUserServiceType,
)
@ -195,6 +197,7 @@ class BaserowEnterpriseConfig(AppConfig):
service_type_registry.register(
LocalBaserowGroupedAggregateRowsUserServiceType()
)
widget_type_registry.register(ChartWidgetType())
from baserow.contrib.builder.elements.registries import element_type_registry
from baserow_enterprise.builder.elements.element_types import (

View file

@ -0,0 +1,11 @@
from django.db import models
from baserow.contrib.dashboard.widgets.models import Widget
class ChartWidget(Widget):
    """Dashboard widget that renders a chart from a single data source."""

    # PROTECT: the data source cannot be cascade-deleted while the widget
    # still references it; ChartWidgetType.after_delete removes it explicitly
    # once the widget itself is permanently deleted.
    data_source = models.ForeignKey(
        "dashboard.DashboardDataSource",
        on_delete=models.PROTECT,
        help_text="Data source for fetching the result to display.",
    )

View file

@ -0,0 +1,96 @@
from rest_framework import serializers
from baserow.contrib.dashboard.data_sources.handler import DashboardDataSourceHandler
from baserow.contrib.dashboard.data_sources.models import DashboardDataSource
from baserow.contrib.dashboard.types import WidgetDict
from baserow.contrib.dashboard.widgets.models import Widget
from baserow.contrib.dashboard.widgets.registries import WidgetType
from baserow.core.services.registries import service_type_registry
from baserow_enterprise.integrations.local_baserow.service_types import (
LocalBaserowGroupedAggregateRowsUserServiceType,
)
from .models import ChartWidget
class ChartWidgetType(WidgetType):
    """Widget type for chart widgets.

    A chart widget is always backed by exactly one dashboard data source of
    the grouped-aggregate-rows service type. The data source is created
    automatically together with the widget, trashed/restored alongside it,
    and deleted when the widget is permanently deleted.
    """

    type = "chart"
    model_class = ChartWidget

    # The data source id is exposed read-side only; ``request_serializer_*``
    # below are empty, so clients cannot set it on create/update requests.
    serializer_field_names = ["data_source_id"]
    serializer_field_overrides = {
        "data_source_id": serializers.PrimaryKeyRelatedField(
            queryset=DashboardDataSource.objects.all(),
            required=False,
            default=None,
            help_text="References a data source field for the widget.",
        )
    }
    request_serializer_field_names = []
    request_serializer_field_overrides = {}

    class SerializedDict(WidgetDict):
        # Shape of the export/import payload for this widget type.
        data_source_id: int

    def prepare_value_for_db(self, values: dict, instance: Widget | None = None):
        """Inject an auto-created data source when the widget is created.

        :param values: The values about to be persisted.
        :param instance: The existing widget when updating, ``None`` on create.
        :return: The (possibly extended) values dict.
        """

        if instance is None:
            # When the widget is being created we want to automatically
            # create a data source for it
            available_name = DashboardDataSourceHandler().find_unused_data_source_name(
                values["dashboard"], "WidgetDataSource"
            )
            data_source = DashboardDataSourceHandler().create_data_source(
                dashboard=values["dashboard"],
                name=available_name,
                service_type=service_type_registry.get(
                    LocalBaserowGroupedAggregateRowsUserServiceType.type
                ),
            )
            values["data_source"] = data_source
        return values

    def before_trashed(self, instance: Widget):
        # Keep the backing data source in sync with the widget's trash state.
        instance.data_source.trashed = True
        instance.data_source.save()

    def before_restore(self, instance: Widget):
        instance.data_source.trashed = False
        instance.data_source.save()

    def after_delete(self, instance: Widget):
        # The FK uses on_delete=PROTECT, so the data source must be removed
        # explicitly after the widget is gone.
        DashboardDataSourceHandler().delete_data_source(instance.data_source)

    # NOTE(review): the ``any`` annotations below refer to the ``any()``
    # builtin, not ``typing.Any`` — harmless at runtime but misleading to
    # type checkers; consider switching to ``typing.Any``.
    def deserialize_property(
        self,
        prop_name: str,
        value: any,
        id_mapping: dict[str, any],
        **kwargs,
    ) -> any:
        """Map an exported data source id onto the newly imported one."""

        if prop_name == "data_source_id" and value:
            return id_mapping["dashboard_data_sources"][value]
        return super().deserialize_property(
            prop_name,
            value,
            id_mapping,
            **kwargs,
        )

    def serialize_property(
        self,
        instance: Widget,
        prop_name: str,
        files_zip=None,
        storage=None,
        cache=None,
    ):
        """Export the raw data source id; defer everything else to the base."""

        if prop_name == "data_source_id":
            return instance.data_source_id
        return super().serialize_property(
            instance,
            prop_name,
            files_zip=files_zip,
            storage=storage,
            cache=cache,
        )

View file

@ -85,10 +85,6 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
return (
super().allowed_fields
+ LocalBaserowTableServiceFilterableMixin.mixin_allowed_fields
+ [
"service_aggregation_series",
"service_aggregation_group_bys",
]
)
@property
@ -96,6 +92,7 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
return (
super().serializer_field_names
+ LocalBaserowTableServiceFilterableMixin.mixin_serializer_field_names
+ LocalBaserowTableServiceSortableMixin.mixin_serializer_field_names
) + ["aggregation_series", "aggregation_group_bys"]
@property
@ -103,8 +100,9 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
return {
**super().serializer_field_overrides,
**LocalBaserowTableServiceFilterableMixin.mixin_serializer_field_overrides,
**LocalBaserowTableServiceSortableMixin.mixin_serializer_field_overrides,
"aggregation_series": LocalBaserowTableServiceAggregationSeriesSerializer(
many=True, source="service_aggregation_series", required=True
many=True, source="service_aggregation_series", required=False
),
"aggregation_group_bys": LocalBaserowTableServiceAggregationGroupBySerializer(
many=True, source="service_aggregation_group_bys", required=False
@ -225,9 +223,13 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
service.service_sorts.all().delete()
if service_sorts is not None:
table_field_ids = service.table.field_set.values_list("id", flat=True)
model = service.table.get_model()
allowed_sort_field_ids = []
allowed_sort_field_ids = [
series.field_id
for series in service.service_aggregation_series.all()
]
if service.service_aggregation_group_bys.count() > 0:
group_by = service.service_aggregation_group_bys.all()[0]
allowed_sort_field_ids += (
@ -237,15 +239,15 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
)
def validate_sort(service_sort):
if service_sort["field_id"] not in table_field_ids:
if service_sort["field"].id not in table_field_ids:
raise DRFValidationError(
detail=f"The field with ID {service_sort['field_id']} is not "
detail=f"The field with ID {service_sort['field'].id} is not "
"related to the given table.",
code="invalid_field",
)
if service_sort["field_id"] not in allowed_sort_field_ids:
if service_sort["field"].id not in allowed_sort_field_ids:
raise DRFValidationError(
detail=f"The field with ID {service_sort['field_id']} cannot be used for sorting.",
detail=f"The field with ID {service_sort['field'].id} cannot be used for sorting.",
code="invalid_field",
)
@ -270,13 +272,13 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
metadata.
"""
if "aggregation_series" in values:
if "service_aggregation_series" in values:
self._update_service_aggregation_series(
instance, values.pop("aggregation_series")
instance, values.pop("service_aggregation_series")
)
if "aggregation_group_bys" in values:
if "service_aggregation_group_bys" in values:
self._update_service_aggregation_group_bys(
instance, values.pop("aggregation_group_bys")
instance, values.pop("service_aggregation_group_bys")
)
if "service_sorts" in values:
self._update_service_sortings(instance, values.pop("service_sorts"))
@ -302,16 +304,16 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
# the things that are no longer applicable for the other table.
from_table, to_table = changes.get("table", (None, None))
if "aggregation_series" in values:
if "service_aggregation_series" in values:
self._update_service_aggregation_series(
instance, values.pop("aggregation_series")
instance, values.pop("service_aggregation_series")
)
elif from_table and to_table:
instance.service_aggregation_series.all().delete()
if "aggregation_group_bys" in values:
if "service_aggregation_group_bys" in values:
self._update_service_aggregation_group_bys(
instance, values.pop("aggregation_group_bys")
instance, values.pop("service_aggregation_group_bys")
)
elif from_table and to_table:
instance.service_aggregation_group_bys.all().delete()

View file

@ -0,0 +1,42 @@
# Generated by Django 5.0.9 on 2025-02-05 13:00
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the ``ChartWidget`` multi-table-inheritance child of ``Widget``."""

    dependencies = [
        ("baserow_enterprise", "0039_openidconnectappauthprovidermodel"),
        ("dashboard", "0003_widget_dashboarddatasource_summarywidget"),
    ]

    operations = [
        migrations.CreateModel(
            name="ChartWidget",
            fields=[
                (
                    # MTI parent link to the base dashboard Widget row.
                    "widget_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="dashboard.widget",
                    ),
                ),
                (
                    # PROTECT mirrors the model: the data source outlives
                    # cascade deletes and is removed by the widget type hook.
                    "data_source",
                    models.ForeignKey(
                        help_text="Data source for fetching the result to display.",
                        on_delete=django.db.models.deletion.PROTECT,
                        to="dashboard.dashboarddatasource",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("dashboard.widget",),
        ),
    ]

View file

@ -0,0 +1,70 @@
import pytest
from rest_framework.reverse import reverse
from rest_framework.status import HTTP_200_OK
from baserow.test_utils.helpers import AnyInt
@pytest.mark.django_db
def test_create_chart_widget(api_client, enterprise_data_fixture):
    """Creating a chart widget over the API succeeds and auto-assigns a
    data source (``data_source_id`` is present without being supplied)."""

    user, token = enterprise_data_fixture.create_user_and_token()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    list_url = reverse(
        "api:dashboard:widgets:list", kwargs={"dashboard_id": dashboard.id}
    )

    request_body = {
        "title": "Title",
        "description": "Description",
        "type": "chart",
    }
    response = api_client.post(
        list_url,
        request_body,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    payload = response.json()

    assert response.status_code == HTTP_200_OK, payload
    expected = {
        "id": AnyInt(),
        "title": "Title",
        "description": "Description",
        "data_source_id": AnyInt(),
        "dashboard_id": dashboard.id,
        "order": "1.00000000000000000000",
        "type": "chart",
    }
    assert payload == expected
@pytest.mark.django_db
def test_get_widgets_with_chart_widget(api_client, enterprise_data_fixture):
    """Listing widgets serializes a chart widget with its data source id."""

    user, token = enterprise_data_fixture.create_user_and_token()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    data_source = enterprise_data_fixture.create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
        dashboard=dashboard, name="Name 1"
    )
    widget = enterprise_data_fixture.create_chart_widget(
        dashboard=dashboard,
        data_source=data_source,
        title="Widget 1",
        description="Description 1",
    )
    url = reverse("api:dashboard:widgets:list", kwargs={"dashboard_id": dashboard.id})
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    # Exact-match assertion so new serializer fields are caught explicitly.
    assert response_json == [
        {
            "id": widget.id,
            "title": "Widget 1",
            "description": "Description 1",
            "dashboard_id": dashboard.id,
            "data_source_id": data_source.id,
            "order": "1.00000000000000000000",
            "type": "chart",
        },
    ]

View file

@ -0,0 +1,302 @@
import pytest
from rest_framework.reverse import reverse
from rest_framework.status import HTTP_200_OK
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.integrations.local_baserow.models import (
LocalBaserowTableServiceSort,
)
from baserow.test_utils.helpers import AnyDict, AnyInt
from baserow_enterprise.integrations.local_baserow.models import (
LocalBaserowGroupedAggregateRows,
LocalBaserowTableServiceAggregationGroupBy,
LocalBaserowTableServiceAggregationSeries,
)
@pytest.mark.django_db
def test_grouped_aggregate_rows_get_dashboard_data_sources(
    api_client, enterprise_data_fixture
):
    """Data source listing serializes grouped-aggregate series, group-bys and
    sortings, and leaves unrelated list-rows data sources untouched."""

    user, token = enterprise_data_fixture.create_user_and_token()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    table = enterprise_data_fixture.create_database_table(user=user)
    field = enterprise_data_fixture.create_number_field(table=table)
    field_2 = enterprise_data_fixture.create_number_field(table=table)
    field_3 = enterprise_data_fixture.create_number_field(table=table)
    data_source1 = enterprise_data_fixture.create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
        dashboard=dashboard, name="Name 1"
    )
    data_source1.service.table = table
    data_source1.service.save()
    # Two sum series plus one group-by and one sort on the third field.
    # NOTE(review): both series are created with order=1 — presumably
    # intentional to show serialization keeps insertion order; confirm.
    LocalBaserowTableServiceAggregationSeries.objects.create(
        service=data_source1.service, field=field, aggregation_type="sum", order=1
    )
    LocalBaserowTableServiceAggregationSeries.objects.create(
        service=data_source1.service, field=field_2, aggregation_type="sum", order=1
    )
    LocalBaserowTableServiceAggregationGroupBy.objects.create(
        service=data_source1.service, field=field_3, order=1
    )
    LocalBaserowTableServiceSort.objects.create(
        service=data_source1.service, field=field_3, order=2, order_by="ASC"
    )
    data_source2 = (
        enterprise_data_fixture.create_dashboard_local_baserow_list_rows_data_source(
            dashboard=dashboard, name="Name 2"
        )
    )
    url = reverse(
        "api:dashboard:data_sources:list", kwargs={"dashboard_id": dashboard.id}
    )
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert len(response_json) == 2
    # First entry: the grouped-aggregate data source with nested series.
    assert response_json[0] == {
        "aggregation_group_bys": [{"field_id": field_3.id, "order": 1}],
        "aggregation_series": [
            {"aggregation_type": "sum", "field_id": field.id, "order": 1},
            {"aggregation_type": "sum", "field_id": field_2.id, "order": 1},
        ],
        "context_data": {},
        "context_data_schema": None,
        "dashboard_id": dashboard.id,
        "filter_type": "AND",
        "filters": [],
        "sortings": [
            {"field": field_3.id, "id": AnyInt(), "order": 2, "order_by": "ASC"}
        ],
        "id": data_source1.id,
        "integration_id": AnyInt(),
        "name": "Name 1",
        "order": "1.00000000000000000000",
        "schema": AnyDict(),
        "table_id": table.id,
        "type": "local_baserow_grouped_aggregate_rows",
        "view_id": None,
    }
    # Second entry: the plain list-rows data source without aggregation keys.
    assert response_json[1] == {
        "context_data": None,
        "context_data_schema": None,
        "dashboard_id": dashboard.id,
        "filter_type": "AND",
        "filters": [],
        "sortings": [],
        "id": data_source2.id,
        "integration_id": AnyInt(),
        "name": "Name 2",
        "order": "2.00000000000000000000",
        "schema": None,
        "search_query": "",
        "table_id": None,
        "type": "local_baserow_list_rows",
        "view_id": None,
    }
@pytest.mark.django_db
def test_grouped_aggregate_rows_update_data_source(api_client, enterprise_data_fixture):
    """PATCHing a grouped-aggregate data source replaces its series,
    group-bys and sortings, and the response reflects the new ordering."""

    user, token = enterprise_data_fixture.create_user_and_token()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    table = enterprise_data_fixture.create_database_table(user=user)
    view = enterprise_data_fixture.create_grid_view(user, table=table)
    field = enterprise_data_fixture.create_number_field(table=table)
    field_2 = enterprise_data_fixture.create_number_field(table=table)
    field_3 = enterprise_data_fixture.create_number_field(table=table)
    data_source1 = enterprise_data_fixture.create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
        dashboard=dashboard
    )
    url = reverse(
        "api:dashboard:data_sources:item", kwargs={"data_source_id": data_source1.id}
    )
    response = api_client.patch(
        url,
        {
            "table_id": table.id,
            "view_id": view.id,
            "name": "name test",
            "aggregation_series": [
                {"field_id": field.id, "aggregation_type": "sum"},
                {"field_id": field_2.id, "aggregation_type": "sum"},
            ],
            "aggregation_group_bys": [{"field_id": field_3.id}],
            # Sorting is only allowed on fields used in series/group-bys.
            "sortings": [{"field": field.id}],
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK, response.json()
    response_json = response.json()
    assert response_json["table_id"] == table.id
    assert response_json["view_id"] == view.id
    assert response_json["name"] == "name test"
    assert response_json["type"] == "local_baserow_grouped_aggregate_rows"
    # Orders are assigned from the request's list position (0-based).
    assert response_json["aggregation_series"] == [
        {
            "aggregation_type": "sum",
            "field_id": field.id,
            "order": 0,
        },
        {
            "aggregation_type": "sum",
            "field_id": field_2.id,
            "order": 1,
        },
    ]
    assert response_json["aggregation_group_bys"] == [
        {"field_id": field_3.id, "order": 0}
    ]
    # order_by defaults to ASC when not supplied in the request.
    assert response_json["sortings"] == [
        {"id": AnyInt(), "field": field.id, "order": 0, "order_by": "ASC"}
    ]
@pytest.mark.django_db
def test_grouped_aggregate_rows_dispatch_dashboard_data_source(
    api_client, enterprise_data_fixture
):
    """Dispatching the data source aggregates rows per group and applies the
    configured sorts (field_3 ASC, then field_2 DESC)."""

    user, token = enterprise_data_fixture.create_user_and_token()
    workspace = enterprise_data_fixture.create_workspace(user=user)
    database = enterprise_data_fixture.create_database_application(workspace=workspace)
    table = enterprise_data_fixture.create_database_table(user=user, database=database)
    dashboard = enterprise_data_fixture.create_dashboard_application(
        user=user, workspace=workspace
    )
    field = enterprise_data_fixture.create_number_field(table=table)
    field_2 = enterprise_data_fixture.create_number_field(table=table)
    field_3 = enterprise_data_fixture.create_number_field(table=table)
    integration = enterprise_data_fixture.create_local_baserow_integration(
        application=dashboard, user=user
    )
    service = enterprise_data_fixture.create_service(
        LocalBaserowGroupedAggregateRows,
        integration=integration,
        table=table,
    )
    data_source1 = enterprise_data_fixture.create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
        dashboard=dashboard, service=service, integration_args={"user": user}
    )
    # Sum all three number fields, grouped by the first field.
    LocalBaserowTableServiceAggregationSeries.objects.create(
        service=service, field=field, aggregation_type="sum", order=1
    )
    LocalBaserowTableServiceAggregationSeries.objects.create(
        service=service, field=field_2, aggregation_type="sum", order=2
    )
    LocalBaserowTableServiceAggregationSeries.objects.create(
        service=service, field=field_3, aggregation_type="sum", order=3
    )
    LocalBaserowTableServiceAggregationGroupBy.objects.create(
        service=service, field=field, order=1
    )
    LocalBaserowTableServiceSort.objects.create(
        service=service, field=field_3, order=1, order_by="ASC"
    )
    LocalBaserowTableServiceSort.objects.create(
        service=service, field=field_2, order=2, order_by="DESC"
    )
    # Four groups keyed by ``field``: 10, 20, 30 (3 rows each) and None.
    RowHandler().create_rows(
        user,
        table,
        rows_values=[
            # group 1
            {
                f"field_{field.id}": 10,
                f"field_{field_2.id}": 1,
                f"field_{field_3.id}": 2,
            },
            {
                f"field_{field.id}": 10,
                f"field_{field_2.id}": 1,
                f"field_{field_3.id}": 2,
            },
            {
                f"field_{field.id}": 10,
                f"field_{field_2.id}": 1,
                f"field_{field_3.id}": 2,
            },
            # group 2
            {
                f"field_{field.id}": 20,
                f"field_{field_2.id}": 2,
                f"field_{field_3.id}": 2,
            },
            {
                f"field_{field.id}": 20,
                f"field_{field_2.id}": 2,
                f"field_{field_3.id}": 2,
            },
            {
                f"field_{field.id}": 20,
                f"field_{field_2.id}": 2,
                f"field_{field_3.id}": 2,
            },
            # group 3
            {
                f"field_{field.id}": 30,
                f"field_{field_2.id}": 3,
                f"field_{field_3.id}": 1,
            },
            {
                f"field_{field.id}": 30,
                f"field_{field_2.id}": 3,
                f"field_{field_3.id}": 1,
            },
            {
                f"field_{field.id}": 30,
                f"field_{field_2.id}": 3,
                f"field_{field_3.id}": 1,
            },
            # group 4
            {
                f"field_{field.id}": None,
                f"field_{field_2.id}": 100,
                f"field_{field_3.id}": 100,
            },
        ],
    )
    url = reverse(
        "api:dashboard:data_sources:dispatch",
        kwargs={"data_source_id": data_source1.id},
    )
    response = api_client.post(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK, response_json
    # Each entry is one group with per-field sums; rows are ordered by the
    # summed field_3 ascending, then summed field_2 descending.
    assert response_json == {
        "result": [
            {
                f"field_{field.id}": 90.0,
                f"field_{field_2.id}": 9.0,
                f"field_{field_3.id}": 3.0,
            },
            {
                f"field_{field.id}": 60.0,
                f"field_{field_2.id}": 6.0,
                f"field_{field_3.id}": 6.0,
            },
            {
                f"field_{field.id}": 30.0,
                f"field_{field_2.id}": 3.0,
                f"field_{field_3.id}": 6.0,
            },
            {
                f"field_{field.id}": None,
                f"field_{field_2.id}": 100.0,
                f"field_{field_3.id}": 100.0,
            },
        ],
    }

View file

@ -18,6 +18,7 @@ VALID_ONE_SEAT_ENTERPRISE_LICENSE = (
@pytest.fixture # noqa: F405
def enterprise_data_fixture(fake, data_fixture):
from .dashboard.dashboard_fixtures import DashboardFixture
from .enterprise_fixtures import EnterpriseFixtures as EnterpriseFixturesBase
from .fixtures.sso import OAuth2Fixture, SamlFixture
@ -25,6 +26,7 @@ def enterprise_data_fixture(fake, data_fixture):
EnterpriseFixturesBase,
SamlFixture,
OAuth2Fixture,
DashboardFixture,
data_fixture.__class__,
):
pass

View file

@ -0,0 +1,27 @@
from baserow_enterprise.dashboard.widgets.models import ChartWidget
from baserow_enterprise.integrations.local_baserow.models import (
LocalBaserowGroupedAggregateRows,
)
class DashboardFixture:
    """Test-fixture mixin for creating enterprise dashboard objects."""

    def create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
        self, **kwargs
    ):
        """Create a dashboard data source backed by the grouped-aggregate
        rows service model."""

        return self.create_dashboard_data_source(
            service_model_class=LocalBaserowGroupedAggregateRows, **kwargs
        )

    def create_chart_widget(self, dashboard=None, **kwargs):
        """Create a chart widget, auto-creating the dashboard and/or the
        backing data source when they are not supplied."""

        dashboard_args = kwargs.pop("dashboard_args", {})
        if dashboard is None:
            dashboard = self.create_dashboard_application(**dashboard_args)
        if "data_source" not in kwargs:
            kwargs["data_source"] = (
                self.create_dashboard_local_baserow_grouped_aggregate_rows_data_source(
                    dashboard=dashboard
                )
            )
        return ChartWidget.objects.create(dashboard=dashboard, **kwargs)

View file

@ -0,0 +1,60 @@
from django.contrib.contenttypes.models import ContentType
from django.db.models.deletion import ProtectedError
import pytest
from baserow.contrib.dashboard.data_sources.models import DashboardDataSource
from baserow.contrib.dashboard.data_sources.service import DashboardDataSourceService
from baserow.contrib.dashboard.widgets.service import WidgetService
from baserow.contrib.dashboard.widgets.trash_types import WidgetTrashableItemType
from baserow.core.trash.handler import TrashHandler
from baserow_enterprise.integrations.local_baserow.models import (
LocalBaserowGroupedAggregateRows,
)
@pytest.mark.django_db
def test_create_chart_widget_creates_data_source(enterprise_data_fixture):
    """Creating a chart widget via the service auto-creates a data source of
    the grouped-aggregate-rows service type."""

    user = enterprise_data_fixture.create_user()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    widget_type = "chart"
    created_widget = WidgetService().create_widget(
        user, widget_type, dashboard.id, title="My widget", description="My description"
    )
    assert created_widget.data_source is not None
    # The auto-created service must be of the grouped-aggregate-rows type.
    assert (
        created_widget.data_source.service.content_type
        == ContentType.objects.get_for_model(LocalBaserowGroupedAggregateRows)
    )
@pytest.mark.django_db
def test_chart_widget_trash_restore(enterprise_data_fixture):
    """Trashing/restoring a chart widget trashes/restores its data source."""

    user = enterprise_data_fixture.create_user()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    widget = enterprise_data_fixture.create_chart_widget(dashboard=dashboard)
    data_source_id = widget.data_source.id
    TrashHandler.trash(user, dashboard.workspace, dashboard, widget)
    # objects_and_trash: trashed rows are excluded from the default manager.
    ds = DashboardDataSource.objects_and_trash.get(id=data_source_id)
    assert ds.trashed is True
    TrashHandler.restore_item(user, WidgetTrashableItemType.type, widget.id)
    ds = DashboardDataSource.objects_and_trash.get(id=data_source_id)
    assert ds.trashed is False
@pytest.mark.django_db
def test_chart_widget_datasource_cannot_be_deleted(enterprise_data_fixture):
    """The widget's data source is protected (FK on_delete=PROTECT) and
    cannot be deleted directly while the widget exists."""

    user = enterprise_data_fixture.create_user()
    dashboard = enterprise_data_fixture.create_dashboard_application(user=user)
    chart_widget = enterprise_data_fixture.create_chart_widget(dashboard=dashboard)
    with pytest.raises(ProtectedError):
        DashboardDataSourceService().delete_data_source(
            user, chart_widget.data_source.id
        )

View file

@ -59,11 +59,11 @@ def test_create_grouped_aggregate_rows_service(data_fixture):
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
{"field_id": field_2.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
},
user,
)
@ -102,11 +102,11 @@ def test_create_grouped_aggregate_rows_service_series_field_not_in_table(data_fi
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
{"field_id": field_2.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
},
user,
)
@ -135,10 +135,10 @@ def test_create_grouped_aggregate_rows_service_series_agg_type_doesnt_exist(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "avg"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
},
user,
)
@ -169,11 +169,11 @@ def test_create_grouped_aggregate_rows_service_series_incompatible_aggregation_t
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
{"field_id": field_2.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
},
user,
)
@ -205,10 +205,10 @@ def test_create_grouped_aggregate_rows_service_group_by_field_not_in_table(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field_2.id}],
"service_aggregation_group_bys": [{"field_id": field_2.id}],
},
user,
)
@ -240,7 +240,7 @@ def test_create_grouped_aggregate_rows_service_max_series_exceeded(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
{"field_id": field_2.id, "aggregation_type": "sum"},
{"field_id": field_3.id, "aggregation_type": "sum"},
@ -276,10 +276,13 @@ def test_create_grouped_aggregate_rows_service_max_group_bys_exceeded(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}, {"field_id": field_2.id}],
"service_aggregation_group_bys": [
{"field_id": field.id},
{"field_id": field_2.id},
],
},
user,
)
@ -310,12 +313,12 @@ def test_create_grouped_aggregate_rows_service_sort_by_field_outside_of_series_g
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
"service_sorts": [
{"field_id": field_2.id},
{"field": field_2},
],
},
user,
@ -347,12 +350,12 @@ def test_create_grouped_aggregate_rows_service_sort_by_primary_field_no_group_by
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [],
"service_aggregation_group_bys": [],
"service_sorts": [
{"field_id": field_2.id},
{"field": field_2},
],
},
user,
@ -384,10 +387,10 @@ def test_create_grouped_aggregate_rows_service_sort_by_primary_field_with_group_
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [],
"aggregation_group_bys": [{"field_id": field_2.id}],
"service_aggregation_series": [],
"service_aggregation_group_bys": [{"field_id": field_2.id}],
"service_sorts": [
{"field_id": field.id},
{"field": field},
],
},
user,
@ -436,11 +439,11 @@ def test_update_grouped_aggregate_rows_service(data_fixture):
"view_id": table_2_view.id,
"table_id": table_2.id,
"integration_id": table_2_integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": table_2_field.id, "aggregation_type": "sum"},
{"field_id": table_2_field_2.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": table_2_field.id}],
"service_aggregation_group_bys": [{"field_id": table_2_field.id}],
},
user,
service,
@ -493,10 +496,10 @@ def test_update_grouped_aggregate_rows_service_series_field_not_in_table(data_fi
"view_id": table_2_view.id,
"table_id": table_2.id,
"integration_id": table_2_integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": table_2_field.id}],
"service_aggregation_group_bys": [{"field_id": table_2_field.id}],
},
user,
service,
@ -538,10 +541,10 @@ def test_update_grouped_aggregate_rows_service_series_agg_type_doesnt_exist(
"view_id": table_2_view.id,
"table_id": table_2.id,
"integration_id": table_2_integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": table_2_field.id, "aggregation_type": "avg"},
],
"aggregation_group_bys": [{"field_id": table_2_field.id}],
"service_aggregation_group_bys": [{"field_id": table_2_field.id}],
},
user,
service,
@ -584,10 +587,10 @@ def test_update_grouped_aggregate_rows_service_series_incompatible_aggregation_t
"view_id": table_2_view.id,
"table_id": table_2.id,
"integration_id": table_2_integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": table_2_field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": table_2_field.id}],
"service_aggregation_group_bys": [{"field_id": table_2_field.id}],
},
user,
service,
@ -631,10 +634,10 @@ def test_update_grouped_aggregate_rows_service_group_by_field_not_in_table(
"view_id": table_2_view.id,
"table_id": table_2.id,
"integration_id": table_2_integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": table_2_field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
},
user,
service,
@ -673,7 +676,7 @@ def test_update_grouped_aggregate_rows_service_max_series_exceeded(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
{"field_id": field_2.id, "aggregation_type": "sum"},
{"field_id": field_3.id, "aggregation_type": "sum"},
@ -715,10 +718,13 @@ def test_update_grouped_aggregate_rows_service_max_group_bys_exceeded(
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}, {"field_id": field_2.id}],
"service_aggregation_group_bys": [
{"field_id": field.id},
{"field_id": field_2.id},
],
},
user,
)
@ -755,12 +761,12 @@ def test_update_grouped_aggregate_rows_service_sort_by_field_outside_of_series_g
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [{"field_id": field.id}],
"service_aggregation_group_bys": [{"field_id": field.id}],
"service_sorts": [
{"field_id": field_2.id},
{"field": field_2},
],
},
user,
@ -798,12 +804,12 @@ def test_update_grouped_aggregate_rows_service_sort_by_primary_field_no_group_by
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [
"service_aggregation_series": [
{"field_id": field.id, "aggregation_type": "sum"},
],
"aggregation_group_bys": [],
"service_aggregation_group_bys": [],
"service_sorts": [
{"field_id": field_2.id},
{"field": field_2},
],
},
user,
@ -841,10 +847,10 @@ def test_update_grouped_aggregate_rows_service_sort_by_primary_field_with_group_
"view_id": view.id,
"table_id": view.table_id,
"integration_id": integration.id,
"aggregation_series": [],
"aggregation_group_bys": [{"field_id": field_2.id}],
"service_aggregation_series": [],
"service_aggregation_group_bys": [{"field_id": field_2.id}],
"service_sorts": [
{"field_id": field.id},
{"field": field},
],
},
user,