Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-14 17:18:33 +00:00)
Merge branch 'ab-final-fix-for-service-responses' into 'develop'

Final fix for service responses

See merge request baserow/baserow!3119
Commit 5a7376fa63

19 changed files with 111 additions and 89 deletions
Changed directories:

backend/
    src/baserow/
        contrib/
            builder/
                api/workflow_actions
                data_providers
                data_sources
                workflow_actions
            dashboard/data_sources
            integrations/local_baserow
        core/
    tests/baserow/contrib/
        builder/data_providers
        integrations/local_baserow/service_types
enterprise/backend/
    src/baserow_enterprise/integrations/local_baserow
    tests/baserow_enterprise_tests/integrations/local_baserow/service_types
@@ -404,6 +404,4 @@ class DispatchBuilderWorkflowActionView(APIView):
             request.user, workflow_action, dispatch_context  # type: ignore
         )
 
-        if not isinstance(response, Response):
-            response = Response(response)
-        return response
+        return Response(response.data, status=response.status)
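Note: the view change above works because the dispatch layer now hands back a DispatchResult object instead of either a plain dict or a ready-made DRF Response. A minimal sketch of the conversion the view performs, assuming only the DispatchResult type added later in this diff (the helper name to_drf_response is illustrative, not part of the MR):

    from rest_framework.response import Response

    from baserow.core.services.types import DispatchResult


    def to_drf_response(result: DispatchResult) -> Response:
        # Every dispatch now yields a DispatchResult, so the view can build the
        # HTTP response from its `data` and `status` fields directly instead of
        # checking `isinstance(result, Response)` as the removed code did.
        return Response(result.data, status=result.status)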
@@ -5,8 +5,6 @@ from django.conf import settings
 from django.core.cache import cache
 from django.utils.translation import gettext as _
 
-from rest_framework.response import Response
-
 from baserow.contrib.builder.data_providers.exceptions import (
     DataProviderChunkInvalidException,
     FormDataProviderChunkInvalidException,

@@ -31,6 +29,7 @@ from baserow.contrib.builder.workflow_actions.handler import (
 from baserow.core.formula.exceptions import FormulaRecursion, InvalidBaserowFormula
 from baserow.core.formula.registries import DataProviderType
 from baserow.core.services.dispatch_context import DispatchContext
+from baserow.core.services.types import DispatchResult
 from baserow.core.user_sources.constants import DEFAULT_USER_ROLE_PREFIX
 from baserow.core.user_sources.user_source_user import UserSourceUser
 from baserow.core.utils import get_value_at_path

@@ -449,7 +448,7 @@ class PreviousActionProviderType(DataProviderType):
         self,
         dispatch_context: DispatchContext,
         workflow_action: WorkflowAction,
-        result: Any,
+        dispatch_result: DispatchResult,
     ) -> None:
         """
         If the current_dispatch_id exists in the request data, create a unique

@@ -470,7 +469,7 @@ class PreviousActionProviderType(DataProviderType):
             )
             cache.set(
                 cache_key,
-                {} if isinstance(result, Response) else result,
+                dispatch_result.data,
                 timeout=settings.BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS,
             )
 
@@ -448,7 +448,7 @@ class DataSourceHandler:
             # it later
             dispatch_context.cache["data_source_contents"][
                 data_source.id
-            ] = service_dispatch
+            ] = service_dispatch.data
 
         return dispatch_context.cache["data_source_contents"][data_source.id]
 

@@ -384,7 +384,7 @@ class DataSourceService:
         Dispatch the service related to the data_source if the user has the permission.
 
         :param user: The current user.
-        :param data_sources: The data source to be dispatched.
+        :param data_source: The data source to be dispatched.
         :param dispatch_context: The context used for the dispatch.
         :return: return the dispatch result.
         """

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional
 from zipfile import ZipFile
 
 from django.core.files.storage import Storage

@@ -25,6 +25,7 @@ from baserow.contrib.builder.workflow_actions.registries import (
 )
 from baserow.core.exceptions import IdDoesNotExist
 from baserow.core.services.handler import ServiceHandler
+from baserow.core.services.types import DispatchResult
 from baserow.core.workflow_actions.handler import WorkflowActionHandler
 from baserow.core.workflow_actions.models import WorkflowAction
 from baserow.core.workflow_actions.registries import WorkflowActionType

@@ -174,7 +175,7 @@ class BuilderWorkflowActionHandler(WorkflowActionHandler):
         self,
         workflow_action: BuilderWorkflowServiceAction,
         dispatch_context: BuilderDispatchContext,
-    ) -> Any:
+    ) -> DispatchResult:
         """
         Dispatch the service related to the workflow_action.
 

@@ -185,11 +186,13 @@ class BuilderWorkflowActionHandler(WorkflowActionHandler):
         :return: The result of dispatching the workflow action.
         """
 
-        result = ServiceHandler().dispatch_service(
+        dispatch_result = ServiceHandler().dispatch_service(
             workflow_action.service.specific, dispatch_context
         )
 
         for data_provider in builder_data_provider_type_registry.get_all():
-            data_provider.post_dispatch(dispatch_context, workflow_action, result)
+            data_provider.post_dispatch(
+                dispatch_context, workflow_action, dispatch_result
+            )
 
-        return result
+        return dispatch_result
@@ -40,6 +40,7 @@ from baserow.contrib.builder.workflow_actions.workflow_action_types import (
     BuilderWorkflowActionType,
 )
 from baserow.core.handler import CoreHandler
+from baserow.core.services.types import DispatchResult
 
 if TYPE_CHECKING:
     from baserow.contrib.builder.models import Builder

@@ -341,4 +342,7 @@ class BuilderWorkflowActionService:
             "external", {}
         ).get(workflow_action.service.id, [])
 
-        return self.remove_unused_field_names(result, field_names)
+        return DispatchResult(
+            data=self.remove_unused_field_names(result.data, field_names),
+            status=result.status,
+        )
@@ -307,7 +307,7 @@ class DashboardDataSourceHandler:
             data_source.service.specific, dispatch_context
         )
 
-        return service_dispatch
+        return service_dispatch.data
 
     def export_data_source(
         self,
@@ -17,7 +17,6 @@ from django.db.models import QuerySet
 
 from rest_framework import serializers
 from rest_framework.exceptions import ValidationError as DRFValidationError
-from rest_framework.response import Response
 
 from baserow.contrib.builder.data_providers.exceptions import (
     DataProviderChunkInvalidException,

@@ -103,6 +102,7 @@ from baserow.core.services.registries import (
     ServiceType,
 )
 from baserow.core.services.types import (
+    DispatchResult,
     ServiceDict,
     ServiceFilterDictSubClass,
     ServiceSortDictSubClass,

@@ -1099,7 +1099,7 @@ class LocalBaserowListRowsUserServiceType(
             "public_allowed_properties": only_field_names,
         }
 
-    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> Any:
+    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
         """
         Given the rows found in `dispatch_data`, serializes them.
 

@@ -1120,10 +1120,12 @@ class LocalBaserowListRowsUserServiceType(
             field_ids=field_ids,
         )
 
-        return {
-            "results": serializer(dispatch_data["results"], many=True).data,
-            "has_next_page": dispatch_data["has_next_page"],
-        }
+        return DispatchResult(
+            data={
+                "results": serializer(dispatch_data["results"], many=True).data,
+                "has_next_page": dispatch_data["has_next_page"],
+            }
+        )
 
     def get_record_names(
         self,

@@ -1508,7 +1510,7 @@ class LocalBaserowAggregateRowsUserServiceType(
     def dispatch_transform(
         self,
         data: Dict[str, Any],
-    ) -> Dict[str, Any]:
+    ) -> DispatchResult:
         """
         Responsible for transforming the data returned by the `dispatch_data`
         method into a format that can be used by the frontend.

@@ -1517,7 +1519,7 @@ class LocalBaserowAggregateRowsUserServiceType(
         :return: A dictionary containing the aggregation result.
         """
 
-        return data["data"]
+        return DispatchResult(data=data["data"])
 
     def extract_properties(self, path: List[str], **kwargs) -> List[str]:
         """

@@ -1714,7 +1716,7 @@ class LocalBaserowGetRowUserServiceType(
             **kwargs,
         )
 
-    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> Any:
+    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
         """
         Responsible for serializing the `dispatch_data` row.
 

@@ -1737,7 +1739,7 @@ class LocalBaserowGetRowUserServiceType(
 
         serialized_row = serializer(dispatch_data["data"]).data
 
-        return serialized_row
+        return DispatchResult(data=serialized_row)
 
     def resolve_service_formulas(
         self,

@@ -2069,7 +2071,7 @@ class LocalBaserowUpsertRowServiceType(
     def enhance_queryset(self, queryset):
         return super().enhance_queryset(queryset).prefetch_related("field_mappings")
 
-    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> Any:
+    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
         """
         Responsible for serializing the `dispatch_data` row.
 

@@ -2091,7 +2093,7 @@ class LocalBaserowUpsertRowServiceType(
         )
         serialized_row = serializer(dispatch_data["data"]).data
 
-        return serialized_row
+        return DispatchResult(data=serialized_row)
 
     def resolve_service_formulas(
         self,

@@ -2331,17 +2333,17 @@ class LocalBaserowDeleteRowServiceType(
         resolved_values = super().resolve_service_formulas(service, dispatch_context)
         return self.resolve_row_id(resolved_values, service, dispatch_context)
 
-    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> Response:
+    def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
         """
         The delete row action's `dispatch_data` will contain an empty
         `data` dictionary. When we get to this method and wish to transform
         the data, we can simply return a 204 response.
 
         :param dispatch_data: The `dispatch_data` result.
-        :return: A 204 response.
+        :return: A dispatch result with no data, and a 204 status code.
         """
 
-        return Response(status=204)
+        return DispatchResult(status=204)
 
     def dispatch_data(
         self,
@@ -15,6 +15,7 @@ from baserow.core.formula.types import (
 )
 from baserow.core.registry import Instance, Registry
 from baserow.core.services.dispatch_context import DispatchContext
+from baserow.core.services.types import DispatchResult
 from baserow.core.workflow_actions.models import WorkflowAction
 
 

@@ -183,7 +184,7 @@ class DataProviderType(
         self,
         dispatch_context: DispatchContext,
         workflow_action: WorkflowAction,
-        result: Any,
+        dispatch_result: DispatchResult,
     ) -> None:
         """
         This hook is called after a Workflow Action has been dispatched. It is
@@ -18,7 +18,7 @@ from baserow.core.storage import ExportZipFile
 from baserow.core.utils import extract_allowed
 
 from .dispatch_context import DispatchContext
-from .types import ServiceForUpdate, UpdatedService
+from .types import DispatchResult, ServiceForUpdate, UpdatedService
 
 
 class ServiceHandler:

@@ -202,7 +202,7 @@ class ServiceHandler:
         self,
         service: Service,
         dispatch_context: DispatchContext,
-    ) -> Any:
+    ) -> DispatchResult:
         """
         Dispatch the given service.
 
@@ -19,6 +19,7 @@ from baserow.core.registry import (
     Registry,
 )
 from baserow.core.services.dispatch_context import DispatchContext
+from baserow.core.services.types import DispatchResult
 
 from .exceptions import ServiceTypeDoesNotExist
 from .models import Service

@@ -195,13 +196,13 @@ class ServiceType(
     def dispatch_transform(
         self,
         data: Any,
-    ) -> Any:
+    ) -> DispatchResult:
         """
         Responsible for taking the `dispatch_data` result and transforming its value
         for API consumer's consumption.
 
         :param data: The `dispatch_data` result.
-        :return: The transformed `dispatch_transform` result if any.
+        :return: The transformed `dispatch_transform` result.
         """
 
     def dispatch_data(

@@ -224,7 +225,7 @@ class ServiceType(
         self,
         service: ServiceSubClass,
         dispatch_context: DispatchContext,
-    ) -> Any:
+    ) -> DispatchResult:
         """
         Responsible for calling `dispatch_data` and `dispatch_transform` to execute
         the service's task, and generating the dispatch's response, respectively.
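The two signature changes above define the new contract for every service type: dispatch_data keeps producing the raw payload, while dispatch_transform (and therefore dispatch) must now return a DispatchResult. A minimal sketch of a conforming implementation, using a hypothetical ExampleServiceType, hypothetical method parameters, and a made-up payload; it only assumes the DispatchResult type added in this MR:

    from typing import Any, Dict

    from baserow.core.services.types import DispatchResult


    class ExampleServiceType:
        # Hypothetical service type, for illustration only.

        def dispatch_data(self, service, dispatch_context) -> Dict[str, Any]:
            # Compute the raw, service-specific payload as before.
            return {"data": {"hello": "world"}}

        def dispatch_transform(self, data: Dict[str, Any]) -> DispatchResult:
            # Wrap the payload in a DispatchResult instead of returning a bare
            # dict or a rest_framework Response.
            return DispatchResult(data=data["data"])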
@@ -1,4 +1,4 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from typing import NewType, Optional, TypedDict, TypeVar
 
 from baserow.core.formula.runtime_formula_context import RuntimeFormulaContext

@@ -25,6 +25,12 @@ class ServiceSortDict(TypedDict):
     order: str
 
 
+@dataclass
+class DispatchResult:
+    data: dict = field(default_factory=dict)
+    status: int = 200
+
+
 @dataclass
 class UpdatedService:
     service: Service
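The dataclass introduced above is what all the dispatch_transform implementations in this diff now return. A short usage sketch (nothing assumed beyond the definition in the hunk): field(default_factory=dict) gives each instance its own empty dict, and status defaults to 200, so the delete-row service can express "no content" as DispatchResult(status=204):

    from baserow.core.services.types import DispatchResult

    ok = DispatchResult(data={"id": 42})
    assert ok.status == 200  # status defaults to 200

    empty = DispatchResult(status=204)
    assert empty.data == {}  # default_factory=dict gives a fresh dict per instance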
@@ -7,7 +7,6 @@ from django.http import HttpRequest
 from django.shortcuts import reverse
 
 import pytest
-from rest_framework.response import Response
 
 from baserow.contrib.builder.data_providers.data_provider_types import (
     CurrentRecordDataProviderType,

@@ -39,6 +38,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.core.formula.exceptions import InvalidBaserowFormula
 from baserow.core.formula.registries import DataProviderType
 from baserow.core.services.exceptions import ServiceImproperlyConfigured
+from baserow.core.services.types import DispatchResult
 from baserow.core.user_sources.constants import DEFAULT_USER_ROLE_PREFIX
 from baserow.core.user_sources.user_source_user import UserSourceUser
 from baserow.core.utils import MirrorDict

@@ -984,7 +984,7 @@ def test_previous_action_data_provider_post_dispatch_caches_result():
     workflow_action.id = 100
 
     mock_cache_key = "mock-cache-key"
-    mock_result = {"mock-key": "mock-value"}
+    mock_result = DispatchResult(data={"mock-key": "mock-value"})
     previous_action_data_provider.get_dispatch_action_cache_key = MagicMock(
         return_value=mock_cache_key
     )

@@ -1001,12 +1001,12 @@ def test_previous_action_data_provider_post_dispatch_caches_result():
     )
     mock_cache.set.assert_called_once_with(
         mock_cache_key,
-        mock_result,
+        mock_result.data,
         timeout=settings.BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS,
     )
 
 
-def test_previous_action_data_provider_post_dispatch_with_response_doesnt_cache_result():
+def test_previous_action_data_provider_post_dispatch_with_empty_response_cache_result():
     """
     Ensure that when a current_dispatch_id is present in the request, the
     provided result is cached.

@@ -1026,7 +1026,7 @@ def test_previous_action_data_provider_post_dispatch_with_response_doesnt_cache_
     workflow_action.id = 100
 
     mock_cache_key = "mock-cache-key"
-    mock_result = Response(status=204)
+    mock_result = DispatchResult(status=204)
     previous_action_data_provider.get_dispatch_action_cache_key = MagicMock(
         return_value=mock_cache_key
     )
@@ -1,7 +1,6 @@
 from unittest.mock import Mock
 
 import pytest
-from rest_framework.response import Response
 
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.integrations.local_baserow.models import LocalBaserowDeleteRow

@@ -158,5 +157,4 @@ def test_local_baserow_delete_row_service_dispatch_transform(data_fixture):
     service_type = LocalBaserowDeleteRowServiceType()
     dispatch_data = {"data": {}, "baserow_table_model": Mock()}
     result = service_type.dispatch_transform(dispatch_data)
-    assert isinstance(result, Response)
-    assert result.status_code == 204
+    assert result.status == 204
@@ -212,7 +212,7 @@ def test_local_baserow_get_row_service_dispatch_transform(data_fixture):
     )
     result = service_type.dispatch_transform(dispatch_data)
 
-    assert result == {
+    assert result.data == {
         "id": rows[1].id,
         fields[0].db_column: "Audi",
         fields[1].db_column: "Orange",

@@ -325,7 +325,7 @@ def test_local_baserow_get_row_service_dispatch_data_with_service_integer_search
     )
     result = service_type.dispatch_transform(dispatch_data)
 
-    assert result == {
+    assert result.data == {
         "id": rows[2].id,
         fields[0].db_column: "42",
         "order": AnyStr(),

@@ -771,7 +771,7 @@ def test_dispatch_transform_passes_field_ids(mock_get_serializer, field_names):
 
     results = service_type.dispatch_transform(dispatch_data)
 
-    assert results == mock_serializer_instance.data
+    assert results.data == mock_serializer_instance.data
     mock_get_serializer.assert_called_once_with(
         dispatch_data["baserow_table_model"],
         RowSerializer,

@@ -851,7 +851,7 @@ def test_can_dispatch_interesting_table(data_fixture):
     # Normal dispatch
     result = service.get_type().dispatch(service, dispatch_context)
 
-    assert len(result.keys()) == table.field_set.count() + 2
+    assert len(result.data.keys()) == table.field_set.count() + 2
 
     # Now can we dispatch the table if all fields are hidden?
     field_names = {

@@ -866,7 +866,7 @@ def test_can_dispatch_interesting_table(data_fixture):
     # means that the enhance_by_field is filtered to only used field.
     result = service.get_type().dispatch(service, dispatch_context)
 
-    assert len(result.keys()) == 1 + 1  # We also have the order at that point
+    assert len(result.data.keys()) == 1 + 1  # We also have the order at that point
 
     # Test with a filter on a single select field. Single select have a select_related
     single_select_field = table.field_set.get(name="single_select")

@@ -879,7 +879,7 @@ def test_can_dispatch_interesting_table(data_fixture):
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
 
-    assert len(result.keys()) == 1 + 1
+    assert len(result.data.keys()) == 1 + 1
 
     # Let's remove the filter to not interfer with the sort
     service_filter.delete()

@@ -890,7 +890,7 @@ def test_can_dispatch_interesting_table(data_fixture):
     )
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
-    assert len(result.keys()) == 1 + 1
+    assert len(result.data.keys()) == 1 + 1
 
     service_sort.delete()
 

@@ -899,4 +899,4 @@ def test_can_dispatch_interesting_table(data_fixture):
     service.save()
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
-    assert len(result.keys()) == 1 + 1
+    assert len(result.data.keys()) == 1 + 1
@@ -231,7 +231,7 @@ def test_local_baserow_list_rows_service_dispatch_transform(data_fixture):
     )
     result = service_type.dispatch_transform(dispatch_data)
 
-    assert [dict(r) for r in result["results"]] == [
+    assert [dict(r) for r in result.data["results"]] == [
         {
             "id": rows[0].id,
             fields[0].db_column: "BMW",

@@ -245,7 +245,7 @@ def test_local_baserow_list_rows_service_dispatch_transform(data_fixture):
             "order": AnyStr(),
         },
     ]
-    assert result["has_next_page"] is False
+    assert result.data["has_next_page"] is False
 
 
 @pytest.mark.django_db

@@ -943,7 +943,7 @@ def test_can_dispatch_table_with_deleted_field(data_fixture):
     result = service.get_type().dispatch(service, dispatch_context)
 
     assert (
-        len(result["results"][0].keys()) == 2 + 1
+        len(result.data["results"][0].keys()) == 2 + 1
     )  # We also have the order at that point
 
 

@@ -975,8 +975,8 @@ def test_can_dispatch_interesting_table(data_fixture):
     # Normal dispatch
     result = service.get_type().dispatch(service, dispatch_context)
 
-    assert len(result["results"]) == 2
-    assert len(result["results"][0].keys()) == table.field_set.count() + 2
+    assert len(result.data["results"]) == 2
+    assert len(result.data["results"][0].keys()) == table.field_set.count() + 2
 
     # Now can we dispatch the table if all fields are hidden?
     field_names = {

@@ -992,7 +992,7 @@ def test_can_dispatch_interesting_table(data_fixture):
     result = service.get_type().dispatch(service, dispatch_context)
 
     assert (
-        len(result["results"][0].keys()) == 1 + 1
+        len(result.data["results"][0].keys()) == 1 + 1
     )  # We also have the order at that point
 
     # Test with a filter on a single select field. Single select have a select_related

@@ -1000,15 +1000,19 @@ def test_can_dispatch_interesting_table(data_fixture):
     service_filter = data_fixture.create_local_baserow_table_service_filter(
         service=service,
         field=single_select_field,
-        value="'A'",
+        type="not_equal",
+        value="'Nothing'",
+        value_is_formula=True,
         order=0,
     )
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
 
-    assert len(result["results"][0].keys()) == 1 + 1
+    result = service.get_type().dispatch(service, dispatch_context)
 
-    # Let's remove the filter to not interfer with the sort
+    assert len(result.data["results"][0].keys()) == 2 + 1
+
+    # Let's remove the filter to not interfere with the sort
     service_filter.delete()
 
     # Test with a sort

@@ -1017,16 +1021,22 @@ def test_can_dispatch_interesting_table(data_fixture):
     )
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
-    assert len(result["results"][0].keys()) == 1 + 1
+
+    result = service.get_type().dispatch(service, dispatch_context)
+
+    assert len(result.data["results"][0].keys()) == 2 + 1
 
     service_sort.delete()
 
-    # Now with a search
-    service.search_query = "'A'"
+    # Now with a search query
+    service.search_query = "1"
     service.save()
 
     dispatch_context = FakeDispatchContext(public_allowed_properties=field_names)
-    assert len(result["results"][0].keys()) == 1 + 1
+
+    result = service.get_type().dispatch(service, dispatch_context)
+
+    assert len(result.data["results"][0].keys()) == 1 + 1
 
 
 @pytest.mark.parametrize(

@@ -1063,7 +1073,7 @@ def test_dispatch_transform_passes_field_ids(mock_get_serializer, field_names):
 
     results = service_type.dispatch_transform(dispatch_data)
 
-    assert results == {
+    assert results.data == {
         "has_next_page": False,
         "results": mock_serializer_instance.data,
     }
@@ -361,7 +361,7 @@ def test_local_baserow_upsert_row_service_dispatch_transform(
     )
 
     serialized_row = service_type.dispatch_transform(dispatch_data)
-    assert dict(serialized_row) == {
+    assert dict(serialized_row.data) == {
         "id": dispatch_data["data"].id,
         "order": "1.00000000000000000000",
         ingredient.db_column: str(2),

@@ -473,7 +473,7 @@ def test_local_baserow_upsert_row_service_dispatch_data_convert_value(data_fixtu
     )
     serialized_row = service_type.dispatch_transform(dispatch_data)
 
-    assert dict(serialized_row) == {
+    assert dict(serialized_row.data) == {
         "id": 1,
         "order": "1.00000000000000000000",
         # The string 'true' was converted to a boolean value

@@ -722,7 +722,7 @@ def test_dispatch_transform_passes_field_ids(
 
     results = service_type.dispatch_transform(dispatch_data)
 
-    assert results == mock_serializer_instance.data
+    assert results.data == mock_serializer_instance.data
     mock_get_serializer.assert_called_once_with(
         dispatch_data["baserow_table_model"],
         RowSerializer,
@@ -25,7 +25,7 @@ from baserow.contrib.integrations.local_baserow.service_types import (
 from baserow.core.services.dispatch_context import DispatchContext
 from baserow.core.services.exceptions import ServiceImproperlyConfigured
 from baserow.core.services.registries import DispatchTypes
-from baserow.core.services.types import ServiceSortDictSubClass
+from baserow.core.services.types import DispatchResult, ServiceSortDictSubClass
 from baserow.core.utils import atomic_if_not_already
 from baserow_enterprise.api.integrations.local_baserow.serializers import (
     LocalBaserowTableServiceAggregationGroupBySerializer,

@@ -512,5 +512,5 @@ class LocalBaserowGroupedAggregateRowsUserServiceType(
     def dispatch_transform(
         self,
         data: any,
-    ) -> any:
-        return data["data"]
+    ) -> DispatchResult:
+        return DispatchResult(data=data["data"])
@@ -953,7 +953,7 @@ def test_grouped_aggregate_rows_service_dispatch(data_fixture):
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": {
             f"field_{field.id}": Decimal("20"),
             f"field_{field_2.id}": Decimal("8"),

@@ -1003,7 +1003,7 @@ def test_grouped_aggregate_rows_service_dispatch_with_view(data_fixture):
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": {
             f"field_{field.id}": Decimal("6"),
             f"field_{field_2.id}": Decimal("4"),

@@ -1053,7 +1053,7 @@ def test_grouped_aggregate_rows_service_dispatch_with_service_filters(data_fixtu
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": {
             f"field_{field.id}": Decimal("6"),
             f"field_{field_2.id}": Decimal("4"),

@@ -1278,7 +1278,7 @@ def test_grouped_aggregate_rows_service_dispatch_with_total_aggregation(data_fix
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": {
             f"field_{field.id}": 75.0,
             f"field_{field_2.id}": 25.0,

@@ -1358,7 +1358,7 @@ def test_grouped_aggregate_rows_service_dispatch_group_by(data_fixture):
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("1"),

@@ -1424,7 +1424,7 @@ def test_grouped_aggregate_rows_service_dispatch_group_by_id(data_fixture):
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("2"),

@@ -1554,7 +1554,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_series_with_group_by(
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("90"),

@@ -1655,7 +1655,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_series_with_group_by_ro
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": None,

@@ -1764,7 +1764,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_series_without_group_by
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
     # the results are still a dictionary, not sorted on the backend
-    assert result == {
+    assert result.data == {
         "result": {
             f"field_{field.id}": Decimal("9"),
             f"field_{field_2.id}": Decimal("14"),

@@ -1869,7 +1869,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_group_by_field(data_fix
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": None,

@@ -1962,7 +1962,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_group_by_row_id(data_fi
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": "",

@@ -2207,7 +2207,7 @@ def test_grouped_aggregate_rows_service_dispatch_sort_by_series_with_group_by_ig
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": None,

@@ -2298,7 +2298,7 @@ def test_grouped_aggregate_rows_service_dispatch_max_buckets_sort_on_group_by_fi
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("10"),

@@ -2385,7 +2385,7 @@ def test_grouped_aggregate_rows_service_dispatch_max_buckets_sort_on_series(
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("10"),

@@ -2472,7 +2472,7 @@ def test_grouped_aggregate_rows_service_dispatch_max_buckets_sort_on_primary_fie
 
     result = ServiceHandler().dispatch_service(service, dispatch_context)
 
-    assert result == {
+    assert result.data == {
         "result": [
             {
                 f"field_{field.id}": Decimal("10"),