
Resolve "Add database + application templates"

Jérémie Pardou 2024-05-02 14:25:42 +00:00
parent 36b4bed039
commit b17daa1e49
136 changed files with 11517 additions and 7901 deletions
backend
src/baserow
templates
tests/baserow
changelog/entries/unreleased/feature
enterprise
backend
src/baserow_enterprise
tests/baserow_enterprise_tests
web-frontend/modules/baserow_enterprise/builder/components/elements
premium/backend
src/baserow_premium/api/views
calendar
kanban
tests/baserow_premium_tests
web-frontend/modules/builder

View file

@@ -127,6 +127,7 @@ class AllApplicationsView(APIView):
                ).data
                for application in applications
            ]

        return Response(data)
@@ -184,7 +185,6 @@ class ApplicationsView(APIView):
            ListApplicationsWorkspaceOperationType.type,
            workspace=workspace,
            context=workspace,
-           allow_if_template=True,
        )

        applications = (
@@ -198,7 +198,6 @@ class ApplicationsView(APIView):
            ListApplicationsWorkspaceOperationType.type,
            applications,
            workspace=workspace,
-           allow_if_template=True,
        )

        applications = specific_iterator(

View file

@@ -1065,6 +1065,7 @@ PERMISSION_MANAGERS = [
    "core",
    "setting_operation",
    "staff",
+   "allow_if_template",
    "allow_public_builder",
    "element_visibility",
    "member",

View file

@@ -60,7 +60,6 @@ class BuilderSerializer(serializers.ModelSerializer):
            ListPagesBuilderOperationType.type,
            pages,
            workspace=instance.workspace,
-           allow_if_template=True,
        )
        return PageSerializer(pages, many=True).data

View file

@@ -135,21 +135,43 @@ class BuilderApplicationType(ApplicationType):
        be imported via the `import_serialized`.
        """

+       self.cache = {}
+
        serialized_integrations = [
-           IntegrationHandler().export_integration(i)
+           IntegrationHandler().export_integration(
+               i,
+               files_zip=files_zip,
+               storage=storage,
+               cache=self.cache,
+           )
            for i in IntegrationHandler().get_integrations(builder)
        ]

        serialized_user_sources = [
-           UserSourceHandler().export_user_source(us)
+           UserSourceHandler().export_user_source(
+               us,
+               files_zip=files_zip,
+               storage=storage,
+               cache=self.cache,
+           )
            for us in UserSourceHandler().get_user_sources(builder)
        ]

        pages = builder.page_set.all().prefetch_related("element_set", "datasource_set")

-       serialized_pages = [PageHandler().export_page(p) for p in pages]
+       serialized_pages = [
+           PageHandler().export_page(
+               p,
+               files_zip=files_zip,
+               storage=storage,
+               cache=self.cache,
+           )
+           for p in pages
+       ]

-       serialized_theme = ThemeHandler().export_theme(builder)
+       serialized_theme = ThemeHandler().export_theme(
+           builder,
+       )

        serialized_favicon_file = UserFileHandler().export_user_file(
            builder.favicon_file,
@@ -158,7 +180,10 @@ class BuilderApplicationType(ApplicationType):
        )

        serialized_builder = super().export_serialized(
-           builder, import_export_config, files_zip, storage
+           builder,
+           import_export_config,
+           files_zip=files_zip,
+           storage=storage,
        )

        return BuilderDict(
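Aside: a single cache dict (`self.cache = {}`) is now created at the top of the export and threaded into every nested export call alongside `files_zip` and `storage`. The exact cache semantics are not visible in this diff, so the sketch below only illustrates the general idea of memoising repeated work through a shared dict; the helper names are invented and are not Baserow APIs.

```python
# Minimal sketch of threading one cache dict through nested export calls so
# repeated work (e.g. exporting the same user file twice) is done only once.
# All function names here are hypothetical.
from typing import Any, Dict, List, Optional


def export_user_file(name: str, cache: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    cache = cache if cache is not None else {}
    key = f"user_file_{name}"
    if key not in cache:
        # Imagine an expensive read + write into files_zip happening here.
        cache[key] = {"name": name, "exported": True}
    return cache[key]


def export_application(file_names: List[str]) -> List[Dict[str, Any]]:
    cache: Dict[str, Any] = {}  # mirrors the `self.cache = {}` created above
    return [export_user_file(n, cache=cache) for n in file_names]


print(export_application(["logo.png", "logo.png", "favicon.ico"]))
```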

View file

@@ -154,10 +154,21 @@ class BuilderConfig(AppConfig):
        from .domains.permission_manager import AllowPublicBuilderManagerType
        from .elements.permission_manager import ElementVisibilityPermissionManager
+       from .permission_manager import AllowIfTemplatePermissionManagerType

        permission_manager_type_registry.register(AllowPublicBuilderManagerType())
        permission_manager_type_registry.register(ElementVisibilityPermissionManager())

+       prev_manager = permission_manager_type_registry.get(
+           AllowIfTemplatePermissionManagerType.type
+       )
+       permission_manager_type_registry.unregister(
+           AllowIfTemplatePermissionManagerType.type
+       )
+       permission_manager_type_registry.register(
+           AllowIfTemplatePermissionManagerType(prev_manager)
+       )
+
        from .elements.element_types import (
            ButtonElementType,
            CheckboxElementType,
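Aside: the registration above follows a decorate-and-replace pattern: fetch the currently registered `allow_if_template` manager, unregister it, and register a subclass that keeps a reference to the previous one so it can extend its whitelist. Below is a self-contained sketch of that pattern with a stand-in registry and made-up operation names; it is not Baserow's actual registry code.

```python
# Stand-in registry demonstrating the decorate-and-replace swap shown above.
class Registry:
    def __init__(self):
        self._items = {}

    def register(self, instance):
        self._items[instance.type] = instance

    def unregister(self, type_name):
        del self._items[type_name]

    def get(self, type_name):
        return self._items[type_name]


class CoreAllowIfTemplate:
    type = "allow_if_template"
    OPERATION_ALLOWED_ON_TEMPLATES = ["core.list_applications"]  # illustrative


class BuilderAllowIfTemplate(CoreAllowIfTemplate):
    def __init__(self, prev_manager_type):
        self.prev_manager_type = prev_manager_type

    @property
    def OPERATION_ALLOWED_ON_TEMPLATES(self):
        # Widen the previous manager's whitelist instead of replacing it.
        return self.prev_manager_type.OPERATION_ALLOWED_ON_TEMPLATES + ["builder.list_pages"]


registry = Registry()
registry.register(CoreAllowIfTemplate())

prev = registry.get(CoreAllowIfTemplate.type)
registry.unregister(CoreAllowIfTemplate.type)
registry.register(BuilderAllowIfTemplate(prev))

print(registry.get("allow_if_template").OPERATION_ALLOWED_ON_TEMPLATES)
# ['core.list_applications', 'builder.list_pages']
```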

View file

@@ -361,6 +361,7 @@ class DataSourceHandler:
        data_source: DataSource,
        files_zip: Optional[ZipFile] = None,
        storage: Optional[Storage] = None,
+       cache: Optional[Dict[str, any]] = None,
    ) -> DataSourceDict:
        """
        Serializes the given data source.
@@ -374,7 +375,9 @@ class DataSourceHandler:
        serialized_service = None
        if data_source.service:
-           serialized_service = ServiceHandler().export_service(data_source.service)
+           serialized_service = ServiceHandler().export_service(
+               data_source.service, files_zip=files_zip, storage=storage, cache=cache
+           )

        return DataSourceDict(
            id=data_source.id,
@@ -390,6 +393,7 @@ class DataSourceHandler:
        id_mapping: Dict[str, Dict[int, int]],
        files_zip: Optional[ZipFile] = None,
        storage: Optional[Storage] = None,
+       cache: Optional[Dict[str, any]] = None,
    ):
        """
        Creates an instance using the serialized version previously exported with
@@ -426,6 +430,7 @@ class DataSourceHandler:
            id_mapping,
            files_zip=files_zip,
            storage=storage,
+           cache=cache,
            import_formula=import_formula,
        )

View file

@@ -48,6 +48,7 @@ from baserow.contrib.builder.pages.models import Page
from baserow.contrib.builder.types import ElementDict
from baserow.core.formula.types import BaserowFormula
from baserow.core.registry import T
+from baserow.core.user_files.handler import UserFileHandler


class ColumnElementType(ContainerElementTypeMixin, ElementType):
@@ -184,14 +185,29 @@ class FormContainerElementType(ContainerElementTypeMixin, ElementType):
        return child_types_allowed

-   def import_serialized(self, page, serialized_values, id_mapping):
+   def import_serialized(
+       self,
+       page,
+       serialized_values,
+       id_mapping,
+       files_zip=None,
+       storage=None,
+       cache=None,
+   ):
        serialized_copy = serialized_values.copy()
        if serialized_copy["submit_button_label"]:
            serialized_copy["submit_button_label"] = import_formula(
                serialized_copy["submit_button_label"], id_mapping
            )
-       return super().import_serialized(page, serialized_copy, id_mapping)
+       return super().import_serialized(
+           page,
+           serialized_copy,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
+       )


class TableElementType(CollectionElementWithFieldsTypeMixin, ElementType):
@ -287,14 +303,29 @@ class HeadingElementType(ElementType):
def get_pytest_params(self, pytest_data_fixture): def get_pytest_params(self, pytest_data_fixture):
return {"value": "'Corporis perspiciatis'", "level": 2, "alignment": "left"} return {"value": "'Corporis perspiciatis'", "level": 2, "alignment": "left"}
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["value"]: if serialized_copy["value"]:
serialized_copy["value"] = import_formula( serialized_copy["value"] = import_formula(
serialized_copy["value"], id_mapping serialized_copy["value"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
class TextElementType(ElementType): class TextElementType(ElementType):
@ -341,14 +372,29 @@ class TextElementType(ElementType):
), ),
} }
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["value"]: if serialized_copy["value"]:
serialized_copy["value"] = import_formula( serialized_copy["value"] = import_formula(
serialized_copy["value"], id_mapping serialized_copy["value"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
class NavigationElementManager: class NavigationElementManager:
@ -540,7 +586,14 @@ class LinkElementType(ElementType):
button_color: str button_color: str
def deserialize_property( def deserialize_property(
self, prop_name: str, value: Any, id_mapping: Dict[str, Any] self,
prop_name: str,
value: Any,
id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> Any: ) -> Any:
if prop_name == "value": if prop_name == "value":
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
@ -548,9 +601,15 @@ class LinkElementType(ElementType):
return super().deserialize_property( return super().deserialize_property(
prop_name, prop_name,
NavigationElementManager().deserialize_property( NavigationElementManager().deserialize_property(
prop_name, value, id_mapping prop_name,
value,
id_mapping,
), ),
id_mapping, id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
) )
@property @property
@@ -720,22 +779,56 @@ class ImageElementType(ElementType):
        overrides.update(super().request_serializer_field_overrides)
        return overrides

-   def import_serialized(self, page, serialized_values, id_mapping, **kwargs):
-       serialized_copy = serialized_values.copy()
-       if serialized_copy["image_url"]:
-           serialized_copy["image_url"] = import_formula(
-               serialized_copy["image_url"], id_mapping
-           )
-       if serialized_copy["alt_text"]:
-           serialized_copy["alt_text"] = import_formula(
-               serialized_copy["alt_text"], id_mapping
-           )
-       if serialized_copy["image_url"]:
-           serialized_copy["image_url"] = import_formula(
-               serialized_copy["image_url"], id_mapping
-           )
-       return super().import_serialized(page, serialized_copy, id_mapping)
+   def serialize_property(
+       self,
+       element: Element,
+       prop_name: BaserowFormula,
+       files_zip=None,
+       storage=None,
+       cache=None,
+   ):
+       if prop_name == "image_file_id":
+           return UserFileHandler().export_user_file(
+               element.image_file, files_zip=files_zip, storage=storage, cache=cache
+           )
+       return super().serialize_property(
+           element, prop_name, files_zip=files_zip, storage=storage, cache=cache
+       )
+
+   def deserialize_property(
+       self,
+       prop_name: str,
+       value: Any,
+       id_mapping: Dict[str, Any],
+       files_zip=None,
+       storage=None,
+       cache=None,
+       **kwargs,
+   ) -> Any:
+       if prop_name == "image_url":
+           return import_formula(value, id_mapping)
+       if prop_name == "alt_text":
+           return import_formula(value, id_mapping)
+       if prop_name == "image_file_id":
+           user_file = UserFileHandler().import_user_file(
+               value, files_zip=files_zip, storage=storage
+           )
+           if user_file:
+               return user_file.id
+           return None
+       return super().deserialize_property(
+           prop_name,
+           value,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
+           **kwargs,
+       )


class InputElementType(FormElementTypeMixin, ElementType, abc.ABC):
@ -826,7 +919,15 @@ class InputTextElementType(InputElementType):
return overrides return overrides
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["label"]: if serialized_copy["label"]:
serialized_copy["label"] = import_formula( serialized_copy["label"] = import_formula(
@ -841,7 +942,14 @@ class InputTextElementType(InputElementType):
serialized_copy["placeholder"], id_mapping serialized_copy["placeholder"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def get_pytest_params(self, pytest_data_fixture): def get_pytest_params(self, pytest_data_fixture):
return { return {
@ -923,14 +1031,29 @@ class ButtonElementType(ElementType):
def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]:
return {"value": "'Some value'"} return {"value": "'Some value'"}
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["value"]: if serialized_copy["value"]:
serialized_copy["value"] = import_formula( serialized_copy["value"] = import_formula(
serialized_copy["value"], id_mapping serialized_copy["value"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
class CheckboxElementType(InputElementType): class CheckboxElementType(InputElementType):
@ -970,7 +1093,15 @@ class CheckboxElementType(InputElementType):
return overrides return overrides
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["label"]: if serialized_copy["label"]:
serialized_copy["label"] = import_formula( serialized_copy["label"] = import_formula(
@ -981,7 +1112,14 @@ class CheckboxElementType(InputElementType):
serialized_copy["default_value"], id_mapping serialized_copy["default_value"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def get_pytest_params(self, pytest_data_fixture): def get_pytest_params(self, pytest_data_fixture):
return { return {
@ -1059,17 +1197,33 @@ class DropdownElementType(FormElementTypeMixin, ElementType):
"options": DropdownOptionSerializer(many=True, required=False), "options": DropdownOptionSerializer(many=True, required=False),
} }
def serialize_property(self, element: DropdownElement, prop_name: str): def serialize_property(
self,
element: DropdownElement,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
if prop_name == "options": if prop_name == "options":
return [ return [
self.serialize_option(option) self.serialize_option(option)
for option in element.dropdownelementoption_set.all() for option in element.dropdownelementoption_set.all()
] ]
return super().serialize_property(element, prop_name) return super().serialize_property(
element, prop_name, files_zip=files_zip, storage=storage, cache=cache
)
def deserialize_property( def deserialize_property(
self, prop_name: str, value: Any, id_mapping: Dict[str, Any] self,
prop_name: str,
value: Any,
id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> Any: ) -> Any:
if prop_name == "default_value": if prop_name == "default_value":
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
@ -1077,17 +1231,33 @@ class DropdownElementType(FormElementTypeMixin, ElementType):
if prop_name == "placeholder": if prop_name == "placeholder":
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
return super().deserialize_property(prop_name, value, id_mapping) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def import_serialized( def import_serialized(
self, self,
parent: Any, parent: Any,
serialized_values: Dict[str, Any], serialized_values: Dict[str, Any],
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
) -> T: ) -> T:
dropdown_element = super().import_serialized( dropdown_element = super().import_serialized(
parent, serialized_values, id_mapping parent,
serialized_values,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
options = [] options = []
@ -1100,9 +1270,22 @@ class DropdownElementType(FormElementTypeMixin, ElementType):
return dropdown_element return dropdown_element
def create_instance_from_serialized(self, serialized_values: Dict[str, Any]) -> T: def create_instance_from_serialized(
self,
serialized_values: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> T:
serialized_values.pop("options", None) serialized_values.pop("options", None)
return super().create_instance_from_serialized(serialized_values) return super().create_instance_from_serialized(
serialized_values,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def serialize_option(self, option: DropdownElementOption) -> Dict: def serialize_option(self, option: DropdownElementOption) -> Dict:
return { return {
@ -1202,7 +1385,15 @@ class IFrameElementType(ElementType):
return overrides return overrides
def import_serialized(self, page, serialized_values, id_mapping): def import_serialized(
self,
page,
serialized_values,
id_mapping,
files_zip=None,
storage=None,
cache=None,
):
serialized_copy = serialized_values.copy() serialized_copy = serialized_values.copy()
if serialized_copy["url"]: if serialized_copy["url"]:
serialized_copy["url"] = import_formula(serialized_copy["url"], id_mapping) serialized_copy["url"] = import_formula(serialized_copy["url"], id_mapping)
@ -1211,7 +1402,14 @@ class IFrameElementType(ElementType):
serialized_copy["embed"], id_mapping serialized_copy["embed"], id_mapping
) )
return super().import_serialized(page, serialized_copy, id_mapping) return super().import_serialized(
page,
serialized_copy,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def get_pytest_params(self, pytest_data_fixture): def get_pytest_params(self, pytest_data_fixture):
return { return {

View file

@ -486,6 +486,7 @@ class ElementHandler:
element: Element, element: Element,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
): ):
""" """
Serializes the given element. Serializes the given element.
@ -496,7 +497,9 @@ class ElementHandler:
:return: The serialized version. :return: The serialized version.
""" """
return element.get_type().export_serialized(element) return element.get_type().export_serialized(
element, files_zip=files_zip, storage=storage, cache=cache
)
def import_element( def import_element(
self, self,
@ -505,6 +508,7 @@ class ElementHandler:
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
) -> Element: ) -> Element:
""" """
Creates an instance using the serialized version previously exported with Creates an instance using the serialized version previously exported with
@ -524,7 +528,12 @@ class ElementHandler:
element_type = element_type_registry.get(serialized_element["type"]) element_type = element_type_registry.get(serialized_element["type"])
created_instance = element_type.import_serialized( created_instance = element_type.import_serialized(
page, serialized_element, id_mapping page,
serialized_element,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
id_mapping["builder_page_elements"][ id_mapping["builder_page_elements"][

View file

@ -177,18 +177,32 @@ class CollectionElementTypeMixin:
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
) -> Any: ) -> Any:
if prop_name == "data_source_id" and value: if prop_name == "data_source_id" and value:
return id_mapping["builder_data_sources"][value] return id_mapping["builder_data_sources"][value]
return super().deserialize_property(prop_name, value, id_mapping) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def import_serialized( def import_serialized(
self, self,
parent: Any, parent: Any,
serialized_values: Dict[str, Any], serialized_values: Dict[str, Any],
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
): ):
""" """
@ -210,6 +224,9 @@ class CollectionElementTypeMixin:
serialized_values, serialized_values,
id_mapping, id_mapping,
data_source_id=actual_data_source_id, data_source_id=actual_data_source_id,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs, **kwargs,
) )
@ -234,7 +251,15 @@ class CollectionElementWithFieldsTypeMixin(CollectionElementTypeMixin):
class SerializedDict(CollectionElementTypeMixin.SerializedDict): class SerializedDict(CollectionElementTypeMixin.SerializedDict):
fields: List[Dict] fields: List[Dict]
def serialize_property(self, element: CollectionElementSubClass, prop_name: str): def serialize_property(
self,
element: CollectionElementSubClass,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
**kwargs,
):
""" """
You can customize the behavior of the serialization of a property with this You can customize the behavior of the serialization of a property with this
hook. hook.
@ -246,7 +271,14 @@ class CollectionElementWithFieldsTypeMixin(CollectionElementTypeMixin):
for f in element.fields.all() for f in element.fields.all()
] ]
return super().serialize_property(element, prop_name) return super().serialize_property(
element,
prop_name,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def after_create(self, instance: CollectionElementSubClass, values): def after_create(self, instance: CollectionElementSubClass, values):
default_fields = [ default_fields = [
@ -293,12 +325,25 @@ class CollectionElementWithFieldsTypeMixin(CollectionElementTypeMixin):
def before_delete(self, instance: CollectionElementSubClass): def before_delete(self, instance: CollectionElementSubClass):
instance.fields.all().delete() instance.fields.all().delete()
def create_instance_from_serialized(self, serialized_values: Dict[str, Any]): def create_instance_from_serialized(
self,
serialized_values: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs,
):
"""Deals with the fields""" """Deals with the fields"""
fields = serialized_values.pop("fields", []) fields = serialized_values.pop("fields", [])
instance = super().create_instance_from_serialized(serialized_values) instance = super().create_instance_from_serialized(
serialized_values,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
# Add the field order # Add the field order
for i, f in enumerate(fields): for i, f in enumerate(fields):
@ -316,6 +361,9 @@ class CollectionElementWithFieldsTypeMixin(CollectionElementTypeMixin):
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
) -> Any: ) -> Any:
if prop_name == "fields": if prop_name == "fields":
@ -327,7 +375,15 @@ class CollectionElementWithFieldsTypeMixin(CollectionElementTypeMixin):
for f in value for f in value
] ]
return super().deserialize_property(prop_name, value, id_mapping) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
class FormElementTypeMixin: class FormElementTypeMixin:

View file

@@ -1,6 +1,8 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, TypedDict, TypeVar, Union
+from zipfile import ZipFile

+from django.core.files.storage import Storage
from django.db import models

from rest_framework import serializers
@@ -99,7 +101,14 @@ class ElementType(
        :param instance: The to be deleted element instance.
        """

-   def serialize_property(self, element: Element, prop_name: str):
+   def serialize_property(
+       self,
+       element: Element,
+       prop_name: str,
+       files_zip: Optional[ZipFile] = None,
+       storage: Optional[Storage] = None,
+       cache: Optional[Dict] = None,
+   ):
        """
        You can customize the behavior of the serialization of a property with this
        hook.
@@ -108,10 +117,19 @@ class ElementType(
        if prop_name == "order":
            return str(element.order)

-       return super().serialize_property(element, prop_name)
+       return super().serialize_property(
+           element, prop_name, files_zip=files_zip, storage=storage, cache=cache
+       )

    def deserialize_property(
-       self, prop_name: str, value: Any, id_mapping: Dict[str, Any]
+       self,
+       prop_name: str,
+       value: Any,
+       id_mapping: Dict[str, Any],
+       files_zip: Optional[ZipFile] = None,
+       storage: Optional[Storage] = None,
+       cache: Optional[Dict] = None,
+       **kwargs,
    ) -> Any:
        """
        This hooks allow to customize the deserialization of a property.

View file

@ -347,6 +347,7 @@ class PageHandler:
page: Page, page: Page,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
) -> List[PageDict]: ) -> List[PageDict]:
""" """
Serializes the given page. Serializes the given page.
@ -359,14 +360,16 @@ class PageHandler:
# Get serialized version of all elements of the current page # Get serialized version of all elements of the current page
serialized_elements = [ serialized_elements = [
ElementHandler().export_element(e, files_zip=files_zip, storage=storage) ElementHandler().export_element(
e, files_zip=files_zip, storage=storage, cache=cache
)
for e in ElementHandler().get_elements(page=page) for e in ElementHandler().get_elements(page=page)
] ]
# Get serialized versions of all workflow actions of the current page # Get serialized versions of all workflow actions of the current page
serialized_workflow_actions = [ serialized_workflow_actions = [
BuilderWorkflowActionHandler().export_workflow_action( BuilderWorkflowActionHandler().export_workflow_action(
wa, files_zip=files_zip, storage=storage wa, files_zip=files_zip, storage=storage, cache=cache
) )
for wa in BuilderWorkflowActionHandler().get_workflow_actions(page=page) for wa in BuilderWorkflowActionHandler().get_workflow_actions(page=page)
] ]
@ -374,7 +377,7 @@ class PageHandler:
# Get serialized version of all data_sources for the current page # Get serialized version of all data_sources for the current page
serialized_data_sources = [ serialized_data_sources = [
DataSourceHandler().export_data_source( DataSourceHandler().export_data_source(
ds, files_zip=files_zip, storage=storage ds, files_zip=files_zip, storage=storage, cache=cache
) )
for ds in DataSourceHandler().get_data_sources(page=page) for ds in DataSourceHandler().get_data_sources(page=page)
] ]
@ -413,6 +416,7 @@ class PageHandler:
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
cache: Optional[Dict[str, any]] = None,
): ):
""" """
Import multiple pages at once. Especially useful when we have dependencies Import multiple pages at once. Especially useful when we have dependencies
@ -440,6 +444,7 @@ class PageHandler:
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
progress=progress, progress=progress,
cache=cache,
) )
imported_pages.append([page_instance, serialized_page]) imported_pages.append([page_instance, serialized_page])
@ -451,6 +456,7 @@ class PageHandler:
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
progress=progress, progress=progress,
cache=cache,
) )
for page_instance, serialized_page in imported_pages: for page_instance, serialized_page in imported_pages:
@ -461,6 +467,7 @@ class PageHandler:
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
progress=progress, progress=progress,
cache=cache,
) )
for page_instance, serialized_page in imported_pages: for page_instance, serialized_page in imported_pages:
@ -471,6 +478,7 @@ class PageHandler:
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
progress=progress, progress=progress,
cache=cache,
) )
return [i[0] for i in imported_pages] return [i[0] for i in imported_pages]
@ -483,6 +491,7 @@ class PageHandler:
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
cache: Optional[Dict[str, any]] = None,
): ):
""" """
Creates an instance using the serialized version previously exported with Creates an instance using the serialized version previously exported with
@ -504,6 +513,7 @@ class PageHandler:
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
progress=progress, progress=progress,
cache=cache,
)[0] )[0]
def import_page_only( def import_page_only(
@ -513,6 +523,7 @@ class PageHandler:
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
): ):
if "builder_pages" not in id_mapping: if "builder_pages" not in id_mapping:
@ -540,6 +551,7 @@ class PageHandler:
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
cache: Optional[Dict[str, any]] = None,
): ):
""" """
Import all page data sources. Import all page data sources.
@ -560,6 +572,7 @@ class PageHandler:
id_mapping, id_mapping,
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
cache=cache,
) )
progress.increment(state=IMPORT_SERIALIZED_IMPORTING) progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
@ -571,6 +584,7 @@ class PageHandler:
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
cache: Optional[Dict[str, any]] = None,
): ):
""" """
Import all page elements, dealing with the potential incorrect order regarding Import all page elements, dealing with the potential incorrect order regarding
@ -626,6 +640,7 @@ class PageHandler:
id_mapping, id_mapping,
files_zip=files_zip, files_zip=files_zip,
storage=storage, storage=storage,
cache=cache,
) )
) )
was_imported = True was_imported = True
@ -642,6 +657,7 @@ class PageHandler:
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
progress: Optional[ChildProgressBuilder] = None, progress: Optional[ChildProgressBuilder] = None,
cache: Optional[Dict[str, any]] = None,
): ):
""" """
Import all page workflow_actions. Import all page workflow_actions.
@ -660,6 +676,11 @@ class PageHandler:
for serialized_workflow_action in serialized_workflow_actions: for serialized_workflow_action in serialized_workflow_actions:
BuilderWorkflowActionHandler().import_workflow_action( BuilderWorkflowActionHandler().import_workflow_action(
page, serialized_workflow_action, id_mapping page,
serialized_workflow_action,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
progress.increment(state=IMPORT_SERIALIZED_IMPORTING) progress.increment(state=IMPORT_SERIALIZED_IMPORTING)

View file

@ -0,0 +1,41 @@
from django.contrib.auth import get_user_model
from baserow.contrib.builder.data_sources.operations import (
DispatchDataSourceOperationType,
ListDataSourcesPageOperationType,
)
from baserow.contrib.builder.elements.operations import ListElementsPageOperationType
from baserow.contrib.builder.operations import ListPagesBuilderOperationType
from baserow.contrib.builder.workflow_actions.operations import (
ListBuilderWorkflowActionsPageOperationType,
)
from baserow.core.permission_manager import (
AllowIfTemplatePermissionManagerType as CoreAllowIfTemplatePermissionManagerType,
)
from baserow.core.registries import PermissionManagerType
User = get_user_model()
class AllowIfTemplatePermissionManagerType(CoreAllowIfTemplatePermissionManagerType):
"""
Allows read operation on templates.
"""
BUILDER_OPERATION_ALLOWED_ON_TEMPLATES = [
ListPagesBuilderOperationType.type,
ListElementsPageOperationType.type,
ListBuilderWorkflowActionsPageOperationType.type,
DispatchDataSourceOperationType.type,
ListDataSourcesPageOperationType.type,
]
@property
def OPERATION_ALLOWED_ON_TEMPLATES(self):
return (
self.prev_manager_type.OPERATION_ALLOWED_ON_TEMPLATES
+ self.BUILDER_OPERATION_ALLOWED_ON_TEMPLATES
)
def __init__(self, prev_manager_type: PermissionManagerType):
self.prev_manager_type = prev_manager_type
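Aside: the subclass above only widens the whitelist; the allow/deny decision itself is inherited from the core `AllowIfTemplatePermissionManagerType`, which is not shown in this diff. The sketch below is a hedged guess at how such a check plausibly behaves (listed operation on a template workspace is allowed, everything else is left to the next manager); all types and the logic are assumptions, not Baserow's actual implementation.

```python
# Assumed behaviour of a template whitelist check, with stand-in types only.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Workspace:
    is_template: bool = False

    def has_template(self) -> bool:
        return self.is_template


@dataclass
class TemplateWhitelistManager:
    OPERATION_ALLOWED_ON_TEMPLATES: List[str] = field(
        default_factory=lambda: ["builder.page.list_elements"]  # illustrative name
    )

    def check_permissions(self, operation: str, workspace: Workspace) -> Optional[bool]:
        if workspace.has_template() and operation in self.OPERATION_ALLOWED_ON_TEMPLATES:
            return True  # read-only operation on a template: allow
        return None  # no opinion, defer to the next permission manager


manager = TemplateWhitelistManager()
print(manager.check_permissions("builder.page.list_elements", Workspace(is_template=True)))   # True
print(manager.check_permissions("builder.page.list_elements", Workspace(is_template=False)))  # None
```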

View file

@@ -287,7 +287,7 @@ def load_test_data():
            "config": {
                "navigation_type": "page",
                "navigate_to_page_id": product_detail.id,
-               "navigate_to_url": None,
+               "navigate_to_url": "",
                "page_parameters": [
                    {"name": "id", "value": "get('current_record.id')"},
                    {

View file

@@ -68,6 +68,7 @@ class BuilderWorkflowActionHandler(WorkflowActionHandler):
        id_mapping: Dict[str, Dict[int, int]],
        files_zip: Optional[ZipFile] = None,
        storage: Optional[Storage] = None,
+       cache: Optional[Dict[str, any]] = None,
    ):
        """
        Creates an instance using the serialized version previously exported with
@@ -86,7 +87,12 @@ class BuilderWorkflowActionHandler(WorkflowActionHandler):
            serialized_workflow_action["type"]
        )
        return workflow_action_type.import_serialized(
-           page, serialized_workflow_action, id_mapping
+           page,
+           serialized_workflow_action,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
        )

    def order_workflow_actions(

View file

@@ -32,7 +32,14 @@ class BuilderWorkflowActionType(WorkflowActionType, PublicCustomFieldsInstanceMi
        return super().prepare_values(values, user, instance)

    def deserialize_property(
-       self, prop_name: str, value: Any, id_mapping: Dict[str, Any]
+       self,
+       prop_name: str,
+       value: Any,
+       id_mapping: Dict[str, Any],
+       files_zip=None,
+       storage=None,
+       cache=None,
+       **kwargs
    ) -> Any:
        """
        This hooks allow to customize the deserialization of a property.
@@ -51,7 +58,15 @@ class BuilderWorkflowActionType(WorkflowActionType, PublicCustomFieldsInstanceMi
        if prop_name == "element_id":
            return id_mapping["builder_page_elements"][value]

-       return value
+       return super().deserialize_property(
+           prop_name,
+           value,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
+           **kwargs
+       )


class BuilderWorkflowActionTypeRegistry(

View file

@ -60,7 +60,16 @@ class NotificationWorkflowActionType(BuilderWorkflowActionType):
def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]:
return {"title": "'hello'", "description": "'there'"} return {"title": "'hello'", "description": "'there'"}
def deserialize_property(self, prop_name, value, id_mapping: Dict) -> Any: def deserialize_property(
self,
prop_name,
value,
id_mapping: Dict,
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> Any:
""" """
Migrate the formulas. Migrate the formulas.
""" """
@ -71,7 +80,15 @@ class NotificationWorkflowActionType(BuilderWorkflowActionType):
if prop_name == "description": if prop_name == "description":
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
return super().deserialize_property(prop_name, value, id_mapping) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
class OpenPageWorkflowActionType(BuilderWorkflowActionType): class OpenPageWorkflowActionType(BuilderWorkflowActionType):
@ -116,7 +133,16 @@ class OpenPageWorkflowActionType(BuilderWorkflowActionType):
def get_pytest_params(self, pytest_data_fixture): def get_pytest_params(self, pytest_data_fixture):
return NavigationElementManager().get_pytest_params(pytest_data_fixture) return NavigationElementManager().get_pytest_params(pytest_data_fixture)
def deserialize_property(self, prop_name, value, id_mapping: Dict) -> Any: def deserialize_property(
self,
prop_name,
value,
id_mapping: Dict,
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> Any:
""" """
Migrate the formulas. Migrate the formulas.
""" """
@ -124,12 +150,19 @@ class OpenPageWorkflowActionType(BuilderWorkflowActionType):
if prop_name == "url": # TODO remove in the next release if prop_name == "url": # TODO remove in the next release
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
if prop_name == "description":
return import_formula(value, id_mapping)
return super().deserialize_property( return super().deserialize_property(
prop_name, prop_name,
NavigationElementManager().deserialize_property( NavigationElementManager().deserialize_property(
prop_name, value, id_mapping prop_name, value, id_mapping
), ),
id_mapping, id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
) )
@ -167,12 +200,27 @@ class RefreshDataSourceWorkflowAction(BuilderWorkflowActionType):
def allowed_fields(self): def allowed_fields(self):
return super().allowed_fields + ["data_source_id"] return super().allowed_fields + ["data_source_id"]
def deserialize_property(self, prop_name, value, id_mapping: Dict) -> Any: def deserialize_property(
self,
prop_name,
value,
id_mapping: Dict,
files_zip=None,
storage=None,
cache=None,
) -> Any:
data_sources = id_mapping.get("builder_data_sources", {}) data_sources = id_mapping.get("builder_data_sources", {})
if prop_name == "data_source_id" and value in data_sources: if prop_name == "data_source_id" and value in data_sources:
return data_sources[value] return data_sources[value]
return super().deserialize_property(prop_name, value, id_mapping) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
class BuilderWorkflowServiceActionType(BuilderWorkflowActionType): class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
@@ -201,7 +249,14 @@ class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
        service_type = service_type_registry.get_by_model(pytest_params["service"])
        return {"service": service_type.export_serialized(pytest_params["service"])}

-   def serialize_property(self, workflow_action: WorkflowAction, prop_name: str):
+   def serialize_property(
+       self,
+       workflow_action: WorkflowAction,
+       prop_name: str,
+       files_zip=None,
+       storage=None,
+       cache=None,
+   ):
        """
        You can customize the behavior of the serialization of a property with this
        hook.
@@ -209,11 +264,27 @@ class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
        if prop_name == "service":
            service = workflow_action.service.specific
-           return service.get_type().export_serialized(service)
-       return super().serialize_property(workflow_action, prop_name)
+           return service.get_type().export_serialized(
+               service, files_zip=files_zip, storage=storage, cache=cache
+           )
+
+       return super().serialize_property(
+           workflow_action,
+           prop_name,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
+       )

    def deserialize_property(
-       self, prop_name: str, value: Any, id_mapping: Dict[str, Any]
+       self,
+       prop_name: str,
+       value: Any,
+       id_mapping: Dict[str, Any],
+       files_zip=None,
+       storage=None,
+       cache=None,
+       **kwargs,
    ) -> Any:
        """
        If the workflow action has a relation to a service, this method will
@@ -242,7 +313,15 @@ class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
            id_mapping,
            import_formula=import_formula,
        )

-       return super().deserialize_property(prop_name, value, id_mapping)
+       return super().deserialize_property(
+           prop_name,
+           value,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
+           **kwargs,
+       )


class UpsertRowWorkflowActionType(BuilderWorkflowServiceActionType):

View file

@@ -176,7 +176,6 @@ class FieldsView(APIView):
            ListFieldsOperationType.type,
            workspace=table.database.workspace,
            context=table,
-           allow_if_template=True,
        )

        TokenHandler().check_table_permissions(

View file

@@ -46,7 +46,6 @@ class DatabaseSerializer(serializers.ModelSerializer):
            ListTablesDatabaseTableOperationType.type,
            tables,
            workspace=instance.workspace,
-           allow_if_template=True,
        )
        return TableSerializer(tables, many=True).data

View file

@@ -259,7 +259,6 @@ class GalleryViewView(APIView):
            ListRowsDatabaseTableOperationType.type,
            workspace=workspace,
            context=view.table,
-           allow_if_template=True,
        )

        search = query_params.get("search")

View file

@@ -346,7 +346,6 @@ class GridViewView(APIView):
            ListRowsDatabaseTableOperationType.type,
            workspace=workspace,
            context=view.table,
-           allow_if_template=True,
        )
        field_ids = get_include_exclude_field_ids(
            view.table, include_fields, exclude_fields

View file

@@ -283,7 +283,6 @@ class ViewsView(APIView):
            ListViewsOperationType.type,
            workspace=table.database.workspace,
            context=table,
-           allow_if_template=True,
        )

        views = ViewHandler().list_views(

View file

@@ -724,10 +724,21 @@ class DatabaseConfig(AppConfig):
        from baserow.core.registries import permission_manager_type_registry

+       from .permission_manager import AllowIfTemplatePermissionManagerType
        from .tokens.permission_manager import TokenPermissionManagerType

        permission_manager_type_registry.register(TokenPermissionManagerType())

+       prev_manager = permission_manager_type_registry.get(
+           AllowIfTemplatePermissionManagerType.type
+       )
+       permission_manager_type_registry.unregister(
+           AllowIfTemplatePermissionManagerType.type
+       )
+       permission_manager_type_registry.register(
+           AllowIfTemplatePermissionManagerType(prev_manager)
+       )
+
        from baserow.core.registries import subject_type_registry

        from .tokens.subjects import TokenSubjectType

View file

@@ -427,9 +427,9 @@ class FieldDependencyHandler:
            if dependency_field.table_id != field.table_id:
                perm_checks.append(
                    PermissionCheck(
-                       actor=user,
-                       operation_name=field_operation_name,
-                       context=dependency_field,
+                       user,
+                       field_operation_name,
+                       dependency_field,
                    )
                )

View file

@ -0,0 +1,49 @@
from django.contrib.auth import get_user_model
from baserow.contrib.database.fields.operations import ListFieldsOperationType
from baserow.contrib.database.operations import ListTablesDatabaseTableOperationType
from baserow.contrib.database.rows.operations import ReadDatabaseRowOperationType
from baserow.contrib.database.table.operations import ListRowsDatabaseTableOperationType
from baserow.contrib.database.views.operations import (
ListAggregationsViewOperationType,
ListViewDecorationOperationType,
ListViewsOperationType,
ReadAggregationsViewOperationType,
ReadViewFieldOptionsOperationType,
ReadViewOperationType,
)
from baserow.core.permission_manager import (
AllowIfTemplatePermissionManagerType as CoreAllowIfTemplatePermissionManagerType,
)
from baserow.core.registries import PermissionManagerType
User = get_user_model()
class AllowIfTemplatePermissionManagerType(CoreAllowIfTemplatePermissionManagerType):
"""
Allows read operation on templates.
"""
DATABASE_OPERATION_ALLOWED_ON_TEMPLATES = [
ListTablesDatabaseTableOperationType.type,
ListFieldsOperationType.type,
ListRowsDatabaseTableOperationType.type,
ListViewsOperationType.type,
ReadDatabaseRowOperationType.type,
ReadViewOperationType.type,
ReadViewFieldOptionsOperationType.type,
ListViewDecorationOperationType.type,
ListAggregationsViewOperationType.type,
ReadAggregationsViewOperationType.type,
]
@property
def OPERATION_ALLOWED_ON_TEMPLATES(self):
return (
self.prev_manager_type.OPERATION_ALLOWED_ON_TEMPLATES
+ self.DATABASE_OPERATION_ALLOWED_ON_TEMPLATES
)
def __init__(self, prev_manager_type: PermissionManagerType):
self.prev_manager_type = prev_manager_type

View file

@@ -80,9 +80,15 @@ class SearchHandler(
    @classmethod
    def get_default_search_mode_for_table(cls, table: "Table") -> str:
+       # Template table indexes are not created to save space so we can only
+       # use compat search here.
+       if table.database.workspace.has_template():
+           return SearchModes.MODE_COMPAT
+
        search_mode = settings.DEFAULT_SEARCH_MODE
        if table.tsvectors_are_supported:
            search_mode = SearchModes.MODE_FT_WITH_COUNT
        return search_mode

    @classmethod
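Aside: the early return above pins template tables to compat search because their tsvector indexes are deliberately never created. Below is a small stand-in reproduction of the selection order; the `SearchModes` constant names come from the hunk, but the string values, the default, and the boolean inputs are mocks rather than Baserow's real settings or models.

```python
# Stand-in reproduction of the search-mode decision order shown above.
class SearchModes:
    MODE_COMPAT = "compat"                       # mock value
    MODE_FT_WITH_COUNT = "full-text-with-count"  # mock value


DEFAULT_SEARCH_MODE = SearchModes.MODE_COMPAT    # stand-in for settings.DEFAULT_SEARCH_MODE


def get_default_search_mode_for_table(is_template_workspace: bool, tsvectors_supported: bool) -> str:
    # Template tables have no tsvector indexes (skipped to save space), so
    # compat search is the only safe choice for them.
    if is_template_workspace:
        return SearchModes.MODE_COMPAT
    return SearchModes.MODE_FT_WITH_COUNT if tsvectors_supported else DEFAULT_SEARCH_MODE


print(get_default_search_mode_for_table(True, True))    # compat
print(get_default_search_mode_for_table(False, True))   # full-text-with-count
```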

View file

@@ -508,7 +508,6 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
            ListViewsOperationType.type,
            views,
            table.database.workspace,
-           allow_if_template=True,
        )

        views = views.select_related("content_type", "table")
@@ -637,7 +636,6 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
            ReadViewOperationType.type,
            workspace=view.table.database.workspace,
            context=view,
-           allow_if_template=True,
        )

        return view
@@ -1098,12 +1096,12 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
        """

        workspace = view.table.database.workspace
        CoreHandler().check_permissions(
            user,
            ReadViewFieldOptionsOperationType.type,
            workspace=workspace,
            context=view,
-           allow_if_template=True,
        )

        view_type = view_type_registry.get_by_model(view)
        return view_type
@@ -2747,7 +2745,6 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
            ListAggregationsViewOperationType.type,
            workspace=view.table.database.workspace,
            context=view,
-           allow_if_template=True,
            raise_permission_exceptions=True,
        )
@@ -2892,7 +2889,6 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
            ReadAggregationsViewOperationType.type,
            workspace=view.table.database.workspace,
            context=view,
-           allow_if_template=True,
        )

        if model is None:

View file

@@ -47,7 +47,14 @@ class LocalBaserowIntegrationType(IntegrationType):
        return super().prepare_values(values, user)

-   def serialize_property(self, integration: Integration, prop_name: str):
+   def serialize_property(
+       self,
+       integration: Integration,
+       prop_name: str,
+       files_zip=None,
+       storage=None,
+       cache=None,
+   ):
        """
        Replace the authorized user property with it's username. Better when loading the
        data later.
@@ -58,13 +65,25 @@ class LocalBaserowIntegrationType(IntegrationType):
                return integration.authorized_user.username
            return None

-       return super().serialize_property(integration, prop_name)
+       return super().serialize_property(
+           integration, prop_name, files_zip=files_zip, storage=storage, cache=cache
+       )
+
+   def after_template_install(
+       self, user: AbstractUser, instance: LocalBaserowIntegration
+   ):
+       """Add the user who installed the template as authorized user"""
+
+       instance.authorized_user = user
+       instance.save()

    def import_serialized(
        self,
        application: Application,
        serialized_values: Dict[str, Any],
        id_mapping: Dict,
+       files_zip=None,
+       storage=None,
        cache=None,
    ) -> LocalBaserowIntegration:
        """
@@ -94,7 +113,12 @@ class LocalBaserowIntegrationType(IntegrationType):
        )

        return super().import_serialized(
-           application, serialized_values, id_mapping, cache
+           application,
+           serialized_values,
+           id_mapping,
+           files_zip=files_zip,
+           storage=storage,
+           cache=cache,
        )

    def enhance_queryset(self, queryset):
@@ -118,10 +142,7 @@ class LocalBaserowIntegrationType(IntegrationType):
        in this list.
        """

-       if (
-           not integration.application.workspace_id
-           or not integration.specific.authorized_user
-       ):
+       if not integration.application.workspace_id:
            return []

        user = integration.specific.authorized_user
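Aside: the new `after_template_install` hook only assigns the installing user as the integration's authorized user; the call site that drives it during template installation is outside this excerpt. The sketch below is a hypothetical, self-contained illustration of how such a hook could be invoked, using simplified stand-ins for the real Django models and handlers.

```python
# Hypothetical illustration of driving an after_template_install hook;
# the real Baserow call site is not shown in this diff excerpt.
from dataclasses import dataclass
from typing import Iterable, Optional


@dataclass
class LocalBaserowIntegration:  # simplified stand-in for the Django model
    authorized_user: Optional[str] = None

    def save(self):
        pass  # stand-in for Model.save()


class LocalBaserowIntegrationType:  # simplified stand-in for the integration type
    def after_template_install(self, user: str, instance: LocalBaserowIntegration):
        """Add the user who installed the template as authorized user."""
        instance.authorized_user = user
        instance.save()


def install_template(user: str, imported_integrations: Iterable[LocalBaserowIntegration]):
    integration_type = LocalBaserowIntegrationType()
    for integration in imported_integrations:
        # Without this step the imported integration would have no authorized
        # user, so its data sources could not resolve any rows.
        integration_type.after_template_install(user, integration)


integrations = [LocalBaserowIntegration()]
install_template("alice@example.com", integrations)
print(integrations[0].authorized_user)  # alice@example.com
```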

View file

@@ -25,7 +25,6 @@ from rest_framework.serializers import ListSerializer, Serializer
from baserow.contrib.builder.data_providers.exceptions import (
    DataProviderChunkInvalidException,
)
-from baserow.contrib.builder.formula_importer import import_formula
from baserow.contrib.database.api.fields.serializers import (
    DurationFieldSerializer,
    FieldSerializer,
@ -221,7 +220,14 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
return resolved_values return resolved_values
def serialize_property(self, service: ServiceSubClass, prop_name: str): def serialize_property(
self,
service: ServiceSubClass,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
""" """
Responsible for serializing the `filters` and `sortings` properties. Responsible for serializing the `filters` and `sortings` properties.
@ -249,10 +255,20 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
for s in service.service_sorts.all() for s in service.service_sorts.all()
] ]
return super().serialize_property(service, prop_name) return super().serialize_property(
service, prop_name, files_zip=files_zip, storage=storage, cache=cache
)
def deserialize_property( def deserialize_property(
self, prop_name: str, value: Any, id_mapping: Dict[str, Any], **kwargs self,
prop_name: str,
value: Any,
id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x,
**kwargs,
): ):
""" """
Get the view, table and field IDs from the mapping if they exists. Get the view, table and field IDs from the mapping if they exists.
@ -270,9 +286,20 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
for item in value for item in value
] ]
return super().deserialize_property(prop_name, value, id_mapping, **kwargs) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
import_formula=import_formula,
**kwargs,
)
def create_instance_from_serialized(self, serialized_values): def create_instance_from_serialized(
self, serialized_values, files_zip=None, storage=None, cache=None, **kwargs
):
""" """
Responsible for creating the `filters` and `sortings`. Responsible for creating the `filters` and `sortings`.
@ -285,7 +312,13 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
filters = serialized_values.pop("filters", []) filters = serialized_values.pop("filters", [])
sortings = serialized_values.pop("sortings", []) sortings = serialized_values.pop("sortings", [])
service = super().create_instance_from_serialized(serialized_values) service = super().create_instance_from_serialized(
serialized_values,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
# Create filters # Create filters
LocalBaserowTableServiceFilter.objects.bulk_create( LocalBaserowTableServiceFilter.objects.bulk_create(
@ -467,6 +500,10 @@ class LocalBaserowViewServiceType(LocalBaserowTableServiceType):
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x,
**kwargs, **kwargs,
): ):
""" """
@ -476,7 +513,16 @@ class LocalBaserowViewServiceType(LocalBaserowTableServiceType):
if prop_name == "view_id" and "database_views" in id_mapping: if prop_name == "view_id" and "database_views" in id_mapping:
return id_mapping["database_views"].get(value, None) return id_mapping["database_views"].get(value, None)
return super().deserialize_property(prop_name, value, id_mapping, **kwargs) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
import_formula=import_formula,
**kwargs,
)
def prepare_values( def prepare_values(
self, self,
@ -627,6 +673,8 @@ class LocalBaserowListRowsUserServiceType(
return path return path
original_field_id = int(field_dbname[6:]) original_field_id = int(field_dbname[6:])
# If the mapping is not found, keep the current field id.
field_id = id_mapping.get("database_fields", {}).get( field_id = id_mapping.get("database_fields", {}).get(
original_field_id, original_field_id original_field_id, original_field_id
) )
@ -638,6 +686,9 @@ class LocalBaserowListRowsUserServiceType(
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x, import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x,
**kwargs, **kwargs,
): ):
@ -674,7 +725,14 @@ class LocalBaserowListRowsUserServiceType(
] ]
return super().deserialize_property( return super().deserialize_property(
prop_name, value, id_mapping, import_formula=import_formula, **kwargs prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
import_formula=import_formula,
**kwargs,
) )
def dispatch_data( def dispatch_data(
@ -852,6 +910,8 @@ class LocalBaserowGetRowUserServiceType(
return path return path
original_field_id = int(field_dbname[6:]) original_field_id = int(field_dbname[6:])
# If the mapping is not found, keep the current field id.
field_id = id_mapping.get("database_fields", {}).get( field_id = id_mapping.get("database_fields", {}).get(
original_field_id, original_field_id original_field_id, original_field_id
) )
@ -863,6 +923,9 @@ class LocalBaserowGetRowUserServiceType(
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x, import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x,
**kwargs, **kwargs,
): ):
@ -896,7 +959,14 @@ class LocalBaserowGetRowUserServiceType(
return import_formula(value, id_mapping) return import_formula(value, id_mapping)
return super().deserialize_property( return super().deserialize_property(
prop_name, value, id_mapping, import_formula=import_formula, **kwargs prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
import_formula=import_formula,
**kwargs,
) )
def dispatch_transform( def dispatch_transform(
@ -1103,7 +1173,14 @@ class LocalBaserowUpsertRowServiceType(LocalBaserowTableServiceType):
bulk_field_mappings bulk_field_mappings
) )
def serialize_property(self, service: LocalBaserowUpsertRow, prop_name: str): def serialize_property(
self,
service: LocalBaserowUpsertRow,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
""" """
You can customize the behavior of the serialization of a property with this You can customize the behavior of the serialization of a property with this
hook. hook.
@ -1118,10 +1195,20 @@ class LocalBaserowUpsertRowServiceType(LocalBaserowTableServiceType):
for m in service.field_mappings.all() for m in service.field_mappings.all()
] ]
return super().serialize_property(service, prop_name) return super().serialize_property(
service, prop_name, files_zip=files_zip, storage=storage, cache=cache
)
def deserialize_property( def deserialize_property(
self, prop_name: str, value: Any, id_mapping: Dict[str, Any], **kwargs self,
prop_name: str,
value: Any,
id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None,
import_formula: Callable[[str, Dict[str, Any]], str] = lambda x, y: x,
**kwargs,
): ):
""" """
Responsible for deserializing the `field_mappings`, if they're present. Responsible for deserializing the `field_mappings`, if they're present.
@ -1149,9 +1236,20 @@ class LocalBaserowUpsertRowServiceType(LocalBaserowTableServiceType):
for item in value for item in value
] ]
return super().deserialize_property(prop_name, value, id_mapping, **kwargs) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
import_formula=import_formula,
**kwargs,
)
def create_instance_from_serialized(self, serialized_values): def create_instance_from_serialized(
self, serialized_values, files_zip=None, storage=None, cache=None, **kwargs
):
""" """
Responsible for creating the service, and then if `field_mappings` Responsible for creating the service, and then if `field_mappings`
are present, creating them in bulk. are present, creating them in bulk.
@ -1162,7 +1260,13 @@ class LocalBaserowUpsertRowServiceType(LocalBaserowTableServiceType):
field_mappings = serialized_values.pop("field_mappings", []) field_mappings = serialized_values.pop("field_mappings", [])
service = super().create_instance_from_serialized(serialized_values) service = super().create_instance_from_serialized(
serialized_values,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
# Create the field mappings # Create the field mappings
LocalBaserowTableServiceFieldMapping.objects.bulk_create( LocalBaserowTableServiceFieldMapping.objects.bulk_create(
@ -1358,6 +1462,8 @@ class LocalBaserowUpsertRowServiceType(LocalBaserowTableServiceType):
return path return path
original_field_id = int(field_dbname[6:]) original_field_id = int(field_dbname[6:])
# If the mapping is not found, keep the current field id.
field_id = id_mapping.get("database_fields", {}).get( field_id = id_mapping.get("database_fields", {}).get(
original_field_id, original_field_id original_field_id, original_field_id
) )

View file

@ -60,12 +60,20 @@ class AppAuthProviderHandler(BaseAuthProviderHandler):
) )
@classmethod @classmethod
def export_app_auth_provider(cls, app_auth_provider: AppAuthProviderType): def export_app_auth_provider(
cls,
app_auth_provider: AppAuthProviderType,
files_zip=None,
storage=None,
cache=None,
):
""" """
Export an app auth provider. Export an app auth provider.
""" """
return app_auth_provider.get_type().export_serialized(app_auth_provider) return app_auth_provider.get_type().export_serialized(
app_auth_provider, files_zip=files_zip, storage=storage, cache=cache
)
@classmethod @classmethod
def import_app_auth_provider( def import_app_auth_provider(
@ -75,6 +83,7 @@ class AppAuthProviderHandler(BaseAuthProviderHandler):
id_mapping: Dict, id_mapping: Dict,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache=None,
): ):
""" """
Imports a serialized app_auth_provider. Imports a serialized app_auth_provider.
@ -87,7 +96,12 @@ class AppAuthProviderHandler(BaseAuthProviderHandler):
serialized_app_auth_provider["type"] serialized_app_auth_provider["type"]
) )
app_auth_provider = app_auth_provider_type.import_serialized( app_auth_provider = app_auth_provider_type.import_serialized(
user_source, serialized_app_auth_provider, id_mapping user_source,
serialized_app_auth_provider,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
id_mapping["app_auth_providers"][ id_mapping["app_auth_providers"][

View file

@ -40,6 +40,7 @@ class CoreConfig(AppConfig):
formula_runtime_function_registry.register(RuntimeAdd()) formula_runtime_function_registry.register(RuntimeAdd())
from baserow.core.permission_manager import ( from baserow.core.permission_manager import (
AllowIfTemplatePermissionManagerType,
BasicPermissionManagerType, BasicPermissionManagerType,
CorePermissionManagerType, CorePermissionManagerType,
StaffOnlyPermissionManagerType, StaffOnlyPermissionManagerType,
@ -66,6 +67,9 @@ class CoreConfig(AppConfig):
permission_manager_type_registry.register( permission_manager_type_registry.register(
StaffOnlySettingOperationPermissionManagerType() StaffOnlySettingOperationPermissionManagerType()
) )
permission_manager_type_registry.register(
AllowIfTemplatePermissionManagerType()
)
from .object_scopes import ( from .object_scopes import (
ApplicationObjectScopeType, ApplicationObjectScopeType,
@ -390,6 +394,8 @@ class CoreConfig(AppConfig):
plugin_dir.register(HerokuExternalFileStorageConfiguredHealthCheck) plugin_dir.register(HerokuExternalFileStorageConfiguredHealthCheck)
plugin_dir.register(DefaultFileStorageHealthCheck) plugin_dir.register(DefaultFileStorageHealthCheck)
import baserow.core.integrations.receivers # noqa: F403, F401
# noinspection PyPep8Naming # noinspection PyPep8Naming
def start_sync_templates_task_after_migrate(sender, **kwargs): def start_sync_templates_task_after_migrate(sender, **kwargs):

View file

@ -9,7 +9,7 @@ from zipfile import ZIP_DEFLATED, ZipFile
from django.conf import settings from django.conf import settings
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser from django.contrib.auth.models import AbstractUser, AnonymousUser
from django.core.files.storage import Storage, default_storage from django.core.files.storage import Storage, default_storage
from django.db import OperationalError, transaction from django.db import OperationalError, transaction
from django.db.models import Count, Prefetch, Q, QuerySet from django.db.models import Count, Prefetch, Q, QuerySet
@ -293,7 +293,6 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
context: Optional[ContextObject] = None, context: Optional[ContextObject] = None,
include_trash: bool = False, include_trash: bool = False,
raise_permission_exceptions: bool = True, raise_permission_exceptions: bool = True,
allow_if_template: bool = False,
) -> bool: ) -> bool:
""" """
Checks whether a specific Actor has the Permission to execute an Operation Checks whether a specific Actor has the Permission to execute an Operation
@ -321,8 +320,6 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
:param raise_permission_exceptions: Raise an exception when the permission is :param raise_permission_exceptions: Raise an exception when the permission is
disallowed when `True`. Return `False` instead when `False`. disallowed when `True`. Return `False` instead when `False`.
`True` by default. `True` by default.
:param allow_if_template: If true and if the workspace is related to a template,
then True is always returned and no exception will be raised.
:raise PermissionException: If the operation is disallowed. :raise PermissionException: If the operation is disallowed.
:return: `True` if the operation is permitted or `False` if the operation is :return: `True` if the operation is permitted or `False` if the operation is
disallowed AND raise_permission_exceptions is `False`. disallowed AND raise_permission_exceptions is `False`.
@ -331,9 +328,6 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
if settings.DEBUG or settings.TESTS: if settings.DEBUG or settings.TESTS:
self._ensure_context_matches_operation(context, operation_name) self._ensure_context_matches_operation(context, operation_name)
if allow_if_template and workspace and workspace.has_template():
return True
check = PermissionCheck(actor, operation_name, context) check = PermissionCheck(actor, operation_name, context)
allowed = self.check_multiple_permissions( allowed = self.check_multiple_permissions(
@ -438,7 +432,6 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
operation_name: str, operation_name: str,
queryset: QuerySet, queryset: QuerySet,
workspace: Optional[Workspace] = None, workspace: Optional[Workspace] = None,
allow_if_template: Optional[bool] = False,
) -> QuerySet: ) -> QuerySet:
""" """
filters a given queryset according to the actor permissions in the specified filters a given queryset according to the actor permissions in the specified
@ -454,13 +447,11 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
object that are in the same `ObjectScopeType` as the one described in the object that are in the same `ObjectScopeType` as the one described in the
`OperationType` corresponding to the given `operation_name`. `OperationType` corresponding to the given `operation_name`.
:param workspace: An optional workspace into which the operation occurs. :param workspace: An optional workspace into which the operation occurs.
:param allow_if_template: If true and if the workspace is related to a template,
then we don't want to filter on the queryset.
:return: The queryset, potentially filtered. :return: The queryset, potentially filtered.
""" """
if allow_if_template and workspace and workspace.has_template(): if actor is None:
return queryset actor = AnonymousUser
for permission_manager_name in settings.PERMISSION_MANAGERS: for permission_manager_name in settings.PERMISSION_MANAGERS:
permission_manager_type = permission_manager_type_registry.get( permission_manager_type = permission_manager_type_registry.get(
@ -469,10 +460,24 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
if not permission_manager_type.actor_is_supported(actor): if not permission_manager_type.actor_is_supported(actor):
continue continue
queryset = permission_manager_type.filter_queryset( filtered_queryset = permission_manager_type.filter_queryset(
actor, operation_name, queryset, workspace=workspace actor, operation_name, queryset, workspace=workspace
) )
if filtered_queryset is None:
continue
# A permission manager can return a tuple, in which case the second value
# indicates whether it should be the last permission manager to be applied.
# If True, no other permission managers are applied and the queryset
# is returned.
if isinstance(filtered_queryset, tuple):
queryset, stop = filtered_queryset
if stop:
break
else:
queryset = filtered_queryset
return queryset return queryset
def get_workspace_for_update(self, workspace_id: int) -> WorkspaceForUpdate: def get_workspace_for_update(self, workspace_id: int) -> WorkspaceForUpdate:
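The comment added to `filter_queryset` defines a small protocol: a permission manager may return `None` (no opinion), a plain queryset, or a `(queryset, stop)` tuple that makes it the last manager applied. A stripped-down sketch of that loop, using lists instead of querysets and made-up manager names:

class AllowAllManager:
    def filter_queryset(self, actor, operation_name, queryset, workspace=None):
        return None  # no opinion, let the next manager decide


class TemplateManager:
    def filter_queryset(self, actor, operation_name, queryset, workspace=None):
        if workspace == "template":
            return queryset, True  # stop: later managers are skipped


class MemberOnlyManager:
    def filter_queryset(self, actor, operation_name, queryset, workspace=None):
        return [], True  # non-members end up with an empty result


def filter_queryset(managers, actor, operation_name, queryset, workspace=None):
    for manager in managers:
        filtered = manager.filter_queryset(
            actor, operation_name, queryset, workspace=workspace
        )
        if filtered is None:
            continue
        if isinstance(filtered, tuple):
            queryset, stop = filtered
            if stop:
                break
        else:
            queryset = filtered
    return queryset


managers = [AllowAllManager(), TemplateManager(), MemberOnlyManager()]
assert filter_queryset(managers, "anon", "list_pages", [1, 2, 3], workspace="template") == [1, 2, 3]
assert filter_queryset(managers, "anon", "list_pages", [1, 2, 3], workspace="private") == []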

View file

@ -231,8 +231,16 @@ class IntegrationHandler:
queryset=Integration.objects.filter(application=application) queryset=Integration.objects.filter(application=application)
) )
def export_integration(self, integration): def export_integration(
return integration.get_type().export_serialized(integration) self,
integration: Integration,
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
):
return integration.get_type().export_serialized(
integration, files_zip=files_zip, storage=storage, cache=cache
)
def import_integration( def import_integration(
self, self,
@ -248,7 +256,12 @@ class IntegrationHandler:
integration_type = integration_type_registry.get(serialized_integration["type"]) integration_type = integration_type_registry.get(serialized_integration["type"])
integration = integration_type.import_serialized( integration = integration_type.import_serialized(
application, serialized_integration, id_mapping, cache=cache application,
serialized_integration,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
id_mapping["integrations"][serialized_integration["id"]] = integration.id id_mapping["integrations"][serialized_integration["id"]] = integration.id

View file

@ -0,0 +1,12 @@
from django.dispatch import receiver
from baserow.core.signals import application_created
@receiver(application_created)
def execute_integration_post_template_install_hooks(
sender, application, user, **kwargs
):
if application.installed_from_template is not None:
for integration in application.integrations.all():
integration.get_type().after_template_install(user, integration.specific)
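This new receiver connects `application_created` to each integration type's `after_template_install` hook. A self-contained sketch of the same wiring with a stand-in signal and stand-in classes (none of these names are Baserow's real models):

from django.dispatch import Signal, receiver

application_created = Signal()  # stand-in for baserow.core.signals.application_created


class FakeIntegrationType:
    def after_template_install(self, user, integration):
        print(f"post-install hook for {integration!r} triggered by {user!r}")


class FakeIntegration:
    specific = "integration-1"

    def get_type(self):
        return FakeIntegrationType()


class FakeApplication:
    installed_from_template = "project-tracker"
    integrations = [FakeIntegration()]


@receiver(application_created)
def execute_post_template_install_hooks(sender, application, user, **kwargs):
    # Only fire the hooks for applications that were installed from a template.
    if application.installed_from_template is not None:
        for integration in application.integrations:
            integration.get_type().after_template_install(user, integration.specific)


application_created.send(sender=None, application=FakeApplication(), user="alice")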

View file

@ -57,20 +57,43 @@ class IntegrationType(
return values return values
def serialize_property(self, integration: Integration, prop_name: str): def serialize_property(
self,
integration: Integration,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
if prop_name == "order": if prop_name == "order":
return str(integration.order) return str(integration.order)
return super().serialize_property(integration, prop_name) return super().serialize_property(
integration, prop_name, files_zip=files_zip, storage=storage, cache=cache
)
def import_serialized( def import_serialized(
self, self,
parent: Any, parent: Any,
serialized_values: Dict[str, Any], serialized_values: Dict[str, Any],
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip=None,
storage=None,
cache=None, cache=None,
) -> IntegrationSubClass: ) -> IntegrationSubClass:
return super().import_serialized(parent, serialized_values, id_mapping) return super().import_serialized(
parent,
serialized_values,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def after_template_install(self, user: AbstractUser, instance: Integration):
"""
Hook to trigger some post template installation logic.
"""
def get_context_data(self, instance: Integration) -> Optional[Dict]: def get_context_data(self, instance: Integration) -> Optional[Dict]:
""" """

View file

@ -6,9 +6,21 @@ from baserow.core.handler import CoreHandler
class Command(BaseCommand): class Command(BaseCommand):
help = ( help = (
"Synchronizes all the templates stored in the database with the JSON files in " "Synchronizes all the templates stored in the database with the JSON files in "
"the templates directory. This command must be ran everytime a template " "the templates directory. This command must be ran every time a template "
"changes." "changes."
) )
def add_arguments(self, parser):
parser.add_argument(
"search",
type=str,
help="The search pattern to load only some templates.",
)
def handle(self, *args, **options): def handle(self, *args, **options):
CoreHandler().sync_templates() search_glob = options["search"]
if search_glob:
CoreHandler().sync_templates(template_search_glob=search_glob)
else:
CoreHandler().sync_templates()
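Note that `search` is declared as a required positional argument while `handle` still guards with `if search_glob:`. If the intent is an optional filter, `argparse` would usually express that with `nargs="?"`; a sketch under that assumption (the shipped command may declare the argument differently):

from django.core.management.base import BaseCommand

from baserow.core.handler import CoreHandler


class Command(BaseCommand):
    help = "Synchronizes the templates stored in the database with the JSON files."

    def add_arguments(self, parser):
        parser.add_argument(
            "search",
            nargs="?",
            type=str,
            default=None,
            help="Optional glob pattern to only sync matching templates.",
        )

    def handle(self, *args, **options):
        search_glob = options["search"]
        if search_glob:
            CoreHandler().sync_templates(template_search_glob=search_glob)
        else:
            CoreHandler().sync_templates()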

View file

@ -1,5 +1,6 @@
import secrets import secrets
from datetime import datetime, timezone from datetime import datetime, timezone
from functools import lru_cache
from django.conf import settings from django.conf import settings
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
@ -27,6 +28,7 @@ from .mixins import (
ParentWorkspaceTrashableModelMixin, ParentWorkspaceTrashableModelMixin,
PolymorphicContentTypeMixin, PolymorphicContentTypeMixin,
TrashableModelMixin, TrashableModelMixin,
WithRegistry,
) )
from .notifications.models import Notification from .notifications.models import Notification
from .services.models import Service from .services.models import Service
@ -268,6 +270,7 @@ class Workspace(HierarchicalModelMixin, TrashableModelMixin, CreatedAndUpdatedOn
return self.application_set(manager="objects_and_trash") return self.application_set(manager="objects_and_trash")
@lru_cache
def has_template(self): def has_template(self):
return self.template_set.all().exists() return self.template_set.all().exists()
@ -387,6 +390,7 @@ class Application(
OrderableMixin, OrderableMixin,
PolymorphicContentTypeMixin, PolymorphicContentTypeMixin,
GroupToWorkspaceCompatModelMixin, GroupToWorkspaceCompatModelMixin,
WithRegistry,
models.Model, models.Model,
): ):
workspace = models.ForeignKey(Workspace, on_delete=models.CASCADE, null=True) workspace = models.ForeignKey(Workspace, on_delete=models.CASCADE, null=True)
@ -408,6 +412,12 @@ class Application(
class Meta: class Meta:
ordering = ("order",) ordering = ("order",)
@staticmethod
def get_type_registry():
from .registries import application_type_registry
return application_type_registry
@classmethod @classmethod
def get_last_order(cls, workspace): def get_last_order(cls, workspace):
queryset = Application.objects.filter(workspace=workspace) queryset = Application.objects.filter(workspace=workspace)
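`has_template` is now memoised with `functools.lru_cache` applied to a method, so the result is cached per `self` on the class-level function; this is why a test later in this commit calls `workspace_1.has_template.cache_clear()` after creating a template. A standalone illustration of that behaviour:

from functools import lru_cache


class Workspace:
    def __init__(self, has_template_row):
        self._has_template_row = has_template_row
        self.queries = 0

    @lru_cache
    def has_template(self):
        # Stand-in for `self.template_set.all().exists()`; counts the "queries".
        self.queries += 1
        return self._has_template_row


workspace = Workspace(has_template_row=False)
assert workspace.has_template() is False
assert workspace.has_template() is False
assert workspace.queries == 1  # the second call was served from the cache

# The cache lives on the class-level function and is keyed by `self`, so
# clearing it (as the updated test does via `has_template.cache_clear()`)
# clears it for every instance, and cached entries keep a reference to `self`.
workspace._has_template_row = True
workspace.has_template.cache_clear()
assert workspace.has_template() is True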

View file

@ -3,12 +3,19 @@ from typing import List
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
from baserow.core.handler import CoreHandler from baserow.core.handler import CoreHandler
from baserow.core.models import WorkspaceUser from baserow.core.integrations.operations import (
ListIntegrationsApplicationOperationType,
)
from baserow.core.models import Workspace, WorkspaceUser
from baserow.core.notifications.operations import ( from baserow.core.notifications.operations import (
ClearNotificationsOperationType, ClearNotificationsOperationType,
ListNotificationsOperationType, ListNotificationsOperationType,
MarkNotificationAsReadOperationType, MarkNotificationAsReadOperationType,
) )
from baserow.core.user_sources.operations import (
ListUserSourcesApplicationOperationType,
LoginUserSourceOperationType,
)
from .exceptions import ( from .exceptions import (
IsNotAdminError, IsNotAdminError,
@ -22,6 +29,7 @@ from .operations import (
DeleteWorkspaceInvitationOperationType, DeleteWorkspaceInvitationOperationType,
DeleteWorkspaceOperationType, DeleteWorkspaceOperationType,
DeleteWorkspaceUserOperationType, DeleteWorkspaceUserOperationType,
ListApplicationsWorkspaceOperationType,
ListInvitationsWorkspaceOperationType, ListInvitationsWorkspaceOperationType,
ListWorkspacesOperationType, ListWorkspacesOperationType,
ListWorkspaceUsersWorkspaceOperationType, ListWorkspaceUsersWorkspaceOperationType,
@ -32,7 +40,7 @@ from .operations import (
UpdateWorkspaceUserOperationType, UpdateWorkspaceUserOperationType,
) )
from .registries import PermissionManagerType from .registries import PermissionManagerType
from .subjects import UserSubjectType from .subjects import AnonymousUserSubjectType, UserSubjectType
User = get_user_model() User = get_user_model()
@ -88,6 +96,53 @@ class StaffOnlyPermissionManagerType(PermissionManagerType):
} }
class AllowIfTemplatePermissionManagerType(PermissionManagerType):
"""
Allows read operations on templates.
"""
type = "allow_if_template"
supported_actor_types = [UserSubjectType.type, AnonymousUserSubjectType.type]
OPERATION_ALLOWED_ON_TEMPLATES = [
ListApplicationsWorkspaceOperationType.type,
ListIntegrationsApplicationOperationType.type,
ListUserSourcesApplicationOperationType.type,
LoginUserSourceOperationType.type,
]
def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
result = {}
has_template = workspace and workspace.has_template()
for check in checks:
if (
has_template
and check.operation_name in self.OPERATION_ALLOWED_ON_TEMPLATES
):
result[check] = True
return result
def get_permissions_object(self, actor, workspace=None):
return {
"allowed_operations_on_templates": self.OPERATION_ALLOWED_ON_TEMPLATES,
"workspace_template_ids": list(
Workspace.objects.exclude(template=None).values_list("id", flat=True)
),
}
def filter_queryset(
self,
actor,
operation_name,
queryset,
workspace=None,
):
has_template = workspace and workspace.has_template()
if has_template and operation_name in self.OPERATION_ALLOWED_ON_TEMPLATES:
return queryset, True
class WorkspaceMemberOnlyPermissionManagerType(PermissionManagerType): class WorkspaceMemberOnlyPermissionManagerType(PermissionManagerType):
""" """
To be able to operate on a workspace, the user must at least belong To be able to operate on a workspace, the user must at least belong
@ -96,12 +151,42 @@ class WorkspaceMemberOnlyPermissionManagerType(PermissionManagerType):
type = "member" type = "member"
supported_actor_types = [UserSubjectType.type] supported_actor_types = [UserSubjectType.type]
ALWAYS_ALLOWED_OPERATIONS: List[str] = [ actor_cache_key = "_in_workspace_cache"
ALWAYS_ALLOWED_OPERATION_FOR_WORKSPACE_MEMBERS: List[str] = [
ClearNotificationsOperationType.type, ClearNotificationsOperationType.type,
ListNotificationsOperationType.type, ListNotificationsOperationType.type,
MarkNotificationAsReadOperationType.type, MarkNotificationAsReadOperationType.type,
] ]
def is_actor_in_workspace(self, actor, workspace, callback=None):
"""
Checks if an actor is in a workspace. This method caches the result on the actor
to prevent extra queries when it is used multiple times in a row, for instance
when we first check the permission and then filter the queryset.
:param actor: the actor to check.
:param workspace: the workspace to check the actor belongs to.
:param callback: an optional callback to check whether the actor belongs to the
workspace. If not provided, a membership query is made.
"""
# Add cache to prevent another query during the filtering if any
if not hasattr(actor, self.actor_cache_key):
setattr(actor, self.actor_cache_key, {})
if workspace.id not in getattr(actor, self.actor_cache_key):
if callback is not None:
in_workspace = callback()
else:
in_workspace = WorkspaceUser.objects.filter(
user_id=actor.id, workspace_id=workspace.id
).exists()
getattr(actor, self.actor_cache_key)[workspace.id] = in_workspace
return getattr(actor, self.actor_cache_key, {}).get(workspace.id, False)
def check_multiple_permissions(self, checks, workspace=None, include_trash=False): def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
if workspace is None: if workspace is None:
return {} return {}
@ -115,25 +200,42 @@ class WorkspaceMemberOnlyPermissionManagerType(PermissionManagerType):
) )
permission_by_check = {} permission_by_check = {}
def check_workspace(actor):
return lambda: actor.id in user_ids_in_workspace
for check in checks: for check in checks:
if check.actor.id not in user_ids_in_workspace: if self.is_actor_in_workspace(
check.actor, workspace, check_workspace(check.actor)
):
if (
check.operation_name
in self.ALWAYS_ALLOWED_OPERATION_FOR_WORKSPACE_MEMBERS
):
permission_by_check[check] = True
else:
permission_by_check[check] = UserNotInWorkspace(check.actor, workspace) permission_by_check[check] = UserNotInWorkspace(check.actor, workspace)
elif check.operation_name in self.ALWAYS_ALLOWED_OPERATIONS:
permission_by_check[check] = True
return permission_by_check return permission_by_check
def get_permissions_object(self, actor, workspace=None): def get_permissions_object(self, actor, workspace=None):
# Check if the user is a member of this workspace """Check if the user is a member of this workspace"""
if (
workspace if workspace and self.is_actor_in_workspace(actor, workspace):
and WorkspaceUser.objects.filter(
user_id=actor.id, workspace_id=workspace.id
).exists()
):
return None return None
return False return False
def filter_queryset(
self,
actor,
operation_name,
queryset,
workspace=None,
):
if workspace and not self.is_actor_in_workspace(actor, workspace):
return queryset.none(), True
class BasicPermissionManagerType(PermissionManagerType): class BasicPermissionManagerType(PermissionManagerType):
""" """

View file

@ -654,8 +654,6 @@ class PermissionManagerType(abc.ABC, Instance):
:return: The queryset potentially filtered. :return: The queryset potentially filtered.
""" """
return queryset
def get_roles(self) -> List: def get_roles(self) -> List:
""" """
Get all the roles available for your permissions system Get all the roles available for your permissions system

View file

@ -16,8 +16,10 @@ from typing import (
Union, Union,
ValuesView, ValuesView,
) )
from zipfile import ZipFile
from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import Storage
from django.db import models from django.db import models
from rest_framework import serializers from rest_framework import serializers
@ -372,7 +374,7 @@ class EasyImportExportMixin(Generic[T], ABC):
""" """
# Describe the properties to serialize # Describe the properties to serialize
SerializedDict: TypedDict SerializedDict: Type[TypedDict]
# The parent property name for the model # The parent property name for the model
parent_property_name: str parent_property_name: str
@ -383,7 +385,14 @@ class EasyImportExportMixin(Generic[T], ABC):
# The model class to create # The model class to create
model_class: Type[T] model_class: Type[T]
def serialize_property(self, instance: T, prop_name: str) -> Any: def serialize_property(
self,
instance: T,
prop_name: str,
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
) -> Any:
""" """
You can customize the behavior of the serialization of a property with this You can customize the behavior of the serialization of a property with this
hook. hook.
@ -401,6 +410,9 @@ class EasyImportExportMixin(Generic[T], ABC):
def export_serialized( def export_serialized(
self, self,
instance: T, instance: T,
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
""" """
Exports the instance to a serialized dict that can be imported by the Exports the instance to a serialized dict that can be imported by the
@ -413,7 +425,16 @@ class EasyImportExportMixin(Generic[T], ABC):
property_names = self.SerializedDict.__annotations__.keys() property_names = self.SerializedDict.__annotations__.keys()
serialized = self.SerializedDict( serialized = self.SerializedDict(
**{key: self.serialize_property(instance, key) for key in property_names} **{
key: self.serialize_property(
instance,
key,
files_zip=files_zip,
storage=storage,
cache=cache,
)
for key in property_names
}
) )
return serialized return serialized
@ -423,6 +444,9 @@ class EasyImportExportMixin(Generic[T], ABC):
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
**kwargs, **kwargs,
) -> Any: ) -> Any:
""" """
@ -437,7 +461,14 @@ class EasyImportExportMixin(Generic[T], ABC):
return value return value
def create_instance_from_serialized(self, serialized_values: Dict[str, Any]) -> T: def create_instance_from_serialized(
self,
serialized_values: Dict[str, Any],
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
**kwargs,
) -> T:
""" """
Create the instance related to the given serialized values. Create the instance related to the given serialized values.
Allows hooking into instance creation while still having the serialized values. Allows hooking into instance creation while still having the serialized values.
@ -456,6 +487,9 @@ class EasyImportExportMixin(Generic[T], ABC):
parent: Any, parent: Any,
serialized_values: Dict[str, Any], serialized_values: Dict[str, Any],
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict[str, any]] = None,
**kwargs, **kwargs,
) -> T: ) -> T:
""" """
@ -468,7 +502,6 @@ class EasyImportExportMixin(Generic[T], ABC):
:param serialized_values: The dict containing the serialized values. :param serialized_values: The dict containing the serialized values.
:param id_mapping: Used to map object ids from export to newly created :param id_mapping: Used to map object ids from export to newly created
instances. instances.
:param kwargs: extra parameters used to deserialize a property.
:return: The created instance. :return: The created instance.
""" """
@ -479,7 +512,13 @@ class EasyImportExportMixin(Generic[T], ABC):
for name in self.SerializedDict.__annotations__.keys(): for name in self.SerializedDict.__annotations__.keys():
if name in serialized_values and name != f"{self.parent_property_name}_id": if name in serialized_values and name != f"{self.parent_property_name}_id":
deserialized_properties[name] = self.deserialize_property( deserialized_properties[name] = self.deserialize_property(
name, serialized_values[name], id_mapping, **kwargs name,
serialized_values[name],
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
) )
# Remove id key # Remove id key
@ -491,7 +530,13 @@ class EasyImportExportMixin(Generic[T], ABC):
# Add the parent # Add the parent
deserialized_properties[self.parent_property_name] = parent deserialized_properties[self.parent_property_name] = parent
created_instance = self.create_instance_from_serialized(deserialized_properties) created_instance = self.create_instance_from_serialized(
deserialized_properties,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
# Add the created instance to the mapping # Add the created instance to the mapping
id_mapping[self.id_mapping_name][originale_instance_id] = created_instance.id id_mapping[self.id_mapping_name][originale_instance_id] = created_instance.id
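The reason `files_zip`, `storage` and `cache` are now threaded through every (de)serialization hook is that a property may need to move binary content into or out of the export zip rather than emit a JSON-friendly value, and the shared `cache` dict avoids writing the same entry twice. A toy, self-contained sketch of that flow (this class only mimics the hook signatures, it is not Baserow's mixin):

import io
import zipfile


class ToyExporter:
    def serialize_property(self, instance, prop_name, files_zip=None, cache=None):
        if prop_name == "image" and files_zip is not None:
            name = f"files/{instance['id']}.bin"
            if cache is not None and name in cache:
                return name  # already written during this export
            files_zip.writestr(name, instance["image"])
            if cache is not None:
                cache[name] = True
            return name
        return instance[prop_name]

    def deserialize_property(self, prop_name, value, id_mapping, files_zip=None):
        if prop_name == "image" and files_zip is not None:
            return files_zip.read(value)
        return value


exporter = ToyExporter()
instance = {"id": 1, "image": b"PNG..."}
buffer, cache = io.BytesIO(), {}
with zipfile.ZipFile(buffer, "a", zipfile.ZIP_DEFLATED) as files_zip:
    serialized = {
        key: exporter.serialize_property(instance, key, files_zip=files_zip, cache=cache)
        for key in ("id", "image")
    }
with zipfile.ZipFile(buffer, "r") as files_zip:
    restored = {
        key: exporter.deserialize_property(key, value, {}, files_zip=files_zip)
        for key, value in serialized.items()
    }
assert restored == instance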

View file

@ -214,8 +214,19 @@ class ServiceHandler:
return service.get_type().dispatch(service, dispatch_context) return service.get_type().dispatch(service, dispatch_context)
def export_service(self, service): def export_service(
return service.get_type().export_serialized(service) self,
service,
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
):
return service.get_type().export_serialized(
service,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def import_service( def import_service(
self, self,
@ -225,9 +236,16 @@ class ServiceHandler:
import_formula: Optional[Callable[[str, Dict[str, Any]], str]] = None, import_formula: Optional[Callable[[str, Dict[str, Any]], str]] = None,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
): ):
service_type = service_type_registry.get(serialized_service["type"]) service_type = service_type_registry.get(serialized_service["type"])
return service_type.import_serialized( return service_type.import_serialized(
integration, serialized_service, id_mapping, import_formula=import_formula integration,
serialized_service,
id_mapping,
cache=cache,
files_zip=files_zip,
storage=storage,
import_formula=import_formula,
) )

View file

@ -1,8 +1,10 @@
from abc import ABC from abc import ABC
from enum import Enum from enum import Enum
from typing import Any, Dict, Optional, Tuple, Type, TypeVar from typing import Any, Callable, Dict, Optional, Tuple, Type, TypeVar
from zipfile import ZipFile
from django.contrib.auth.models import AbstractUser from django.contrib.auth.models import AbstractUser
from django.core.files.storage import Storage
from rest_framework.exceptions import ValidationError as DRFValidationError from rest_framework.exceptions import ValidationError as DRFValidationError
@ -45,7 +47,7 @@ class ServiceType(
SerializedDict: Type[ServiceDictSubClass] SerializedDict: Type[ServiceDictSubClass]
parent_property_name = "integration" parent_property_name = "integration"
id_mapping_name = "builder_services" id_mapping_name = "services"
# The maximum number of records this service is able to return. # The maximum number of records this service is able to return.
# By default, the maximum is `None`, which is unlimited. # By default, the maximum is `None`, which is unlimited.
@ -229,6 +231,10 @@ class ServiceType(
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Any], id_mapping: Dict[str, Any],
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
import_formula: Callable[[str, Dict[str, Any]], str] = None,
**kwargs, **kwargs,
) -> Any: ) -> Any:
""" """
@ -241,10 +247,18 @@ class ServiceType(
:return: the deserialized version for this property. :return: the deserialized version for this property.
""" """
if "import_formula" not in kwargs: if import_formula is None:
raise ValueError("Missing import formula function.") raise ValueError("Missing import formula function.")
return value return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
ServiceTypeSubClass = TypeVar("ServiceTypeSubClass", bound=ServiceType) ServiceTypeSubClass = TypeVar("ServiceTypeSubClass", bound=ServiceType)
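`deserialize_property` now requires an explicit `import_formula` callable instead of sniffing `kwargs`. A simplified stand-in for such a callable, remapping data source ids inside formulas like `get('data_source.42.field_1')` using the id mapping; the real implementation in `baserow.contrib.builder.formula_importer` is more involved:

import re
from typing import Any, Dict


def import_formula(formula: str, id_mapping: Dict[str, Any]) -> str:
    mapping = id_mapping.get("builder_data_sources", {})

    def replace(match):
        old_id = int(match.group(1))
        return f"data_source.{mapping.get(old_id, old_id)}"

    return re.sub(r"data_source\.(\d+)", replace, formula)


id_mapping = {"builder_data_sources": {42: 7}}
assert import_formula("get('data_source.42.field_1')", id_mapping) == "get('data_source.7.field_1')"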

View file

@ -24,10 +24,16 @@ SerializationProcessorScope = Union["Database", "Table", "Builder"]
class PermissionCheck(NamedTuple): class PermissionCheck(NamedTuple):
actor: Actor original_actor: Actor
operation_name: str operation_name: str
context: Optional[ContextObject] = None context: Optional[ContextObject] = None
@property
def actor(self) -> Actor:
from django.contrib.auth.models import AnonymousUser
return self.original_actor or AnonymousUser
class PermissionObjectResult(TypedDict): class PermissionObjectResult(TypedDict):
name: str name: str

View file

@ -267,24 +267,40 @@ class UserSourceHandler:
queryset=UserSource.objects.filter(application=application) queryset=UserSource.objects.filter(application=application)
) )
def export_user_source(self, user_source): def export_user_source(
return user_source.get_type().export_serialized(user_source) self,
user_source,
files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
):
return user_source.get_type().export_serialized(
user_source,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def import_user_source( def import_user_source(
self, self,
application, application,
serialized_user_source, serialized_user_source,
id_mapping, id_mapping,
cache: Optional[Dict] = None,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
): ):
if "user_sources" not in id_mapping: if "user_sources" not in id_mapping:
id_mapping["user_sources"] = {} id_mapping["user_sources"] = {}
user_source_type = user_source_type_registry.get(serialized_user_source["type"]) user_source_type = user_source_type_registry.get(serialized_user_source["type"])
user_source = user_source_type.import_serialized( user_source = user_source_type.import_serialized(
application, serialized_user_source, id_mapping, cache=cache application,
serialized_user_source,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
id_mapping["user_sources"][serialized_user_source["id"]] = user_source.id id_mapping["user_sources"][serialized_user_source["id"]] = user_source.id

View file

@ -97,25 +97,39 @@ class UserSourceType(
user_source.auth_providers.all().delete() user_source.auth_providers.all().delete()
self.after_create(user, user_source, values) self.after_create(user, user_source, values)
def serialize_property(self, instance: UserSource, prop_name: str): def serialize_property(
self,
instance: UserSource,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
if prop_name == "order": if prop_name == "order":
return str(instance.order) return str(instance.order)
if prop_name == "auth_providers": if prop_name == "auth_providers":
return [ return [
ap.get_type().export_serialized(ap) ap.get_type().export_serialized(
ap, files_zip=files_zip, storage=storage, cache=cache
)
for ap in AppAuthProviderHandler.list_app_auth_providers_for_user_source( for ap in AppAuthProviderHandler.list_app_auth_providers_for_user_source(
instance instance
) )
] ]
return super().serialize_property(instance, prop_name) return super().serialize_property(
instance, prop_name, files_zip=files_zip, storage=storage, cache=cache
)
def deserialize_property( def deserialize_property(
self, self,
prop_name: str, prop_name: str,
value: Any, value: Any,
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
) -> Any: ) -> Any:
if prop_name == "integration_id" and value: if prop_name == "integration_id" and value:
@ -129,13 +143,24 @@ class UserSourceType(
else: else:
return value return value
return super().deserialize_property(prop_name, value, id_mapping, **kwargs) return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def import_serialized( def import_serialized(
self, self,
parent: Any, parent: Any,
serialized_values: Dict[str, Any], serialized_values: Dict[str, Any],
id_mapping: Dict[str, Dict[int, int]], id_mapping: Dict[str, Dict[int, int]],
files_zip=None,
storage=None,
cache=None,
**kwargs, **kwargs,
) -> UserSourceSubClass: ) -> UserSourceSubClass:
""" """
@ -153,7 +178,12 @@ class UserSourceType(
auth_provider["type"] auth_provider["type"]
) )
auth_provider_type.import_serialized( auth_provider_type.import_serialized(
created_user_source, auth_provider, id_mapping created_user_source,
auth_provider,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
) )
return created_user_source return created_user_source

View file

@ -1,5 +1,5 @@
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import Iterable, Optional, Type, cast from typing import Dict, Iterable, Optional, Type, cast
from zipfile import ZipFile from zipfile import ZipFile
from django.core.files.storage import Storage from django.core.files.storage import Storage
@ -134,6 +134,7 @@ class WorkflowActionHandler(ABC):
workflow_action, workflow_action,
files_zip: Optional[ZipFile] = None, files_zip: Optional[ZipFile] = None,
storage: Optional[Storage] = None, storage: Optional[Storage] = None,
cache: Optional[Dict] = None,
): ):
""" """
Serializes the given workflow action. Serializes the given workflow action.
@ -144,4 +145,6 @@ class WorkflowActionHandler(ABC):
:return: The serialized version. :return: The serialized version.
""" """
return workflow_action.get_type().export_serialized(workflow_action) return workflow_action.get_type().export_serialized(
workflow_action, files_zip=files_zip, storage=storage, cache=cache
)

View file

@ -11,7 +11,14 @@ from baserow.core.workflow_actions.types import WorkflowActionDictSubClass
class WorkflowActionType(Instance, ModelInstanceMixin, EasyImportExportMixin, ABC): class WorkflowActionType(Instance, ModelInstanceMixin, EasyImportExportMixin, ABC):
SerializedDict: Type[WorkflowActionDictSubClass] SerializedDict: Type[WorkflowActionDictSubClass]
def serialize_property(self, workflow_action: WorkflowAction, prop_name: str): def serialize_property(
self,
workflow_action: WorkflowAction,
prop_name: str,
files_zip=None,
storage=None,
cache=None,
):
""" """
You can customize the behavior of the serialization of a property with this You can customize the behavior of the serialization of a property with this
hook. hook.
@ -20,7 +27,13 @@ class WorkflowActionType(Instance, ModelInstanceMixin, EasyImportExportMixin, AB
if prop_name == "type": if prop_name == "type":
return self.type return self.type
return getattr(workflow_action, prop_name) return super().serialize_property(
workflow_action,
prop_name,
files_zip=files_zip,
storage=storage,
cache=cache,
)
def prepare_values( def prepare_values(
self, self,

View file

@ -1,4 +1,5 @@
from baserow.contrib.integrations.local_baserow.models import LocalBaserowIntegration from baserow.contrib.integrations.local_baserow.models import LocalBaserowIntegration
from baserow.core.integrations.registries import integration_type_registry
class IntegrationFixtures: class IntegrationFixtures:
@ -12,6 +13,10 @@ class IntegrationFixtures:
integration = self.create_integration(LocalBaserowIntegration, **kwargs) integration = self.create_integration(LocalBaserowIntegration, **kwargs)
return integration return integration
def create_integration_with_first_type(self, **kwargs):
first_type = list(integration_type_registry.get_all())[0]
return self.create_integration(first_type.model_class, **kwargs)
def create_integration(self, model_class, user=None, application=None, **kwargs): def create_integration(self, model_class, user=None, application=None, **kwargs):
if not application: if not application:
if user is None: if user is None:

File diff suppressed because it is too large

View file

@ -98,7 +98,6 @@ def test_list_applications(api_client, data_fixture, django_assert_num_queries):
assert args[1] == ListApplicationsWorkspaceOperationType.type assert args[1] == ListApplicationsWorkspaceOperationType.type
assert isinstance(args[2], QuerySet) assert isinstance(args[2], QuerySet)
assert kwargs["workspace"] == workspace_1 assert kwargs["workspace"] == workspace_1
assert kwargs["allow_if_template"] is True
assert response_json[0]["id"] == application_1.id assert response_json[0]["id"] == application_1.id
assert response_json[0]["type"] == "database" assert response_json[0]["type"] == "database"
@ -149,6 +148,7 @@ def test_list_applications(api_client, data_fixture, django_assert_num_queries):
assert response.status_code == HTTP_401_UNAUTHORIZED assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=workspace_1) data_fixture.create_template(workspace=workspace_1)
workspace_1.has_template.cache_clear()
url = reverse("api:applications:list", kwargs={"workspace_id": workspace_1.id}) url = reverse("api:applications:list", kwargs={"workspace_id": workspace_1.id})
response = api_client.get(url) response = api_client.get(url)
assert response.status_code == HTTP_200_OK assert response.status_code == HTTP_200_OK
@ -189,11 +189,37 @@ def test_list_applications(api_client, data_fixture, django_assert_num_queries):
HTTP_AUTHORIZATION=f"JWT {token}", HTTP_AUTHORIZATION=f"JWT {token}",
) )
assert response.status_code == HTTP_200_OK assert response.status_code == HTTP_200_OK
assert len(query_for_n_tables.captured_queries) == len(
# the n+1 case should have fewer or equal queries (because of some caching)
assert len(query_for_n_tables.captured_queries) >= len(
query_for_n_plus_one_tables.captured_queries query_for_n_plus_one_tables.captured_queries
) )
@pytest.mark.django_db(transaction=True)
def test_list_applications_with_permissions(api_client, data_fixture):
user, token = data_fixture.create_user_and_token(
email="test@test.nl", password="password", first_name="Test1"
)
workspace_1 = data_fixture.create_workspace(user=user)
workspace_2 = data_fixture.create_workspace(user=user)
database_1 = data_fixture.create_database_application(
workspace=workspace_1, order=1
)
database_2 = data_fixture.create_database_application(
workspace=workspace_2, order=1
)
response = api_client.get(
reverse("api:applications:list"), **{"HTTP_AUTHORIZATION": f"JWT {token}"}
)
response_json = response.json()
assert len(response_json) == 2
assert [a["id"] for a in response_json] == [database_1.id, database_2.id]
@pytest.mark.django_db @pytest.mark.django_db
def test_create_application(api_client, data_fixture): def test_create_application(api_client, data_fixture):
user, token = data_fixture.create_user_and_token() user, token = data_fixture.create_user_and_token()

View file

@ -20,13 +20,13 @@ def test_get_integrations(api_client, data_fixture):
database = data_fixture.create_database_application(workspace=workspace) database = data_fixture.create_database_application(workspace=workspace)
data_fixture.create_database_table(database=database) data_fixture.create_database_table(database=database)
integration1 = data_fixture.create_local_baserow_integration( integration1 = data_fixture.create_local_baserow_integration(
application=application application=application, authorized_user=user
) )
integration2 = data_fixture.create_local_baserow_integration( integration2 = data_fixture.create_local_baserow_integration(
application=application application=application, authorized_user=user
) )
integration3 = data_fixture.create_local_baserow_integration( integration3 = data_fixture.create_local_baserow_integration(
application=application application=application, authorized_user=user
) )
data_fixture.create_local_baserow_integration() data_fixture.create_local_baserow_integration()

View file

@ -77,7 +77,6 @@ def test_list_applications(
assert args[1] == ListApplicationsWorkspaceOperationType.type assert args[1] == ListApplicationsWorkspaceOperationType.type
assert isinstance(args[2], QuerySet) assert isinstance(args[2], QuerySet)
assert kwargs["workspace"] == workspace_1 assert kwargs["workspace"] == workspace_1
assert kwargs["allow_if_template"] is True
assert response_json[0]["id"] == application_1.id assert response_json[0]["id"] == application_1.id
assert response_json[0]["type"] == "database" assert response_json[0]["type"] == "database"

View file

@ -214,7 +214,6 @@ def test_get_data_sources(data_fixture, stub_check_permissions):
queryset, queryset,
workspace=None, workspace=None,
context=None, context=None,
allow_if_template=False,
): ):
return queryset.exclude(id=data_source1.id) return queryset.exclude(id=data_source1.id)

View file

@ -97,7 +97,6 @@ def test_get_domains_partial_permissions(data_fixture, stub_check_permissions):
queryset, queryset,
workspace=None, workspace=None,
context=None, context=None,
allow_if_template=False,
): ):
return queryset.exclude(id=domain_without_access.id) return queryset.exclude(id=domain_without_access.id)

View file

@ -195,7 +195,6 @@ def test_get_elements(data_fixture, stub_check_permissions):
queryset, queryset,
workspace=None, workspace=None,
context=None, context=None,
allow_if_template=False,
): ):
return queryset.exclude(id=element1.id) return queryset.exclude(id=element1.id)

View file

@ -1,4 +1,9 @@
from collections import defaultdict from collections import defaultdict
from io import BytesIO
from tempfile import tempdir
from zipfile import ZIP_DEFLATED, ZipFile
from django.core.files.storage import FileSystemStorage
import pytest import pytest
from rest_framework.exceptions import ValidationError from rest_framework.exceptions import ValidationError
@ -7,6 +12,7 @@ from baserow.contrib.builder.elements.element_types import (
CheckboxElementType, CheckboxElementType,
DropdownElementType, DropdownElementType,
IFrameElementType, IFrameElementType,
ImageElementType,
InputTextElementType, InputTextElementType,
) )
from baserow.contrib.builder.elements.handler import ElementHandler from baserow.contrib.builder.elements.handler import ElementHandler
@ -19,6 +25,7 @@ from baserow.contrib.builder.elements.models import (
DropdownElementOption, DropdownElementOption,
HeadingElement, HeadingElement,
IFrameElement, IFrameElement,
ImageElement,
InputTextElement, InputTextElement,
LinkElement, LinkElement,
) )
@ -28,6 +35,7 @@ from baserow.contrib.builder.elements.registries import (
) )
from baserow.contrib.builder.elements.service import ElementService from baserow.contrib.builder.elements.service import ElementService
from baserow.contrib.builder.pages.service import PageService from baserow.contrib.builder.pages.service import PageService
from baserow.core.user_files.handler import UserFileHandler
from baserow.core.utils import MirrorDict from baserow.core.utils import MirrorDict
@ -336,3 +344,47 @@ def test_iframe_element_import_export_formula(data_fixture):
expected_formula = f"get('data_source.{data_source_2.id}.field_1')" expected_formula = f"get('data_source.{data_source_2.id}.field_1')"
assert imported_element.url == expected_formula assert imported_element.url == expected_formula
assert imported_element.embed == expected_formula assert imported_element.embed == expected_formula
@pytest.mark.django_db
def test_image_element_import_export(data_fixture, fake):
user = data_fixture.create_user()
page = data_fixture.create_builder_page()
data_source_1 = data_fixture.create_builder_local_baserow_get_row_data_source()
data_source_2 = data_fixture.create_builder_local_baserow_get_row_data_source()
element_type = ImageElementType()
zip_buffer = BytesIO()
storage = FileSystemStorage(location=str(tempdir), base_url="http://localhost")
image_file = UserFileHandler().upload_user_file(
user, "test.jpg", BytesIO(fake.image()), storage=storage
)
element_to_export = data_fixture.create_builder_element(
ImageElement,
image_source_type="upload",
image_file=image_file,
image_url=f"get('data_source.{data_source_1.id}.field_1')",
)
with ZipFile(zip_buffer, "a", ZIP_DEFLATED, False) as zip_file:
serialized = element_type.export_serialized(
element_to_export, files_zip=zip_file, storage=storage
)
# After applying the ID mapping the imported formula should have updated
# the data source IDs
id_mapping = {"builder_data_sources": {data_source_1.id: data_source_2.id}}
# Let's check that the file is actually imported from the zip_file
image_file.delete()
with ZipFile(zip_buffer, "r", ZIP_DEFLATED, False) as files_zip:
imported_element = element_type.import_serialized(
page, serialized, id_mapping, files_zip=files_zip, storage=storage
)
expected_formula = f"get('data_source.{data_source_2.id}.field_1')"
assert imported_element.image_url == expected_formula
assert imported_element.image_file_id != element_to_export.image_file_id

View file

@ -0,0 +1,266 @@
from django.contrib.auth.models import AnonymousUser
from django.test.utils import override_settings
import pytest
from baserow.contrib.builder.data_sources.models import DataSource
from baserow.contrib.builder.data_sources.operations import (
DispatchDataSourceOperationType,
ListDataSourcesPageOperationType,
)
from baserow.contrib.builder.elements.models import Element
from baserow.contrib.builder.elements.operations import ListElementsPageOperationType
from baserow.contrib.builder.operations import ListPagesBuilderOperationType
from baserow.contrib.builder.pages.models import Page
from baserow.contrib.builder.workflow_actions.models import BuilderWorkflowAction
from baserow.contrib.builder.workflow_actions.operations import (
ListBuilderWorkflowActionsPageOperationType,
)
from baserow.core.handler import CoreHandler
from baserow.core.types import PermissionCheck
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"role",
"basic",
]
)
def test_allow_if_template_permission_manager(data_fixture):
buser = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=buser)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_builder_application(workspace=workspace_1)
page_1 = data_fixture.create_builder_page(builder=application_1)
element_1 = data_fixture.create_builder_text_element(page=page_1)
workflow_action_1 = data_fixture.create_local_baserow_update_row_workflow_action(
element=element_1, page=page_1
)
data_source_1 = data_fixture.create_builder_local_baserow_get_row_data_source(
builder=application_1
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_builder_application(workspace=workspace_2)
page_2 = data_fixture.create_builder_page(builder=application_2)
element_2 = data_fixture.create_builder_text_element(page=page_2)
workflow_action_2 = data_fixture.create_local_baserow_update_row_workflow_action(
element=element_2, page=page_2
)
data_source_2 = data_fixture.create_builder_local_baserow_get_row_data_source(
builder=application_2
)
template = [
workspace_2,
application_2,
page_2,
element_2,
workflow_action_2,
data_source_2,
]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListPagesBuilderOperationType.type, application_1),
(ListElementsPageOperationType.type, page_1),
(ListBuilderWorkflowActionsPageOperationType.type, page_1),
(DispatchDataSourceOperationType.type, data_source_1),
(ListDataSourcesPageOperationType.type, application_1),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_1 = CoreHandler().check_multiple_permissions(checks, workspace_1)
list_result_1 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_1.get(c, None),
)
for c in checks
]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListPagesBuilderOperationType.type, application_2),
(ListElementsPageOperationType.type, page_2),
(ListBuilderWorkflowActionsPageOperationType.type, page_2),
(DispatchDataSourceOperationType.type, data_source_2),
(ListDataSourcesPageOperationType.type, application_2),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_2 = CoreHandler().check_multiple_permissions(checks, workspace_2)
list_result_2 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_2.get(c, None),
)
for c in checks
]
list_result = list_result_1 + list_result_2
assert list_result == [
("Auth user", "builder.list_pages", "Not a template", False),
("Auth user", "builder.page.list_elements", "Not a template", False),
("Auth user", "builder.page.list_workflow_actions", "Not a template", False),
("Auth user", "builder.page.data_source.dispatch", "Not a template", False),
("Auth user", "builder.page.list_data_sources", "Not a template", False),
("Anonymous", "builder.list_pages", "Not a template", False),
("Anonymous", "builder.page.list_elements", "Not a template", False),
("Anonymous", "builder.page.list_workflow_actions", "Not a template", False),
("Anonymous", "builder.page.data_source.dispatch", "Not a template", False),
("Anonymous", "builder.page.list_data_sources", "Not a template", False),
("Auth user", "builder.list_pages", "template", True),
("Auth user", "builder.page.list_elements", "template", True),
("Auth user", "builder.page.list_workflow_actions", "template", True),
("Auth user", "builder.page.data_source.dispatch", "template", True),
("Auth user", "builder.page.list_data_sources", "template", True),
("Anonymous", "builder.list_pages", "template", True),
("Anonymous", "builder.page.list_elements", "template", True),
("Anonymous", "builder.page.list_workflow_actions", "template", True),
("Anonymous", "builder.page.data_source.dispatch", "template", True),
("Anonymous", "builder.page.list_data_sources", "template", True),
]
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"role",
"basic",
]
)
def test_allow_if_template_permission_manager_filter_queryset(data_fixture):
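# filter_queryset should hide everything in the plain foreign workspace and expose
# the template workspace's pages, elements, workflow actions and data sources.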
user = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=user)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_builder_application(workspace=workspace_1)
page_1 = data_fixture.create_builder_page(builder=application_1)
element_1 = data_fixture.create_builder_text_element(page=page_1)
workflow_action_1 = data_fixture.create_local_baserow_update_row_workflow_action(
element=element_1, page=page_1
)
data_source_1 = data_fixture.create_builder_local_baserow_get_row_data_source(
page=page_1
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_builder_application(workspace=workspace_2)
page_2 = data_fixture.create_builder_page(builder=application_2)
element_2 = data_fixture.create_builder_text_element(page=page_2)
workflow_action_2 = data_fixture.create_local_baserow_update_row_workflow_action(
element=element_2, page=page_2
)
data_source_2 = data_fixture.create_builder_local_baserow_get_row_data_source(
page=page_2
)
tests_w1 = [
(
ListPagesBuilderOperationType.type,
Page.objects.filter(builder__workspace=workspace_1),
),
(
ListElementsPageOperationType.type,
Element.objects.filter(page__builder__workspace=workspace_1),
),
(
ListBuilderWorkflowActionsPageOperationType.type,
BuilderWorkflowAction.objects.filter(page__builder__workspace=workspace_1),
),
(
ListDataSourcesPageOperationType.type,
DataSource.objects.filter(page__builder__workspace=workspace_1),
),
]
for operation_name, queryset in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_1,
)
]
)
== []
)
tests_w1 = [
(
ListPagesBuilderOperationType.type,
Page.objects.filter(builder__workspace=workspace_2),
[page_2.id],
),
(
ListElementsPageOperationType.type,
Element.objects.filter(page__builder__workspace=workspace_2),
[element_2.id],
),
(
ListBuilderWorkflowActionsPageOperationType.type,
BuilderWorkflowAction.objects.filter(page__builder__workspace=workspace_2),
[workflow_action_2.id],
),
(
ListDataSourcesPageOperationType.type,
DataSource.objects.filter(page__builder__workspace=workspace_2),
[data_source_2.id],
),
]
for operation_name, queryset, expected in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_2,
)
]
)
== expected
), operation_name

View file

@@ -224,12 +224,16 @@ def test_refresh_data_source_returns_value_from_id_mapping(mock_deserialize):
# id_mapping is valid but value is empty
(
"",
- {"builder_data_sources": {"foo": "bar"}},
+ {
+ "builder_data_sources": {"foo": "bar"},
+ },
),
# value is valid but id_mapping doesn't have matching value
(
"foo",
- {"builder_data_sources": {"baz": "bar"}},
+ {
+ "builder_data_sources": {"baz": "bar"},
+ },
),
],
)
@@ -246,4 +250,6 @@ def test_refresh_data_source_returns_value_from_super_method(
result = action.deserialize_property(*args)
assert result is mock_result
- mock_deserialize.assert_called_once_with(*args)
+ mock_deserialize.assert_called_once_with(
+ *args, files_zip=None, cache=None, storage=None
+ )

View file

@@ -85,6 +85,7 @@ def test_list_fields(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=table_1.database.workspace)
+ table_1.database.workspace.has_template.cache_clear()
url = reverse("api:database:fields:list", kwargs={"table_id": table_1.id})
response = api_client.get(url)
assert response.status_code == HTTP_200_OK

View file

@@ -1746,7 +1746,7 @@ def test_patch_form_view_field_options_conditions_create_num_queries(
@pytest.mark.django_db
def test_patch_form_view_field_options_conditions_update_num_queries(
- api_client, data_fixture, django_assert_num_queries
+ api_client, data_fixture, django_assert_num_queries, bypass_check_permissions
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
@@ -1905,7 +1905,7 @@ def test_patch_form_view_field_options_conditions_update_num_queries(
@pytest.mark.django_db
def test_patch_form_view_field_options_conditions_delete_num_queries(
- api_client, data_fixture, django_assert_max_num_queries
+ api_client, data_fixture, django_assert_max_num_queries, bypass_check_permissions
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
@@ -1984,7 +1984,7 @@ def test_patch_form_view_field_options_conditions_delete_num_queries(
@pytest.mark.django_db
def test_patch_form_view_field_options_condition_groups_delete_num_queries(
- api_client, data_fixture, django_assert_max_num_queries
+ api_client, data_fixture, django_assert_max_num_queries, bypass_check_permissions
):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
@@ -2687,7 +2687,7 @@ def test_user_can_update_form_to_receive_notification(api_client, data_fixture):
@pytest.mark.django_db()
def test_loading_form_views_does_not_increase_the_number_of_queries(
- api_client, data_fixture
+ api_client, data_fixture, bypass_check_permissions
):
user, token = data_fixture.create_user_and_token()

View file

@@ -115,6 +115,7 @@ def test_list_rows(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=gallery.table.database.workspace)
+ gallery.table.database.workspace.has_template.cache_clear()
url = reverse("api:database:views:gallery:list", kwargs={"view_id": gallery.id})
response = api_client.get(url)
assert response.status_code == HTTP_200_OK

View file

@@ -203,6 +203,7 @@ def test_list_rows(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=grid.table.database.workspace)
+ grid.table.database.workspace.has_template.cache_clear()
url = reverse("api:database:views:grid:list", kwargs={"view_id": grid.id})
response = api_client.get(url)
assert response.status_code == HTTP_200_OK

View file

@@ -90,6 +90,7 @@ def test_list_views(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=table_1.database.workspace)
+ table_1.database.workspace.has_template.cache_clear()
url = reverse("api:database:views:list", kwargs={"table_id": table_1.id})
response = api_client.get(url)
assert response.status_code == HTTP_200_OK
@@ -511,6 +512,7 @@ def test_get_view_field_options_as_template(api_client, data_fixture):
assert response.status_code == HTTP_401_UNAUTHORIZED
data_fixture.create_template(workspace=grid.table.database.workspace)
+ grid.table.database.workspace.has_template.cache_clear()
url = reverse("api:database:views:field_options", kwargs={"view_id": grid.id})
response = api_client.get(url)
assert response.status_code == HTTP_200_OK

View file

@@ -23,12 +23,30 @@ def test_escape_query():
def test_get_default_search_mode_for_table_with_tsvectors_supported():
mock_table = Mock(tsvectors_are_supported=True)
+ mock_table.database = Mock()
+ mock_table.database.workspace = Mock()
+ mock_table.database.workspace.has_template = lambda: False
assert (
SearchHandler.get_default_search_mode_for_table(mock_table)
== SearchModes.MODE_FT_WITH_COUNT
)
+ def test_get_default_search_mode_for_table_with_tsvectors_for_templates():
+ mock_table = Mock(tsvectors_are_supported=True)
+ mock_table.database = Mock()
+ mock_table.database.workspace = Mock()
+ mock_table.database.workspace.has_template = lambda: True
+ assert (
+ SearchHandler.get_default_search_mode_for_table(mock_table)
+ == SearchModes.MODE_COMPAT
+ )
def test_get_default_search_mode_for_table_with_tsvectors_unsupported():
mock_table = Mock(tsvectors_are_supported=False)
assert (

View file

@@ -0,0 +1,341 @@
from django.contrib.auth.models import AnonymousUser
from django.test.utils import override_settings
import pytest
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.operations import ListFieldsOperationType
from baserow.contrib.database.operations import ListTablesDatabaseTableOperationType
from baserow.contrib.database.rows.operations import ReadDatabaseRowOperationType
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.table.operations import ListRowsDatabaseTableOperationType
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.models import View, ViewDecoration
from baserow.contrib.database.views.operations import (
ListAggregationsViewOperationType,
ListViewDecorationOperationType,
ListViewsOperationType,
ReadViewOperationType,
)
from baserow.core.handler import CoreHandler
from baserow.core.types import PermissionCheck
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"role",
"basic",
]
)
def test_allow_if_template_permission_manager(data_fixture):
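# Mirrors the builder test above: read operations on tables, fields, rows and views
# are denied in a plain foreign workspace but allowed once the workspace is a template.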
buser = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=buser)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_database_application(workspace=workspace_1)
table_1, (field_1,), (row_1,) = data_fixture.build_table(
database=application_1,
columns=[
("Name", "number"),
],
rows=[
[1],
],
)
view_1 = data_fixture.create_grid_view(table=table_1)
decoration_1 = data_fixture.create_view_decoration(view=view_1)
ViewHandler().update_field_options(
view=view_1,
field_options={
field_1.id: {
"aggregation_type": "sum",
"aggregation_raw_type": "sum",
}
},
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_database_application(workspace=workspace_2)
table_2, (field_2,), (row_2,) = data_fixture.build_table(
database=application_2,
columns=[
("Name", "number"),
],
rows=[
[1],
],
)
view_2 = data_fixture.create_grid_view(table=table_2)
decoration_2 = data_fixture.create_view_decoration(view=view_2)
ViewHandler().update_field_options(
view=view_2,
field_options={
field_2.id: {
"aggregation_type": "sum",
"aggregation_raw_type": "sum",
}
},
)
template = [
workspace_2,
application_2,
table_2,
field_2,
row_2,
view_2,
]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListTablesDatabaseTableOperationType.type, application_1),
(ListFieldsOperationType.type, table_1),
(ListRowsDatabaseTableOperationType.type, table_1),
(ListViewsOperationType.type, table_1),
(ReadDatabaseRowOperationType.type, row_1),
(ReadViewOperationType.type, view_1),
(ListViewDecorationOperationType.type, view_1),
(ListAggregationsViewOperationType.type, view_1),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_1 = CoreHandler().check_multiple_permissions(checks, workspace_1)
list_result_1 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_1.get(c, None),
)
for c in checks
]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListTablesDatabaseTableOperationType.type, application_1),
(ListFieldsOperationType.type, table_2),
(ListRowsDatabaseTableOperationType.type, table_2),
(ListViewsOperationType.type, table_2),
(ReadDatabaseRowOperationType.type, row_2),
(ReadViewOperationType.type, view_2),
(ListViewDecorationOperationType.type, view_2),
(ListAggregationsViewOperationType.type, view_2),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_2 = CoreHandler().check_multiple_permissions(checks, workspace_2)
list_result_2 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_2.get(c, None),
)
for c in checks
]
list_result = list_result_1 + list_result_2
assert list_result == [
("Auth user", "database.list_tables", "Not a template", False),
("Auth user", "database.table.list_fields", "Not a template", False),
("Auth user", "database.table.list_rows", "Not a template", False),
("Auth user", "database.table.list_views", "Not a template", False),
("Auth user", "database.table.read_row", "Not a template", False),
("Auth user", "database.table.view.read", "Not a template", False),
("Auth user", "database.table.view.list_decoration", "Not a template", False),
("Auth user", "database.table.view.list_aggregations", "Not a template", False),
("Anonymous", "database.list_tables", "Not a template", False),
("Anonymous", "database.table.list_fields", "Not a template", False),
("Anonymous", "database.table.list_rows", "Not a template", False),
("Anonymous", "database.table.list_views", "Not a template", False),
("Anonymous", "database.table.read_row", "Not a template", False),
("Anonymous", "database.table.view.read", "Not a template", False),
("Anonymous", "database.table.view.list_decoration", "Not a template", False),
("Anonymous", "database.table.view.list_aggregations", "Not a template", False),
("Auth user", "database.list_tables", "Not a template", True),
("Auth user", "database.table.list_fields", "template", True),
("Auth user", "database.table.list_rows", "template", True),
("Auth user", "database.table.list_views", "template", True),
("Auth user", "database.table.read_row", "template", True),
("Auth user", "database.table.view.read", "template", True),
("Auth user", "database.table.view.list_decoration", "template", True),
("Auth user", "database.table.view.list_aggregations", "template", True),
("Anonymous", "database.list_tables", "Not a template", True),
("Anonymous", "database.table.list_fields", "template", True),
("Anonymous", "database.table.list_rows", "template", True),
("Anonymous", "database.table.list_views", "template", True),
("Anonymous", "database.table.read_row", "template", True),
("Anonymous", "database.table.view.read", "template", True),
("Anonymous", "database.table.view.list_decoration", "template", True),
("Anonymous", "database.table.view.list_aggregations", "template", True),
]
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"role",
"basic",
]
)
def test_allow_if_template_permission_manager_filter_queryset(data_fixture):
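# filter_queryset should return nothing for the plain foreign workspace and only the
# template workspace's tables, fields, rows, views and decorations.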
user = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=user)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_database_application(workspace=workspace_1)
table_1, (field_1,), (row_1,) = data_fixture.build_table(
database=application_1,
columns=[
("Name", "number"),
],
rows=[
[1],
],
)
view_1 = data_fixture.create_grid_view(table=table_1)
decoration_1 = data_fixture.create_view_decoration(view=view_1)
ViewHandler().update_field_options(
view=view_1,
field_options={
field_1.id: {
"aggregation_type": "sum",
"aggregation_raw_type": "sum",
}
},
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_database_application(workspace=workspace_2)
table_2, (field_2,), (row_2,) = data_fixture.build_table(
database=application_2,
columns=[
("Name", "number"),
],
rows=[
[1],
],
)
view_2 = data_fixture.create_grid_view(table=table_2)
decoration_2 = data_fixture.create_view_decoration(view=view_2)
ViewHandler().update_field_options(
view=view_2,
field_options={
field_2.id: {
"aggregation_type": "sum",
"aggregation_raw_type": "sum",
}
},
)
model_1 = table_1.get_model()
model_2 = table_2.get_model()
tests_w1 = [
(
ListTablesDatabaseTableOperationType.type,
Table.objects.filter(database__workspace=workspace_1),
),
(
ListFieldsOperationType.type,
Field.objects.filter(table__database__workspace=workspace_1),
),
(
ListRowsDatabaseTableOperationType.type,
model_1.objects.all(),
),
(
ListViewsOperationType.type,
View.objects.filter(table__database__workspace=workspace_1),
),
(
ListViewDecorationOperationType.type,
ViewDecoration.objects.filter(view__table__database__workspace=workspace_1),
),
]
for operation_name, queryset in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_1,
)
]
)
== []
)
tests_w1 = [
(
ListTablesDatabaseTableOperationType.type,
Table.objects.filter(database__workspace=workspace_2),
[table_2.id],
),
(
ListFieldsOperationType.type,
Field.objects.filter(table__database__workspace=workspace_2),
[field_2.id],
),
(ListRowsDatabaseTableOperationType.type, model_2.objects.all(), [row_2.id]),
(
ListViewsOperationType.type,
View.objects.filter(table__database__workspace=workspace_2),
[view_2.id],
),
(
ListViewDecorationOperationType.type,
ViewDecoration.objects.filter(view__table__database__workspace=workspace_2),
[decoration_2.id],
),
]
for operation_name, queryset, expected in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_2,
)
]
)
== expected
), operation_name

View file

@@ -79,7 +79,7 @@ def test_get_local_baserow_databases(data_fixture):
@pytest.mark.django_db
def test_get_local_baserow_databases_number_of_queries(
- data_fixture, django_assert_num_queries
+ data_fixture, django_assert_num_queries, bypass_check_permissions
):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
@@ -156,7 +156,9 @@ def test_get_local_baserow_databases_performance(data_fixture, api_client, profi
@pytest.mark.django_db
- def test_get_integrations_serializer(api_client, data_fixture):
+ def test_get_integrations_serializer(
+ api_client, data_fixture, bypass_check_permissions
+ ):
user, token = data_fixture.create_user_and_token()
workspace = data_fixture.create_workspace(user=user)
application = data_fixture.create_builder_application(workspace=workspace)

View file

@@ -200,7 +200,6 @@ def test_get_integrations(data_fixture, stub_check_permissions):
queryset,
workspace=None,
context=None,
- allow_if_template=False,
):
return queryset.exclude(id=integration1.id)

View file

@@ -6,6 +6,7 @@ from django.test.utils import override_settings
import pytest
+ from baserow.contrib.database.models import Database
from baserow.contrib.database.operations import ListTablesDatabaseTableOperationType
from baserow.core.exceptions import (
PermissionDenied,
@@ -13,9 +14,16 @@ from baserow.core.exceptions import (
UserNotInWorkspace,
)
from baserow.core.handler import CoreHandler
+ from baserow.core.integrations.models import Integration
+ from baserow.core.integrations.operations import (
+ ListIntegrationsApplicationOperationType,
+ UpdateIntegrationOperationType,
+ )
from baserow.core.operations import (
+ CreateApplicationsWorkspaceOperationType,
ListApplicationsWorkspaceOperationType,
ListWorkspacesOperationType,
+ UpdateApplicationOperationType,
UpdateSettingsOperationType,
UpdateWorkspaceOperationType,
)
@@ -33,6 +41,12 @@ from baserow.core.registries import (
permission_manager_type_registry,
)
from baserow.core.types import PermissionCheck
+ from baserow.core.user_sources.models import UserSource
+ from baserow.core.user_sources.operations import (
+ ListUserSourcesApplicationOperationType,
+ LoginUserSourceOperationType,
+ UpdateUserSourceOperationType,
+ )
@pytest.mark.django_db
@@ -41,6 +55,7 @@ from baserow.core.types import PermissionCheck
"core",
"setting_operation",
"staff",
+ "allow_if_template",
"member",
"token",
"basic",
@@ -138,7 +153,6 @@ def test_check_permissions(data_fixture):
UpdateWorkspaceOperationType.type,
workspace=user_workspace_2.workspace,
context=user_workspace_2.workspace,
- allow_if_template=True,
)
assert CoreHandler().check_permissions(
@@ -146,7 +160,6 @@ def test_check_permissions(data_fixture):
UpdateWorkspaceOperationType.type,
workspace=user_workspace_3.workspace,
context=user_workspace_3.workspace,
- allow_if_template=True,
)
with pytest.raises(PermissionDenied):
@@ -154,11 +167,95 @@ def test_check_permissions(data_fixture):
AnonymousUser(),
ListApplicationsWorkspaceOperationType.type,
workspace=user_workspace.workspace,
- allow_if_template=True,
context=user_workspace.workspace,
)
@pytest.mark.django_db(transaction=True)
def test_workspace_member_permission_manager(data_fixture, django_assert_num_queries):
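# For a workspace the user is a member of, this manager stays undecided (None);
# for a foreign workspace it rejects every check and filters querysets down to
# nothing without issuing any additional queries.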
user = data_fixture.create_user(
email="test@test.nl", password="password", first_name="Test1"
)
workspace_1 = data_fixture.create_workspace(user=user)
workspace_2 = data_fixture.create_workspace()
database_1 = data_fixture.create_database_application(
workspace=workspace_1, order=1
)
database_2 = data_fixture.create_database_application(
workspace=workspace_2, order=1
)
perm_manager = WorkspaceMemberOnlyPermissionManagerType()
checks = [
PermissionCheck(user, UpdateApplicationOperationType.type, database_1),
PermissionCheck(user, ListApplicationsWorkspaceOperationType.type, workspace_1),
]
result = perm_manager.check_multiple_permissions(checks, workspace_1)
list_result = [
(
c.actor.username,
c.operation_name,
(
result.get(c, None)
if not isinstance(result.get(c, None), Exception)
else False
),
)
for c in checks
]
assert list_result == [
("test@test.nl", "application.update", None),
("test@test.nl", "workspace.list_applications", None),
]
checks = [
PermissionCheck(user, UpdateApplicationOperationType.type, database_2),
PermissionCheck(user, ListApplicationsWorkspaceOperationType.type, workspace_2),
]
result = perm_manager.check_multiple_permissions(checks, workspace_2)
list_result = [
(
c.actor.username,
c.operation_name,
(
result.get(c, None)
if not isinstance(result.get(c, None), Exception)
else False
),
)
for c in checks
]
assert list_result == [
("test@test.nl", "application.update", False),
("test@test.nl", "workspace.list_applications", False),
]
try:
perm_manager.check_permissions(
user, ListApplicationsWorkspaceOperationType.type, workspace_2, workspace_2
)
except Exception: # noqa:W0718
...
with django_assert_num_queries(0):
filtered = perm_manager.filter_queryset(
user,
ListApplicationsWorkspaceOperationType.type,
Database.objects.all(),
workspace_2,
)
assert isinstance(filtered, tuple)
assert len(filtered[0]) == 0
@pytest.mark.django_db
def test_check_multiple_permissions(data_fixture):
admin = data_fixture.create_user(is_staff=True)
@@ -390,6 +487,33 @@ def test_get_permissions(data_fixture):
"is_staff": True,
},
},
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{"name": "member", "permissions": False}, {"name": "member", "permissions": False},
] ]
@ -412,6 +536,33 @@ def test_get_permissions(data_fixture):
"is_staff": True, "is_staff": True,
}, },
}, },
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{
"name": "basic",
"permissions": {
@@ -451,6 +602,33 @@ def test_get_permissions(data_fixture):
"is_staff": False,
},
},
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{"name": "member", "permissions": False}, {"name": "member", "permissions": False},
] ]
@ -473,6 +651,33 @@ def test_get_permissions(data_fixture):
"is_staff": False, "is_staff": False,
}, },
}, },
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{
"name": "basic",
"permissions": {
@@ -512,6 +717,33 @@ def test_get_permissions(data_fixture):
"is_staff": False,
},
},
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{"name": "member", "permissions": False}, {"name": "member", "permissions": False},
] ]
@ -534,6 +766,33 @@ def test_get_permissions(data_fixture):
"is_staff": False, "is_staff": False,
}, },
}, },
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{
"name": "basic",
"permissions": {
@@ -573,10 +832,269 @@ def test_get_permissions(data_fixture):
"is_staff": False,
},
},
{
"name": "allow_if_template",
"permissions": {
"allowed_operations_on_templates": [
"workspace.list_applications",
"application.list_integrations",
"application.list_user_sources",
"application.user_source.login",
"database.list_tables",
"database.table.list_fields",
"database.table.list_rows",
"database.table.list_views",
"database.table.read_row",
"database.table.view.read",
"database.table.view.read_field_options",
"database.table.view.list_decoration",
"database.table.view.list_aggregations",
"database.table.view.read_aggregation",
"builder.list_pages",
"builder.page.list_elements",
"builder.page.list_workflow_actions",
"builder.page.data_source.dispatch",
"builder.page.list_data_sources",
],
"workspace_template_ids": [],
},
},
{"name": "member", "permissions": False}, {"name": "member", "permissions": False},
] ]
@pytest.mark.django_db
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"basic",
]
)
def test_allow_if_template_permission_manager(data_fixture):
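# Read-only operations (listing applications, integrations and user sources, and
# logging in via a user source) are allowed on a template workspace, while write
# operations stay denied even there.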
buser = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=buser)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_builder_application(workspace=workspace_1)
integration_1 = data_fixture.create_integration_with_first_type(
application=application_1
)
user_source_1 = data_fixture.create_user_source_with_first_type(
application=application_1
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_builder_application(workspace=workspace_2)
integration_2 = data_fixture.create_integration_with_first_type(
application=application_2
)
user_source_2 = data_fixture.create_user_source_with_first_type(
application=application_2
)
template = [workspace_2, application_2, integration_2, user_source_2]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListApplicationsWorkspaceOperationType.type, workspace_1),
(ListIntegrationsApplicationOperationType.type, application_1),
(ListUserSourcesApplicationOperationType.type, application_1),
(LoginUserSourceOperationType.type, user_source_1),
(CreateApplicationsWorkspaceOperationType.type, workspace_1),
(UpdateIntegrationOperationType.type, integration_1),
(UpdateUserSourceOperationType.type, user_source_1),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_1 = CoreHandler().check_multiple_permissions(checks, workspace_1)
list_result_1 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_1.get(c, None),
)
for c in checks
]
checks = []
for user in [
buser,
AnonymousUser(),
]:
for perm_type, scope in [
(ListApplicationsWorkspaceOperationType.type, workspace_2),
(ListIntegrationsApplicationOperationType.type, application_2),
(ListUserSourcesApplicationOperationType.type, application_2),
(LoginUserSourceOperationType.type, user_source_2),
(CreateApplicationsWorkspaceOperationType.type, workspace_2),
(UpdateIntegrationOperationType.type, integration_2),
(UpdateUserSourceOperationType.type, user_source_2),
]:
checks.append(PermissionCheck(user, perm_type, scope))
result_2 = CoreHandler().check_multiple_permissions(checks, workspace_2)
list_result_2 = [
(
c.actor.username or "Anonymous",
c.operation_name,
"template" if c.context in template else "Not a template",
result_2.get(c, None),
)
for c in checks
]
list_result = list_result_1 + list_result_2
assert list_result == [
("Auth user", "workspace.list_applications", "Not a template", False),
("Auth user", "application.list_integrations", "Not a template", False),
("Auth user", "application.list_user_sources", "Not a template", False),
("Auth user", "application.user_source.login", "Not a template", False),
("Auth user", "workspace.create_application", "Not a template", False),
("Auth user", "application.integration.update", "Not a template", False),
("Auth user", "application.user_source.update", "Not a template", False),
("Anonymous", "workspace.list_applications", "Not a template", False),
("Anonymous", "application.list_integrations", "Not a template", False),
("Anonymous", "application.list_user_sources", "Not a template", False),
("Anonymous", "application.user_source.login", "Not a template", False),
("Anonymous", "workspace.create_application", "Not a template", False),
("Anonymous", "application.integration.update", "Not a template", False),
("Anonymous", "application.user_source.update", "Not a template", False),
("Auth user", "workspace.list_applications", "template", True),
("Auth user", "application.list_integrations", "template", True),
("Auth user", "application.list_user_sources", "template", True),
("Auth user", "application.user_source.login", "template", True),
("Auth user", "workspace.create_application", "template", False),
("Auth user", "application.integration.update", "template", False),
("Auth user", "application.user_source.update", "template", False),
("Anonymous", "workspace.list_applications", "template", True),
("Anonymous", "application.list_integrations", "template", True),
("Anonymous", "application.list_user_sources", "template", True),
("Anonymous", "application.user_source.login", "template", True),
("Anonymous", "workspace.create_application", "template", False),
("Anonymous", "application.integration.update", "template", False),
("Anonymous", "application.user_source.update", "template", False),
]
@pytest.mark.django_db
@override_settings(
PERMISSION_MANAGERS=[
"core",
"setting_operation",
"staff",
"allow_if_template",
"member",
"token",
"basic",
]
)
def test_allow_if_template_permission_manager_filter_queryset(data_fixture):
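# filter_queryset should expose the template workspace's applications, integrations
# and user sources, and nothing from the plain foreign workspace.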
user = data_fixture.create_user(username="Auth user")
workspace_0 = data_fixture.create_workspace(user=user)
workspace_1 = data_fixture.create_workspace()
application_1 = data_fixture.create_builder_application(workspace=workspace_1)
integration_1 = data_fixture.create_integration_with_first_type(
application=application_1
)
user_source_1 = data_fixture.create_user_source_with_first_type(
application=application_1
)
workspace_2 = data_fixture.create_workspace()
data_fixture.create_template(workspace=workspace_2)
application_2 = data_fixture.create_builder_application(workspace=workspace_2)
integration_2 = data_fixture.create_integration_with_first_type(
application=application_2
)
user_source_2 = data_fixture.create_user_source_with_first_type(
application=application_2
)
tests_w1 = [
(
ListApplicationsWorkspaceOperationType.type,
workspace_1.application_set.all(),
),
(
ListIntegrationsApplicationOperationType.type,
Integration.objects.filter(application__workspace=workspace_1),
),
(
ListUserSourcesApplicationOperationType.type,
UserSource.objects.filter(application__workspace=workspace_1),
),
]
for operation_name, queryset in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_1,
)
]
)
== []
)
tests_w1 = [
(
ListApplicationsWorkspaceOperationType.type,
workspace_2.application_set.all(),
[application_2.id],
),
(
ListIntegrationsApplicationOperationType.type,
Integration.objects.filter(application__workspace=workspace_2),
[integration_2.id],
),
(
ListUserSourcesApplicationOperationType.type,
UserSource.objects.filter(application__workspace=workspace_2),
[user_source_2.id],
),
]
for operation_name, queryset, expected in tests_w1:
assert (
sorted(
[
a.id
for a in CoreHandler().filter_queryset(
user,
operation_name,
queryset,
workspace=workspace_2,
)
]
)
== expected
), operation_name
@pytest.mark.django_db @pytest.mark.django_db
def test_all_operations_are_registered(): def test_all_operations_are_registered():
def get_all_subclasses(cls): def get_all_subclasses(cls):

View file

@@ -5,13 +5,15 @@ from baserow.core.service import CoreService
@pytest.mark.django_db
- def test_list_applications_in_workspace(data_fixture, django_assert_num_queries):
+ def test_list_applications_in_workspace(
+ data_fixture, django_assert_num_queries, bypass_check_permissions
+ ):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
application = data_fixture.create_database_application(workspace=workspace)
application_in_another_workspace = data_fixture.create_database_application()
- with django_assert_num_queries(2):
+ with django_assert_num_queries(1):
applications = CoreService().list_applications_in_workspace(user, workspace.id)
specific_applications = specific_iterator(applications)

View file

@@ -243,7 +243,6 @@ def test_get_user_sources(data_fixture, stub_check_permissions):
queryset,
workspace=None,
context=None,
- allow_if_template=False,
):
return queryset.exclude(id=user_source1.id)

View file

@@ -0,0 +1,7 @@
{
"type": "feature",
"message": "Templates are now compatible with application builder",
"issue_number": 2387,
"bullet_points": [],
"created_at": "2024-04-11"
}

View file

@@ -74,12 +74,23 @@ class AuthFormElementType(ElementType):
prop_name: str,
value: Any,
id_mapping: Dict[str, Any],
+ files_zip=None,
+ storage=None,
+ cache=None,
**kwargs,
) -> Any:
if prop_name == "user_source_id" and value:
return id_mapping["user_sources"][value]
- return super().deserialize_property(prop_name, value, id_mapping)
+ return super().deserialize_property(
+ prop_name,
+ value,
+ id_mapping,
+ files_zip=files_zip,
+ storage=storage,
+ cache=cache,
+ **kwargs,
+ )
def get_pytest_params(self, pytest_data_fixture):
return {

View file

@@ -172,6 +172,37 @@ class LocalBaserowPasswordAppAuthProviderType(AppAuthProviderType):
return bool(auth_provider.password_field_id)
def deserialize_property(
self,
prop_name: str,
value: Any,
id_mapping: Dict[str, Dict[int, int]],
files_zip=None,
storage=None,
cache=None,
**kwargs,
) -> Any:
"""
Map password field id.
"""
if (
prop_name == "password_field_id"
and value
and "database_fields" in id_mapping
):
return id_mapping["database_fields"][value]
return super().deserialize_property(
prop_name,
value,
id_mapping,
files_zip=files_zip,
storage=storage,
cache=cache,
**kwargs,
)
def authenticate(
self,
auth_provider: AuthProviderModelSubClass,

View file

@@ -290,22 +290,33 @@ class LocalBaserowUserSourceType(UserSourceType):
prop_name: str,
value: Any,
id_mapping: Dict[str, Dict[int, int]],
+ files_zip=None,
+ storage=None,
+ cache=None,
**kwargs,
) -> Any:
"""
Map table, email_field and name_field ids.
"""
- if prop_name == "table_id":
-     return id_mapping.get("database_tables", {}).get(value, value)
+ if prop_name == "table_id" and value and "database_tables" in id_mapping:
+     return id_mapping["database_tables"][value]
- if prop_name == "email_field_id":
-     return id_mapping.get("database_fields", {}).get(value, value)
+ if prop_name == "email_field_id" and value and "database_fields" in id_mapping:
+     return id_mapping["database_fields"][value]
- if prop_name == "name_field_id":
-     return id_mapping.get("database_fields", {}).get(value, value)
+ if prop_name == "name_field_id" and value and "database_fields" in id_mapping:
+     return id_mapping["database_fields"][value]
- return super().deserialize_property(prop_name, value, id_mapping, **kwargs)
+ return super().deserialize_property(
+ prop_name,
+ value,
+ id_mapping,
+ files_zip=files_zip,
+ storage=storage,
+ cache=cache,
+ **kwargs,
+ )
def get_user_model(self, user_source):
# Use table handler to exclude trashed table

View file

@@ -452,7 +452,8 @@ class RoleAssignmentHandler:
for actor in actors:
workspace_level_role = self.get_role_by_uid(
- user_permissions_by_id[actor.id], use_fallback=True
+ user_permissions_by_id.get(actor.id, NO_ACCESS_ROLE_UID),
+ use_fallback=True,
)
if workspace_level_role.uid == NO_ROLE_LOW_PRIORITY_ROLE_UID:
# Low priority role -> Use team role or NO_ACCESS if no team role

View file

@@ -319,7 +319,7 @@ class RolePermissionManagerType(PermissionManagerType):
"""
if workspace is None or not self.is_enabled(workspace):
- return queryset
+ return
operation_type = operation_type_registry.get(operation_name)

View file

@@ -1,3 +1,5 @@
+ from collections import defaultdict
from django.urls import reverse
import pytest
@@ -5,8 +7,10 @@ from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.core.user_sources.exceptions import UserSourceImproperlyConfigured
+ from baserow.core.user_sources.handler import UserSourceHandler
from baserow.core.user_sources.registries import user_source_type_registry
from baserow.core.user_sources.service import UserSourceService
+ from baserow.core.utils import MirrorDict
from baserow_enterprise.integrations.local_baserow.models import (
LocalBaserowPasswordAppAuthProvider,
)
@@ -317,3 +321,68 @@ def test_local_baserow_user_source_authentication_improperly_configured(
user_source_type.authenticate(
user_source, email="test@baserow.io", password="super not secret"
)
@pytest.mark.django_db
def test_import_local_baserow_password_app_auth_provider(data_fixture):
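# The serialized payload references ids 42-44; id_mapping remaps them onto the
# freshly created integration, table and fields, so the imported auth provider
# must end up pointing at the real password field.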
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
application = data_fixture.create_builder_application(workspace=workspace)
database = data_fixture.create_database_application(workspace=workspace)
integration = data_fixture.create_local_baserow_integration(
application=application, user=user
)
table_from_same_workspace1, fields, rows = data_fixture.build_table(
user=user,
database=database,
columns=[
("Email", "text"),
("Name", "text"),
("Password", "password"),
],
rows=[
["test@baserow.io", "Test", "password"],
],
)
email_field, name_field, password_field = fields
TO_IMPORT = {
"email_field_id": 42,
"id": 28,
"integration_id": 42,
"name": "Test name",
"name_field_id": 43,
"order": "1.00000000000000000000",
"table_id": 42,
"type": "local_baserow",
"auth_providers": [
{
"id": 42,
"type": "local_baserow_password",
"domain": None,
"enabled": True,
"password_field_id": 44,
}
],
}
id_mapping = defaultdict(MirrorDict)
id_mapping["integrations"] = {42: integration.id}
id_mapping["database_tables"] = {42: table_from_same_workspace1.id}
id_mapping["database_fields"] = {
42: email_field.id,
43: name_field.id,
44: password_field.id,
}
imported_instance = UserSourceHandler().import_user_source(
application, TO_IMPORT, id_mapping
)
assert (
imported_instance.auth_providers.first().specific.password_field_id
== password_field.id
)

View file

@@ -883,42 +883,42 @@ def test_check_multiple_permissions(data_fixture, enterprise_data_fixture):
checks = [
PermissionCheck(
- actor=user_2,
+ original_actor=user_2,
operation_name=ReadApplicationOperationType.type,
context=database1,
),
PermissionCheck(
- actor=user_2,
+ original_actor=user_2,
operation_name=ReadDatabaseTableOperationType.type,
context=table12,
),
PermissionCheck(
- actor=user_2,
+ original_actor=user_2,
operation_name=ReadDatabaseTableOperationType.type,
context=table21,
),
PermissionCheck(
- actor=user_3,
+ original_actor=user_3,
operation_name=DeleteApplicationOperationType.type,
context=workspace,
),
PermissionCheck(
- actor=user_3,
+ original_actor=user_3,
operation_name=ReadApplicationOperationType.type,
context=database2,
),
PermissionCheck(
- actor=user_4,
+ original_actor=user_4,
operation_name=ReadApplicationOperationType.type,
context=database1,
),
PermissionCheck(
- actor=user_4,
+ original_actor=user_4,
operation_name=ReadDatabaseTableOperationType.type,
context=table12,
),
PermissionCheck(
- actor=user_4,
+ original_actor=user_4,
operation_name=ReadDatabaseTableOperationType.type,
context=table21,
),
@@ -1753,14 +1753,16 @@ def test_check_multiple_permissions_perf(
for op in db_op:
checks.append(
PermissionCheck(
- actor=user, operation_name=op.type, context=db.application_ptr
+ original_actor=user,
+ operation_name=op.type,
+ context=db.application_ptr,
)
)
for table in tables:
for op in table_op:
checks.append(
PermissionCheck(
- actor=user, operation_name=op.type, context=table
+ original_actor=user, operation_name=op.type, context=table
)
)

View file

@@ -92,7 +92,7 @@ export default {
return this.$registry.get('userSource', this.selectedUserSource.type)
},
isAuthenticated() {
- return this.$store.getters['userSourceUser/isAuthenticated']
+ return this.$store.getters['userSourceUser/isAuthenticated'](this.builder)
},
loginOptions() {
if (!this.selectedUserSourceType) {
@@ -131,7 +131,9 @@ export default {
}),
async onLogin(event) {
if (this.isAuthenticated) {
- await this.$store.dispatch('userSourceUser/logoff')
+ await this.$store.dispatch('userSourceUser/logoff', {
+ application: this.builder,
+ })
}
this.$v.$touch()
@@ -143,6 +145,7 @@ export default {
this.hideError()
try {
await this.$store.dispatch('userSourceUser/authenticate', {
+ application: this.builder,
userSource: this.selectedUserSource,
credentials: {
email: this.values.email,

View file

@@ -169,7 +169,6 @@ class CalendarViewView(APIView):
ListRowsDatabaseTableOperationType.type,
workspace=workspace,
context=view.table,
- allow_if_template=True,
)
date_field = view.date_field

View file

@@ -226,7 +226,6 @@ class KanbanViewView(APIView):
ListRowsDatabaseTableOperationType.type,
workspace=workspace,
context=view.table,
- allow_if_template=True,
)
single_select_option_field = view.single_select_field

View file

@@ -77,6 +77,8 @@ def test_list_without_valid_premium_license(api_client, premium_data_fixture):
premium_data_fixture.create_template(workspace=calendar.table.database.workspace)
+ calendar.table.database.workspace.has_template.cache_clear()
response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
assert response.status_code == HTTP_200_OK
@@ -276,9 +278,11 @@ def test_list_all_rows(api_client, premium_data_fixture):
for datetime in datetimes:
model.objects.create(
**{
- f"field_{date_field.id}": datetime.replace(tzinfo=timezone.utc)
- if datetime is not None
- else None,
+ f"field_{date_field.id}": (
+ datetime.replace(tzinfo=timezone.utc)
+ if datetime is not None
+ else None
+ ),
}
)

View file

@@ -39,6 +39,7 @@ def test_list_without_valid_premium_license(api_client, premium_data_fixture):
# The kanban view should work if it's a template.
premium_data_fixture.create_template(workspace=kanban.table.database.workspace)
+ kanban.table.database.workspace.has_template.cache_clear()
url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id})
response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
assert response.status_code == HTTP_200_OK

View file

@@ -118,9 +118,18 @@ def test_row_comment_can_only_be_updated_by_author(premium_data_fixture):
other_user = premium_data_fixture.create_user(
first_name="other_user", has_active_premium_license=True
)
+ other_user_in_same_workspace = premium_data_fixture.create_user(
+ first_name="other_user_same_workspace", has_active_premium_license=True
+ )
table, fields, rows = premium_data_fixture.build_table(
columns=[("text", "text")], rows=["first row"], user=user
)
+ CoreHandler().add_user_to_workspace(
+ table.database.workspace, other_user_in_same_workspace
+ )
message = premium_data_fixture.create_comment_message_from_plain_text("Test")
with freeze_time("2020-01-01 12:00"):
@@ -135,7 +144,9 @@ def test_row_comment_can_only_be_updated_by_author(premium_data_fixture):
CoreHandler().add_user_to_workspace(table.database.workspace, other_user)
with pytest.raises(UserNotRowCommentAuthorException):
- RowCommentHandler.update_comment(other_user, c, updated_message)
+ RowCommentHandler.update_comment(
+ other_user_in_same_workspace, c, updated_message
+ )
with freeze_time("2020-01-01 12:01"):
updated_comment = RowCommentHandler.update_comment(user, c, updated_message)
@@ -185,9 +196,18 @@ def test_row_comment_can_only_be_deleted_by_author(premium_data_fixture):
other_user = premium_data_fixture.create_user(
first_name="other_user", has_active_premium_license=True
)
+ other_user_in_same_workspace = premium_data_fixture.create_user(
+ first_name="other_user_same_workspace", has_active_premium_license=True
+ )
table, fields, rows = premium_data_fixture.build_table(
columns=[("text", "text")], rows=["first row"], user=user
)
+ CoreHandler().add_user_to_workspace(
+ table.database.workspace, other_user_in_same_workspace
+ )
message = premium_data_fixture.create_comment_message_from_plain_text("Test")
with freeze_time("2020-01-01 12:00"):
@@ -196,10 +216,8 @@ def test_row_comment_can_only_be_deleted_by_author(premium_data_fixture):
with pytest.raises(UserNotInWorkspace):
RowCommentHandler.delete_comment(other_user, c)
- CoreHandler().add_user_to_workspace(table.database.workspace, other_user)
with pytest.raises(UserNotRowCommentAuthorException):
- RowCommentHandler.delete_comment(other_user, c)
+ RowCommentHandler.delete_comment(other_user_in_same_workspace, c)
with freeze_time("2020-01-01 12:01"):
RowCommentHandler.delete_comment(user, c)

View file

@ -2,6 +2,8 @@ import { ApplicationType } from '@baserow/modules/core/applicationTypes'
import BuilderForm from '@baserow/modules/builder/components/form/BuilderForm' import BuilderForm from '@baserow/modules/builder/components/form/BuilderForm'
import SidebarComponentBuilder from '@baserow/modules/builder/components/sidebar/SidebarComponentBuilder' import SidebarComponentBuilder from '@baserow/modules/builder/components/sidebar/SidebarComponentBuilder'
import { populatePage } from '@baserow/modules/builder/store/page' import { populatePage } from '@baserow/modules/builder/store/page'
import PageTemplate from '@baserow/modules/builder/components/page/PageTemplate'
import PageTemplateSidebar from '@baserow/modules/builder/components/page/PageTemplateSidebar'
export class BuilderApplicationType extends ApplicationType { export class BuilderApplicationType extends ApplicationType {
static getType() { static getType() {
@ -34,6 +36,14 @@ export class BuilderApplicationType extends ApplicationType {
return SidebarComponentBuilder return SidebarComponentBuilder
} }
getTemplateSidebarComponent() {
return PageTemplateSidebar
}
getTemplatesPageComponent() {
return PageTemplate
}
populate(application) { populate(application) {
const values = super.populate(application) const values = super.populate(application)
values.pages = values.pages.map(populatePage) values.pages = values.pages.map(populatePage)
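
Note: the builder application type now exposes two extra component hooks, getTemplateSidebarComponent and getTemplatesPageComponent, so the templates modal can render a builder page preview. A minimal sketch of how a consumer of the application type registry might resolve them; the registry argument and the null fallback are illustrative assumptions, not taken from the diff:

// Illustrative only: resolve the template components an application type provides.
// `registry` mirrors this.$registry; the `?? null` fallback is an assumption.
function resolveTemplateComponents(registry, applicationTypeName) {
  const applicationType = registry.get('application', applicationTypeName)
  return {
    sidebar: applicationType.getTemplateSidebarComponent?.() ?? null,
    page: applicationType.getTemplatesPageComponent?.() ?? null,
  }
}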

View file

@ -18,11 +18,12 @@
</div> </div>
<InsertElementButton <InsertElementButton
v-show="isSelected" v-show="isSelected"
v-if="canCreate"
class="element-preview__insert element-preview__insert--top" class="element-preview__insert element-preview__insert--top"
@click="showAddElementModal(PLACEMENTS.BEFORE)" @click="showAddElementModal(PLACEMENTS.BEFORE)"
/> />
<ElementMenu <ElementMenu
v-if="isSelected" v-if="isSelected && canUpdate"
:placements="placements" :placements="placements"
:placements-disabled="placementsDisabled" :placements-disabled="placementsDisabled"
:is-duplicating="isDuplicating" :is-duplicating="isDuplicating"
@ -37,10 +38,12 @@
<InsertElementButton <InsertElementButton
v-show="isSelected" v-show="isSelected"
v-if="canCreate"
class="element-preview__insert element-preview__insert--bottom" class="element-preview__insert element-preview__insert--bottom"
@click="showAddElementModal(PLACEMENTS.AFTER)" @click="showAddElementModal(PLACEMENTS.AFTER)"
/> />
<AddElementModal <AddElementModal
v-if="canCreate"
ref="addElementModal" ref="addElementModal"
:element-types-allowed="elementTypesAllowed" :element-types-allowed="elementTypesAllowed"
:page="page" :page="page"
@ -71,7 +74,7 @@ export default {
InsertElementButton, InsertElementButton,
PageElement, PageElement,
}, },
inject: ['builder', 'page', 'mode'], inject: ['workspace', 'builder', 'page', 'mode'],
props: { props: {
element: { element: {
type: Object, type: Object,
@ -106,9 +109,13 @@ export default {
isVisible() { isVisible() {
switch (this.element.visibility) { switch (this.element.visibility) {
case 'logged-in': case 'logged-in':
return this.$store.getters['userSourceUser/isAuthenticated'] return this.$store.getters['userSourceUser/isAuthenticated'](
this.builder
)
case 'not-logged': case 'not-logged':
return !this.$store.getters['userSourceUser/isAuthenticated'] return !this.$store.getters['userSourceUser/isAuthenticated'](
this.builder
)
default: default:
return true return true
} }
@ -138,6 +145,20 @@ export default {
elementTypesAllowed() { elementTypesAllowed() {
return this.parentElementType?.childElementTypes || null return this.parentElementType?.childElementTypes || null
}, },
canCreate() {
return this.$hasPermission(
'builder.page.create_element',
this.page,
this.workspace.id
)
},
canUpdate() {
return this.$hasPermission(
'builder.page.element.update',
this.element,
this.workspace.id
)
},
isSelected() { isSelected() {
return this.element.id === this.elementSelected?.id return this.element.id === this.elementSelected?.id
}, },
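
Note: the insert button, element menu and add-element modal are now gated on workspace permissions. A rough standalone sketch of the two guards added here, with hasPermission standing in for the Vue $hasPermission plugin call used in the component:

// Sketch of the guards added to the element preview (hasPermission is a stand-in).
function canCreateElement(hasPermission, page, workspace) {
  return hasPermission('builder.page.create_element', page, workspace.id)
}

function canUpdateElement(hasPermission, element, workspace) {
  return hasPermission('builder.page.element.update', element, workspace.id)
}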

View file

@ -51,6 +51,11 @@
v-sortable="{ v-sortable="{
id: field.id, id: field.id,
update: orderFields, update: orderFields,
enabled: $hasPermission(
'builder.page.element.update',
element,
workspace.id
),
handle: '[data-sortable-handle]', handle: '[data-sortable-handle]',
}" }"
class="table-element-form__field" class="table-element-form__field"

View file

@ -6,7 +6,7 @@
class="row margin-bottom-2" class="row margin-bottom-2"
style="--gap: 6px" style="--gap: 6px"
> >
<div v-if="borderIsAllowed" class="col col-3"> <div v-if="borderIsAllowed" class="col col-4">
<div class="margin-bottom-1"> <div class="margin-bottom-1">
{{ $t('styleBoxForm.borderLabel') }} {{ $t('styleBoxForm.borderLabel') }}
</div> </div>
@ -20,7 +20,7 @@
@blur="$v.values.border_size.$touch()" @blur="$v.values.border_size.$touch()"
/> />
</div> </div>
<div v-if="paddingIsAllowed" class="col col-3"> <div v-if="paddingIsAllowed" class="col col-4">
<div class="margin-bottom-1"> <div class="margin-bottom-1">
{{ $t('styleBoxForm.paddingLabel') }} {{ $t('styleBoxForm.paddingLabel') }}
</div> </div>

View file

@ -1,5 +1,5 @@
<template> <template>
<Expandable toggle-on-click> <Expandable toggle-on-click :default-expanded="workflowActions.length < 2">
<template #header="{ expanded }"> <template #header="{ expanded }">
<div class="event__header"> <div class="event__header">
<div class="event__header-left"> <div class="event__header-left">
@ -36,6 +36,11 @@
id: workflowAction.id, id: workflowAction.id,
handle: '[data-sortable-handle]', handle: '[data-sortable-handle]',
update: orderWorkflowActions, update: orderWorkflowActions,
enabled: $hasPermission(
'builder.page.element.update',
element,
workspace.id
),
}" }"
class="event__workflow-action" class="event__workflow-action"
:class="{ 'event__workflow-action--first': index === 0 }" :class="{ 'event__workflow-action--first': index === 0 }"
@ -70,7 +75,7 @@ const DEFAULT_WORKFLOW_ACTION_TYPE = NotificationWorkflowActionType.getType()
export default { export default {
name: 'Event', name: 'Event',
components: { WorkflowAction }, components: { WorkflowAction },
inject: ['builder', 'page'], inject: ['workspace', 'builder', 'page'],
props: { props: {
event: { event: {
type: Event, type: Event,

View file

@ -27,7 +27,18 @@ export default {
name: 'CreatePageModal', name: 'CreatePageModal',
components: { PageSettingsForm }, components: { PageSettingsForm },
mixins: [modal], mixins: [modal],
provide() {
return {
page: null,
builder: this.builder,
workspace: this.workspace,
}
},
props: { props: {
workspace: {
type: Object,
required: true,
},
builder: { builder: {
type: Object, type: Object,
required: true, required: true,
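
Note: the modal now provides workspace, builder and a null page to its descendants, so the settings form can run the same permission checks as the editor even though the page does not exist yet. A small sketch of that provide contract (the factory function is illustrative):

// Illustrative: the values the modal makes available via provide/inject.
// `page` is deliberately null because the page has not been created yet.
const createPageModalProvide = (workspace, builder) => ({
  page: null,
  builder,
  workspace,
})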

View file

@ -70,9 +70,13 @@ export default {
isVisible() { isVisible() {
switch (this.element.visibility) { switch (this.element.visibility) {
case 'logged-in': case 'logged-in':
return this.$store.getters['userSourceUser/isAuthenticated'] return this.$store.getters['userSourceUser/isAuthenticated'](
this.builder
)
case 'not-logged': case 'not-logged':
return !this.$store.getters['userSourceUser/isAuthenticated'] return !this.$store.getters['userSourceUser/isAuthenticated'](
this.builder
)
default: default:
return true return true
} }
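
Note: visibility is now evaluated against the builder the element belongs to, because the userSourceUser getters are scoped per application. A standalone sketch of the same logic, with store standing in for the Vuex store:

// Sketch: element visibility checks authentication for a specific builder.
function isElementVisible(store, element, builder) {
  const authenticated = store.getters['userSourceUser/isAuthenticated'](builder)
  switch (element.visibility) {
    case 'logged-in':
      return authenticated
    case 'not-logged':
      return !authenticated
    default:
      return true
  }
}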

View file

@ -57,7 +57,7 @@ export default {
ElementPreview, ElementPreview,
PreviewNavigationBar, PreviewNavigationBar,
}, },
inject: ['page'], inject: ['page', 'workspace'],
data() { data() {
return { return {
// The element that is currently being copied // The element that is currently being copied
@ -100,6 +100,27 @@ export default {
this.elementSelected.parent_element_id this.elementSelected.parent_element_id
) )
}, },
canCreateElement() {
return this.$hasPermission(
'builder.page.create_element',
this.page,
this.workspace.id
)
},
canUpdateSelectedElement() {
return this.$hasPermission(
'builder.page.element.update',
this.elementSelected,
this.workspace.id
)
},
canDeleteSelectedElement() {
return this.$hasPermission(
'builder.page.element.delete',
this.elementSelected,
this.workspace.id
)
},
}, },
watch: { watch: {
deviceType(value) { deviceType(value) {
@ -174,15 +195,18 @@ export default {
previewScaled.style.height = `${currentHeight / scale}px` previewScaled.style.height = `${currentHeight / scale}px`
}, },
async moveElement(element, placement) { async moveElement(placement) {
if (!element?.id) { if (!this.elementSelected?.id || !this.canUpdateSelectedElement) {
return return
} }
const elementType = this.$registry.get('element', element.type) const elementType = this.$registry.get(
'element',
this.elementSelected.type
)
const placementsDisabled = elementType.getPlacementsDisabled( const placementsDisabled = elementType.getPlacementsDisabled(
this.page, this.page,
element this.elementSelected
) )
if (placementsDisabled.includes(placement)) { if (placementsDisabled.includes(placement)) {
@ -192,23 +216,26 @@ export default {
try { try {
await this.actionMoveElement({ await this.actionMoveElement({
page: this.page, page: this.page,
element, element: this.elementSelected,
placement, placement,
}) })
await this.actionSelectElement({ element }) await this.actionSelectElement({ element: this.elementSelected })
} catch (error) { } catch (error) {
notifyIf(error) notifyIf(error)
} }
}, },
async selectNextElement(element, placement) { async selectNextElement(placement) {
if (!element?.id) { if (!this.elementSelected?.id) {
return return
} }
const elementType = this.$registry.get('element', element.type) const elementType = this.$registry.get(
'element',
this.elementSelected.type
)
const placementsDisabled = elementType.getPlacementsDisabled( const placementsDisabled = elementType.getPlacementsDisabled(
this.page, this.page,
element this.elementSelected
) )
if (placementsDisabled.includes(placement)) { if (placementsDisabled.includes(placement)) {
@ -218,7 +245,7 @@ export default {
try { try {
await this.actionSelectNextElement({ await this.actionSelectNextElement({
page: this.page, page: this.page,
element, element: this.elementSelected,
placement, placement,
}) })
} catch (error) { } catch (error) {
@ -226,7 +253,7 @@ export default {
} }
}, },
async duplicateElement() { async duplicateElement() {
if (!this.elementSelected?.id) { if (!this.elementSelected?.id || !this.canCreateElement) {
return return
} }
@ -242,7 +269,7 @@ export default {
this.isDuplicating = false this.isDuplicating = false
}, },
async deleteElement() { async deleteElement() {
if (!this.elementSelected?.id) { if (!this.elementSelected?.id || !this.canDeleteSelectedElement) {
return return
} }
try { try {
@ -278,30 +305,30 @@ export default {
switch (e.key) { switch (e.key) {
case 'ArrowUp': case 'ArrowUp':
if (alternateAction) { if (alternateAction) {
this.moveElement(this.elementSelected, PLACEMENTS.BEFORE) this.moveElement(PLACEMENTS.BEFORE)
} else { } else {
this.selectNextElement(this.elementSelected, PLACEMENTS.BEFORE) this.selectNextElement(PLACEMENTS.BEFORE)
} }
break break
case 'ArrowDown': case 'ArrowDown':
if (alternateAction) { if (alternateAction) {
this.moveElement(this.elementSelected, PLACEMENTS.AFTER) this.moveElement(PLACEMENTS.AFTER)
} else { } else {
this.selectNextElement(this.elementSelected, PLACEMENTS.AFTER) this.selectNextElement(PLACEMENTS.AFTER)
} }
break break
case 'ArrowLeft': case 'ArrowLeft':
if (alternateAction) { if (alternateAction) {
this.moveElement(this.elementSelected, PLACEMENTS.LEFT) this.moveElement(PLACEMENTS.LEFT)
} else { } else {
this.selectNextElement(this.elementSelected, PLACEMENTS.LEFT) this.selectNextElement(PLACEMENTS.LEFT)
} }
break break
case 'ArrowRight': case 'ArrowRight':
if (alternateAction) { if (alternateAction) {
this.moveElement(this.elementSelected, PLACEMENTS.RIGHT) this.moveElement(PLACEMENTS.RIGHT)
} else { } else {
this.selectNextElement(this.elementSelected, PLACEMENTS.RIGHT) this.selectNextElement(PLACEMENTS.RIGHT)
} }
break break
case 'Backspace': case 'Backspace':
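
Note: the keyboard shortcuts no longer pass the element around; moveElement and selectNextElement read the selected element from component state and perform their own guard checks. A condensed sketch of the resulting key handling, with preview standing in for the component instance:

// Illustrative: arrow keys either move or re-select the currently selected element.
function handleArrowKey(preview, placement, alternateAction) {
  if (alternateAction) {
    // moveElement bails out internally when nothing is selected or the user
    // lacks the builder.page.element.update permission.
    preview.moveElement(placement)
  } else {
    preview.selectNextElement(placement)
  }
}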

View file

@ -12,11 +12,21 @@
:title="pageSidePanelType.label" :title="pageSidePanelType.label"
:disabled="!element || pageSidePanelType.isDeactivated(element)" :disabled="!element || pageSidePanelType.isDeactivated(element)"
> >
<component <ReadOnlyForm
:is="pageSidePanelType.component"
v-if="element" v-if="element"
class="side-panels__panel" :read-only="
/> !$hasPermission(
'builder.page.element.update',
element,
workspace.id
)
"
>
<component
:is="pageSidePanelType.component"
class="side-panels__panel"
/>
</ReadOnlyForm>
<EmptySidePanelState v-else /> <EmptySidePanelState v-else />
</Tab> </Tab>
</Tabs> </Tabs>
@ -30,6 +40,7 @@ import EmptySidePanelState from '@baserow/modules/builder/components/page/sidePa
export default { export default {
name: 'PageSidePanels', name: 'PageSidePanels',
components: { EmptySidePanelState }, components: { EmptySidePanelState },
inject: ['workspace'],
computed: { computed: {
...mapGetters({ ...mapGetters({
element: 'element/getSelected', element: 'element/getSelected',
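
Note: the side panel wraps its form in ReadOnlyForm whenever the user lacks update rights on the selected element. A one-line sketch of that check, with hasPermission standing in for $hasPermission:

// Illustrative: the read-only flag passed to ReadOnlyForm.
function sidePanelReadOnly(hasPermission, element, workspace) {
  return !hasPermission('builder.page.element.update', element, workspace.id)
}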

View file

@ -0,0 +1,11 @@
<template>
<header class="layout__col-2-1 header">
<div class="header__loading"></div>
</header>
</template>
<script>
export default {
name: 'PageSkeleton',
}
</script>

View file

@ -0,0 +1,120 @@
<template>
<PageTemplateContent
v-if="!loading && workspace && page && builder"
:workspace="workspace"
:builder="builder"
:page="page"
:mode="mode"
/>
<PageSkeleton v-else />
</template>
<script>
import { StoreItemLookupError } from '@baserow/modules/core/errors'
import PageTemplateContent from '@baserow/modules/builder/components/page/PageTemplateContent'
import PageSkeleton from '@baserow/modules/builder/components/page/PageSkeleton'
import { DataProviderType } from '@baserow/modules/core/dataProviderTypes'
import { BuilderApplicationType } from '@baserow/modules/builder/applicationTypes'
import { clone } from '@baserow/modules/core/utils/object'
const mode = 'editing'
export default {
name: 'PageTemplate',
components: { PageTemplateContent, PageSkeleton },
props: {
pageValue: {
type: Object,
required: true,
},
},
data() {
return {
workspace: null,
builder: null,
page: null,
mode,
loading: true,
}
},
watch: {
'pageValue.page.id': {
handler() {
this.loadData()
},
immediate: true,
},
},
destroyed() {
// Restore the current application to the selected application if any
this.$store.dispatch('userSourceUser/setCurrentApplication', {
application: this.$store.getters['application/getSelected'],
})
},
methods: {
async loadData() {
this.loading = true
this.$store.dispatch('element/select', {
element: null,
})
try {
const builderToDisplay = this.pageValue.builder
if (
this.$store.getters['userSourceUser/getCurrentApplication']?.id !==
builderToDisplay.id
) {
// We clone the builder because we are using it in the userSourceUser store
// And the application is then modified outside of the store elsewhere.
this.$store.dispatch('userSourceUser/setCurrentApplication', {
application: clone(builderToDisplay),
})
}
const builder =
this.$store.getters['userSourceUser/getCurrentApplication']
const page = this.$store.getters['page/getById'](
builder,
this.pageValue.page.id
)
const builderApplicationType = this.$registry.get(
'application',
BuilderApplicationType.getType()
)
await builderApplicationType.loadExtraData(builder)
await Promise.all([
this.$store.dispatch('dataSource/fetch', {
page,
}),
this.$store.dispatch('element/fetch', { page }),
this.$store.dispatch('workflowAction/fetch', { page }),
])
await DataProviderType.initAll(
this.$registry.getAll('builderDataProvider'),
{
builder,
page,
mode,
}
)
this.builder = builder
this.page = page
this.workspace = builder.workspace
} catch (e) {
// In case of a network error we want to fail hard.
if (e.response === undefined && !(e instanceof StoreItemLookupError)) {
throw e
}
}
this.loading = false
},
},
}
</script>
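
Note: for reference, a condensed sketch of the loading sequence this component performs. The loadExtraData and DataProviderType.initAll steps are omitted for brevity, store stands in for the Vuex store, and structuredClone stands in for the core clone helper:

// Illustrative outline of loading a builder page for the templates preview.
async function loadTemplatePage(store, pageValue) {
  // Clone the builder into the userSourceUser store so the preview can
  // authenticate against it without mutating the original application.
  store.dispatch('userSourceUser/setCurrentApplication', {
    application: structuredClone(pageValue.builder),
  })
  const builder = store.getters['userSourceUser/getCurrentApplication']
  const page = store.getters['page/getById'](builder, pageValue.page.id)

  // Data sources, elements and workflow actions are fetched in parallel.
  await Promise.all([
    store.dispatch('dataSource/fetch', { page }),
    store.dispatch('element/fetch', { page }),
    store.dispatch('workflowAction/fetch', { page }),
  ])
  return { builder, page, workspace: builder.workspace }
}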

View file

@ -0,0 +1,99 @@
<template>
<div v-if="page" :key="page.id" class="page-template">
<PageHeader :page="page" />
<div class="layout__col-2-2 page-editor__content">
<div :style="{ width: `calc(100% - ${panelWidth}px)` }">
<PagePreview />
</div>
<div
class="page-editor__side-panel"
:style="{ width: `${panelWidth}px` }"
>
<PageSidePanels />
</div>
</div>
</div>
</template>
<script>
import PageHeader from '@baserow/modules/builder/components/page/header/PageHeader'
import PagePreview from '@baserow/modules/builder/components/page/PagePreview'
import PageSidePanels from '@baserow/modules/builder/components/page/PageSidePanels'
import ApplicationBuilderFormulaInputGroup from '@baserow/modules/builder/components/ApplicationBuilderFormulaInputGroup'
import { DataProviderType } from '@baserow/modules/core/dataProviderTypes'
import _ from 'lodash'
const mode = 'editing'
export default {
name: 'PageTemplate',
components: { PagePreview, PageHeader, PageSidePanels },
provide() {
return {
workspace: this.workspace,
builder: this.builder,
page: this.page,
mode,
formulaComponent: ApplicationBuilderFormulaInputGroup,
}
},
props: {
workspace: {
type: Object,
required: true,
},
builder: {
type: Object,
required: true,
},
page: {
type: Object,
required: true,
},
mode: {
type: String,
required: true,
},
},
data() {
return { panelWidth: 300 }
},
computed: {
applicationContext() {
return {
builder: this.builder,
page: this.page,
mode,
}
},
dataSources() {
return this.$store.getters['dataSource/getPageDataSources'](this.page)
},
dispatchContext() {
return DataProviderType.getAllDataSourceDispatchContext(
this.$registry.getAll('builderDataProvider'),
this.applicationContext
)
},
},
watch: {
dispatchContext: {
deep: true,
/**
* Update data source content on backend context changes
*/
handler(newDispatchContext, oldDispatchContext) {
if (!_.isEqual(newDispatchContext, oldDispatchContext)) {
this.$store.dispatch(
'dataSourceContent/debouncedFetchPageDataSourceContent',
{
page: this.page,
data: newDispatchContext,
}
)
}
},
},
},
}
</script>
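
Note: the watcher only refetches data source content when the dispatch context actually changed, which avoids redundant backend calls. A hedged standalone equivalent of the handler:

import _ from 'lodash'

// Illustrative: refetch page data source content only on real context changes.
function onDispatchContextChange(store, page, newContext, oldContext) {
  if (!_.isEqual(newContext, oldContext)) {
    store.dispatch('dataSourceContent/debouncedFetchPageDataSourceContent', {
      page,
      data: newContext,
    })
  }
}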

View file

@ -0,0 +1,79 @@
<template>
<li
class="tree__item"
:class="{
active: application._.selected,
'tree__item--loading': application._.loading,
}"
>
<div class="tree__action">
<a class="tree__link" @click="$emit('selected', application)">
<i
class="tree__icon tree__icon--type"
:class="application._.type.iconClass"
></i>
<span class="tree__link-text">{{ application.name }}</span>
</a>
</div>
<template v-if="application._.selected">
<ul class="tree__subs">
<li
v-for="builderPage in orderedPages"
:key="builderPage.id"
class="tree__sub"
:class="{ active: isPageActive(builderPage) }"
>
<a
class="tree__sub-link"
@click="selectPage(application, builderPage)"
>
{{ builderPage.name }}
</a>
</li>
</ul>
</template>
</li>
</template>
<script>
import { BuilderApplicationType } from '@baserow/modules/builder/applicationTypes'
export default {
name: 'TemplateSidebar',
props: {
application: {
type: Object,
required: true,
},
page: {
required: true,
validator: (prop) => typeof prop === 'object' || prop === null,
},
},
computed: {
orderedPages() {
return this.application.pages
.map((page) => page)
.sort((a, b) => a.order - b.order)
},
},
methods: {
selectPage(application, page) {
this.$emit('selected-page', {
application: BuilderApplicationType.getType(),
value: {
builder: application,
page,
},
})
},
isPageActive(page) {
return (
this.page !== null &&
this.page.application === BuilderApplicationType.getType() &&
this.page.value.page.id === page.id
)
},
},
}
</script>
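
Note: orderedPages copies the array before sorting so the store state is not mutated. An equivalent one-liner, shown here only as a sketch:

// Illustrative: sort a copy of the pages by their configured order.
const orderedPages = (application) =>
  [...application.pages].sort((a, b) => a.order - b.order)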

View file

@ -29,16 +29,18 @@
<script> <script>
import UserSourceUsersContext from '@baserow/modules/builder/components/page/UserSourceUsersContext' import UserSourceUsersContext from '@baserow/modules/builder/components/page/UserSourceUsersContext'
import { mapGetters } from 'vuex'
export default { export default {
components: { UserSourceUsersContext }, components: { UserSourceUsersContext },
inject: ['builder'],
props: {}, props: {},
computed: { computed: {
...mapGetters({ isAuthenticated() {
loggedUser: 'userSourceUser/getUser', return this.$store.getters['userSourceUser/isAuthenticated'](this.builder)
isAuthenticated: 'userSourceUser/isAuthenticated', },
}), loggedUser() {
return this.$store.getters['userSourceUser/getUser'](this.builder)
},
}, },
} }
</script> </script>

View file

@ -70,7 +70,7 @@
<script> <script>
import context from '@baserow/modules/core/mixins/context' import context from '@baserow/modules/core/mixins/context'
import { mapActions, mapGetters } from 'vuex' import { mapActions } from 'vuex'
import UserSourceService from '@baserow/modules/core/services/userSource' import UserSourceService from '@baserow/modules/core/services/userSource'
import { notifyIf } from '@baserow/modules/core/utils/error' import { notifyIf } from '@baserow/modules/core/utils/error'
import _ from 'lodash' import _ from 'lodash'
@ -88,10 +88,12 @@ export default {
} }
}, },
computed: { computed: {
...mapGetters({ isAuthenticated() {
loggedUser: 'userSourceUser/getUser', return this.$store.getters['userSourceUser/isAuthenticated'](this.builder)
isAuthenticated: 'userSourceUser/isAuthenticated', },
}), loggedUser() {
return this.$store.getters['userSourceUser/getUser'](this.builder)
},
userSources() { userSources() {
return this.$store.getters['userSource/getUserSources'](this.builder) return this.$store.getters['userSource/getUserSources'](this.builder)
}, },
@ -143,12 +145,16 @@ export default {
this.currentUser = user this.currentUser = user
try { try {
if (!user) { if (!user) {
await this.actionLogoff() await this.actionLogoff({ application: this.builder })
} else { } else {
const userSource = this.$store.getters[ const userSource = this.$store.getters[
'userSource/getUserSourceById' 'userSource/getUserSourceById'
](this.builder, user.user_source_id) ](this.builder, user.user_source_id)
await this.actionForceAuthenticate({ userSource, user }) await this.actionForceAuthenticate({
application: this.builder,
userSource,
user,
})
} }
} catch { } catch {
this.currentUser = previousUser this.currentUser = previousUser
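
Note: the user-source authentication getters and actions are now keyed by application, so a template preview can be logged in independently of the currently selected builder. A small sketch of reading the previewed user, with store standing in for the Vuex store:

// Illustrative: resolve the logged-in user-source user for one builder only.
function currentPreviewUser(store, builder) {
  if (!store.getters['userSourceUser/isAuthenticated'](builder)) {
    return null
  }
  return store.getters['userSourceUser/getUser'](builder)
}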

View file

@ -8,20 +8,30 @@
> >
<template v-if="state === 'loaded'"> <template v-if="state === 'loaded'">
<div v-if="dataSources.length > 0"> <div v-if="dataSources.length > 0">
<DataSourceForm <ReadOnlyForm
v-for="dataSource in dataSources" v-for="dataSource in dataSources"
:id="dataSource.id"
:ref="`dataSourceForm_${dataSource.id}`"
:key="dataSource.id" :key="dataSource.id"
:builder="builder" :read-only="
:data-source="dataSource" !$hasPermission(
:page="page" 'builder.page.data_source.update',
:default-values="dataSource" dataSource,
:integrations="integrations" workspace.id
:loading="dataSourcesLoading.includes(dataSource.id)" )
@delete="deleteDataSource(dataSource)" "
@values-changed="updateDataSource(dataSource, $event)" >
/> <DataSourceForm
:id="dataSource.id"
:ref="`dataSourceForm_${dataSource.id}`"
:builder="builder"
:data-source="dataSource"
:page="page"
:default-values="dataSource"
:integrations="integrations"
:loading="dataSourcesLoading.includes(dataSource.id)"
@delete="deleteDataSource(dataSource)"
@values-changed="updateDataSource(dataSource, $event)"
/>
</ReadOnlyForm>
</div> </div>
<template v-else> <template v-else>
@ -36,8 +46,12 @@
</template> </template>
<ButtonText <ButtonText
icon="iconoir-plus" v-if="
$hasPermission('builder.page.create_data_source', page, workspace.id)
"
type="secondary" type="secondary"
icon="iconoir-plus"
size="small"
:loading="creationInProgress" :loading="creationInProgress"
@click="createDataSource()" @click="createDataSource()"
> >
@ -59,7 +73,7 @@ export default {
name: 'DataSourceContext', name: 'DataSourceContext',
components: { DataSourceForm }, components: { DataSourceForm },
mixins: [context], mixins: [context],
inject: ['builder'], inject: ['workspace', 'builder'],
props: { props: {
page: { page: {
type: Object, type: Object,
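
Note: data source editing is now permission-aware as well: each form is wrapped in ReadOnlyForm and the create button only renders when the user may add data sources. A hedged sketch of the two checks, with hasPermission standing in for $hasPermission:

// Illustrative: the permission flags used around data sources on this panel.
function dataSourcePermissions(hasPermission, page, dataSource, workspace) {
  return {
    canCreate: hasPermission('builder.page.create_data_source', page, workspace.id),
    readOnly: !hasPermission(
      'builder.page.data_source.update',
      dataSource,
      workspace.id
    ),
  }
}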

Some files were not shown because too many files have changed in this diff.