Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-07 06:15:36 +00:00)

Commit 7b421c8698: merge with develop, conflicts

678 changed files with 130000 additions and 11876 deletions
Changed files (abridged):

.env.dev.example, .env.example, .gitignore, README.md
backend
  Makefile, application_types.py, apps.py, constants.py
  docker
  src/baserow
    api/templates
    config/settings
    contrib
      builder
        api
        application_types.py, apps.py, domains
        elements
        formula_property_extractor.py, handler.py, locale/en/LC_MESSAGES
        migrations
          0052_menuitemelement_menuelement.py, 0053_buttonthemeconfigblock_button_active_background_color_and_more.py, 0054_simplecontainerelement.py, 0055_linkthemeconfigblock_link_active_text_decoration_and_more.py
        models.py, pages
        theme
        workflow_actions
      dashboard
      database
        airtable
          airtable_column_types.py, airtable_filter_operators.py, airtable_view_types.py, constants.py, exceptions.py, handler.py, helpers.py, import_report.py, registry.py, utils.py
        api
          fields
          tables
          views
          webhooks
        export
          export_serialized.py, fields
        constants.py, exceptions.py, field_filters.py, field_helpers.py, field_types.py, handler.py, models.py, registries.py, tasks.py
        file_import
        locale
        management/commands
        migrations
          0181_tablewebhookcall_batch_id_and_more.py, 0182_tablewebhookevent_views_viewrows_viewsubscription.py, 0183_viewgroupby_type_viewsort_type.py
        plugins.py, populate.py, rows
        table
        trash
        views
        webhooks
      integrations/local_baserow
    core
.env.dev.example / .env.example:

@@ -29,3 +29,15 @@ MEDIA_URL=http://localhost:4000/media/
 POSTGRES_DEV_EXTRA_ARGS="-c shared_buffers=512MB -c fsync=off -c full_page_writes=off -c synchronous_commit=off"
 POSTGRES_IMAGE_VERSION=12
+
+# Settings for local dev with MinIO to have local s3-like object storage
+# AWS_ACCESS_KEY_ID=<REDACTED>
+# AWS_SECRET_ACCESS_KEY=<REDACTED>
+# AWS_STORAGE_BUCKET_NAME=baserow
+# AWS_S3_ENDPOINT_URL=http://192.168.1.16:9000
+# AWS_S3_USE_SSL=off
+# AWS_S3_CUSTOM_DOMAIN=
+# AWS_DEFAULT_ACL=
+# AWS_S3_VERIFY=off
+# AWS_S3_SIGNATURE_VERSION = 's3v4'
+# AWS_S3_ADDRESSING_STYLE = 'path'

@@ -93,6 +93,7 @@ DATABASE_NAME=baserow
 # BASEROW_WEBHOOKS_MAX_PER_TABLE=
 # BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES=
 # BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS=
+# BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE=
 # BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT=
 # HOURS_UNTIL_TRASH_PERMANENTLY_DELETED=

@@ -137,6 +138,7 @@ DATABASE_NAME=baserow
 # BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES=
 # BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS=
 # BASEROW_ALLOW_MULTIPLE_SSO_PROVIDERS_FOR_SAME_ACCOUNT=
+# BASEROW_ENTERPRISE_GROUPED_AGGREGATE_SERVICE_MAX_SERIES=
 # BASEROW_PERIODIC_FIELD_UPDATE_CRONTAB=
 # BASEROW_PERIODIC_FIELD_UPDATE_UNUSED_WORKSPACE_INTERVAL_MIN=
.gitignore (vendored), 5 changes:

@@ -123,6 +123,9 @@ out/
 .cursor-config/
 !config/cursor/.cursor-config/
+
+# Pyright config file
+pyrightconfig.json

 # VIM's swap files
 *.swp

@@ -146,4 +149,4 @@ premium/web-frontend/package.json
 # Storybook
 web-frontend/.nuxt-storybook/
-web-frontend/storybook-static
+web-frontend/storybook-static
README.md:

@@ -17,7 +17,7 @@ tool gives you the powers of a developer without leaving your browser.
 [Deploy to Heroku](https://www.heroku.com/deploy/?template=https://github.com/bram2w/baserow/tree/master)

 ```bash
-docker run -v baserow_data:/baserow/data -p 80:80 -p 443:443 baserow/baserow:1.31.1
+docker run -v baserow_data:/baserow/data -p 80:80 -p 443:443 baserow/baserow:1.32.0
 ```

@@ -89,7 +89,7 @@ Created by Baserow B.V. - bram@baserow.io.
 Distributed under the MIT license. See `LICENSE` for more information.

-Version: 1.31.1
+Version: 1.32.0

 The official repository can be found at https://gitlab.com/baserow/baserow.
backend/Makefile:

@@ -54,8 +54,8 @@ BACKEND_TESTS_DIRS_FROM_ROOT=backend/tests/ premium/backend/tests/ enterprise/ba
 .PHONY: help venv venv-clean install-oss install install-extra package docker-build package-install\
 clean clean-all package-build package-clean deps deps-clean deps-install deps-install-dev deps-upgrade\
 lint lint-fix lint-python format sort make-translations compile-translations\
-test test-builder test-builder-parallel test-compat test-compat-parallel test-coverage test-parallel \
-test-regenerate-ci-durations ci-test-python ci-check-startup-python ci-coverage-report fix\
+test test-builder test-builder-parallel test-coverage test-parallel test-regenerate-ci-durations\
+ci-test-python ci-check-startup-python ci-coverage-report fix\
 run-dev

@@ -179,18 +179,12 @@ test-builder: .check-dev
 test-builder-parallel: .check-dev
	$(VPYTEST) tests/baserow/contrib/builder -n 10 || exit;

-test-compat: .check-dev
-	$(VPYTEST) tests/baserow/compat --run-disabled-in-ci || exit;
-
 test-regenerate-ci-durations: .check-dev
	$(VPYTEST) $(BACKEND_TESTS_DIRS) --store-durations || exit;

 test-parallel: .check-dev
	$(VPYTEST) $(BACKEND_TESTS_DIRS) -n 10 || exit;

-test-compat-parallel: .check-dev
-	$(VPYTEST) tests/baserow/compat --run-disabled-in-ci -n 10 || exit;
-
 .make-django-cmd: .check-dev
	for pkg_dir in $(SOURCE_DIRS); do echo $$pkg_dir ; cd $$pkg_dir ; \
	$(VDJANGO) $(DJANGO_COMMAND) || true ; cd - ;\
docker entrypoint script:

@@ -6,7 +6,7 @@ set -euo pipefail
 # ENVIRONMENT VARIABLES USED DIRECTLY BY THIS ENTRYPOINT
 # ======================================================

-export BASEROW_VERSION="1.31.1"
+export BASEROW_VERSION="1.32.0"

 # Used by docker-entrypoint.sh to start the dev server
 # If not configured you'll receive this: CommandError: "0.0.0.0:" is not a valid port number or address:port pair.
templates API serializer:

@@ -28,6 +28,7 @@ class TemplateSerializer(serializers.ModelSerializer):
             "keywords",
             "workspace_id",
             "is_default",
+            "open_application",
         )

     @extend_schema_field(OpenApiTypes.STR)
backend Django settings:

@@ -423,7 +423,7 @@ SPECTACULAR_SETTINGS = {
         "name": "MIT",
         "url": "https://gitlab.com/baserow/baserow/-/blob/master/LICENSE",
     },
-    "VERSION": "1.31.1",
+    "VERSION": "1.32.0",
     "SERVE_INCLUDE_SCHEMA": False,
     "TAGS": [
         {"name": "Settings"},

@@ -986,6 +986,10 @@ BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS = int(
 BASEROW_MAX_WEBHOOK_CALLS_IN_QUEUE_PER_WEBHOOK = (
     int(os.getenv("BASEROW_MAX_WEBHOOK_CALLS_IN_QUEUE_PER_WEBHOOK", "0")) or None
 )
 BASEROW_WEBHOOKS_BATCH_LIMIT = int(os.getenv("BASEROW_WEBHOOKS_BATCH_LIMIT", 5))
+BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE = int(
+    os.getenv("BASEROW_WEBHOOK_ROWS_ENTER_VIEW_BATCH_SIZE", BATCH_ROWS_SIZE_LIMIT)
+)

 # ======== WARNING ========
 # Please read and understand everything at:

@@ -1271,7 +1275,9 @@ BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE = int(
 BASEROW_USE_LOCAL_CACHE = str_to_bool(os.getenv("BASEROW_USE_LOCAL_CACHE", "true"))

+# -- CACHALOT SETTINGS --
+
 CACHALOT_TIMEOUT = int(os.getenv("BASEROW_CACHALOT_TIMEOUT", 60 * 60 * 24 * 7))
 BASEROW_CACHALOT_ONLY_CACHABLE_TABLES = os.getenv(
     "BASEROW_CACHALOT_ONLY_CACHABLE_TABLES", None
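Note: the `int(os.getenv(...)) or None` idiom in the hunk above treats both an unset variable and an explicit `0` as "no limit". A quick runnable illustration (the variable name is illustrative, not a real Baserow setting):

```python
import os

# Mirrors the settings idiom above: an unset variable or an explicit "0"
# both become None, which the webhook code can treat as "no limit".
def optional_int(name: str) -> int | None:
    return int(os.getenv(name, "0")) or None

os.environ["DEMO_LIMIT"] = "0"
assert optional_int("DEMO_LIMIT") is None
os.environ["DEMO_LIMIT"] = "25"
assert optional_int("DEMO_LIMIT") == 25
```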
backend test settings:

@@ -5,6 +5,8 @@ from unittest.mock import patch
 from dotenv import dotenv_values

+from baserow.config.settings.utils import str_to_bool
+
 # Create a .env.testing file in the backend directory to store different test settings and
 # override the default ones. For different test settings, provide the TEST_ENV_FILE
 # environment variable with the name of the file to use. Everything that starts with

@@ -36,11 +38,22 @@ CELERY_TASK_EAGER_PROPAGATES = True
 CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}

+# Set to 'off' to run all migrations and disable the custom setup fixture that installs
+# all PL/pgSQL functions. Default is 'on' for faster setup by skipping migrations.
+BASEROW_TESTS_SETUP_DB_FIXTURE = str_to_bool(
+    os.getenv("BASEROW_TESTS_SETUP_DB_FIXTURE", "on")
+)
+DATABASES["default"]["TEST"] = {
+    "MIGRATE": not BASEROW_TESTS_SETUP_DB_FIXTURE,
+}
 # Psycopg3 only?
 # Disable default optimizations for the tests because they make tests slower.
 DATABASES["default"]["OPTIONS"] = {
     "server_side_binding": False,
     "prepare_threshold": None,
 }

 # Open a second database connection that can be used to test transactions.
 DATABASES["default-copy"] = deepcopy(DATABASES["default"])

@@ -102,7 +115,6 @@ BASEROW_LOGIN_ACTION_LOG_LIMIT = RateLimit.from_string("1000/s")
 BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS = False
-
 CACHALOT_ENABLED = str_to_bool(os.getenv("CACHALOT_ENABLED", "false"))
 if CACHALOT_ENABLED:
     CACHES[CACHALOT_CACHE] = {
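Note: `TEST["MIGRATE"] = False` is a standard Django (3.1+) test-database option: the schema is synthesized from the current model state instead of replaying every migration, which is what makes the fixture-based setup above faster. A minimal sketch of the toggle, reusing the env-var name from the diff:

```python
import os

# "on" (default) skips migrations via MIGRATE=False; "off" replays them all,
# matching the comment in the hunk above.
SETUP_DB_FIXTURE = os.getenv("BASEROW_TESTS_SETUP_DB_FIXTURE", "on") == "on"

DATABASES = {"default": {"ENGINE": "django.db.backends.postgresql"}}
DATABASES["default"]["TEST"] = {"MIGRATE": not SETUP_DB_FIXTURE}
```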
builder data sources API view:

@@ -270,9 +270,7 @@ class DataSourceView(APIView):
         if "page_id" in request.data:
             page = PageHandler().get_page(
                 int(request.data["page_id"]),
-                base_queryset=Page.objects_with_shared.filter(
-                    builder=data_source.page.builder
-                ),
+                base_queryset=Page.objects.filter(builder=data_source.page.builder),
             )

         # Do we have a service?
builder public (domains) API views:

@@ -1,8 +1,11 @@
 from typing import Any, Dict, List

+from django.db import transaction
+
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from rest_framework.permissions import AllowAny
+from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -27,7 +30,11 @@ from baserow.contrib.builder.api.data_sources.errors import (
 from baserow.contrib.builder.api.data_sources.serializers import (
     DispatchDataSourceRequestSerializer,
 )
-from baserow.contrib.builder.api.domains.serializers import PublicBuilderSerializer
+from baserow.contrib.builder.api.domains.serializers import (
+    PublicBuilderSerializer,
+    PublicDataSourceSerializer,
+    PublicElementSerializer,
+)
 from baserow.contrib.builder.api.pages.errors import ERROR_PAGE_DOES_NOT_EXIST
 from baserow.contrib.builder.api.workflow_actions.serializers import (
     BuilderWorkflowActionSerializer,

@@ -42,12 +49,17 @@ from baserow.contrib.builder.data_sources.exceptions import (
 )
 from baserow.contrib.builder.data_sources.handler import DataSourceHandler
 from baserow.contrib.builder.data_sources.service import DataSourceService
+from baserow.contrib.builder.domains.handler import DomainHandler
 from baserow.contrib.builder.domains.service import DomainService
 from baserow.contrib.builder.elements.registries import element_type_registry
 from baserow.contrib.builder.elements.service import ElementService
 from baserow.contrib.builder.errors import ERROR_BUILDER_DOES_NOT_EXIST
 from baserow.contrib.builder.exceptions import BuilderDoesNotExist
-from baserow.contrib.builder.handler import BuilderHandler
+from baserow.contrib.builder.handler import (
+    BUILDER_PUBLIC_BUILDER_BY_DOMAIN_TTL_SECONDS,
+    BUILDER_PUBLIC_RECORDS_CACHE_TTL_SECONDS,
+    BuilderHandler,
+)
 from baserow.contrib.builder.pages.exceptions import PageDoesNotExist
 from baserow.contrib.builder.pages.handler import PageHandler
 from baserow.contrib.builder.service import BuilderService

@@ -57,6 +69,7 @@ from baserow.contrib.builder.workflow_actions.registries import (
 from baserow.contrib.builder.workflow_actions.service import (
     BuilderWorkflowActionService,
 )
+from baserow.core.cache import global_cache
 from baserow.core.exceptions import ApplicationDoesNotExist, PermissionException
 from baserow.core.services.exceptions import (
     DoesNotExist,

@@ -66,8 +79,6 @@ from baserow.core.services.exceptions import (
 )
 from baserow.core.services.registries import service_type_registry

-from .serializers import PublicDataSourceSerializer, PublicElementSerializer
-

 class PublicBuilderByDomainNameView(APIView):
     permission_classes = (AllowAny,)

@@ -93,18 +104,36 @@ class PublicBuilderByDomainNameView(APIView):
         },
     )
     @map_exceptions({BuilderDoesNotExist: ERROR_BUILDER_DOES_NOT_EXIST})
-    def get(self, request, domain_name):
+    def get(self, request: Request, domain_name: str):
         """
-        Responds with a serialized version of the builder related to the query.
+        Try to match a published builder for the given domain name. Used to display
+        the public site.
         """

+        data = global_cache.get(
+            DomainHandler.get_public_builder_by_domain_cache_key(domain_name),
+            default=lambda: self._get_public_builder_by_domain(request, domain_name),
+            timeout=BUILDER_PUBLIC_BUILDER_BY_DOMAIN_TTL_SECONDS,
+        )
+        return Response(data)
+
+    def _get_public_builder_by_domain(self, request: Request, domain_name: str):
+        """
+        Returns a serialized builder which has a domain matching `domain_name`.
+
+        Only requested if the public get-by-domain cache is stale, or if the
+        application has been re-published.
+
+        :param request: the HTTP request.
+        :param domain_name: the domain name to match.
+        :return: a publicly serialized builder.
+        """
+
         builder = DomainService().get_public_builder_by_domain_name(
             request.user, domain_name
         )

-        return Response(PublicBuilderSerializer(builder).data)
+        return PublicBuilderSerializer(builder).data


 class PublicBuilderByIdView(APIView):

@@ -180,20 +209,44 @@ class PublicElementsView(APIView):
             PageDoesNotExist: ERROR_PAGE_DOES_NOT_EXIST,
         }
     )
-    def get(self, request, page_id):
+    def get(self, request: Request, page_id: int):
         """
         Responds with a list of serialized elements that belong to the given page id.
         """

+        if PageHandler().is_published_page(page_id):
+            data = global_cache.get(
+                PageHandler.get_page_public_records_cache_key(
+                    page_id, request.user_source_user, "elements"
+                ),
+                default=lambda: self._get_public_page_elements(request, page_id),
+                timeout=BUILDER_PUBLIC_RECORDS_CACHE_TTL_SECONDS,
+            )
+        else:
+            data = self._get_public_page_elements(request, page_id)
+
+        return Response(data)
+
+    def _get_public_page_elements(
+        self, request: Request, page_id: int
+    ) -> List[Dict[str, Any]]:
+        """
+        Returns a list of serialized elements that belong to the given page id.
+
+        Only requested if the public elements cache is stale, or if the page is
+        being previewed.
+
+        :param request: the HTTP request.
+        :param page_id: the page id.
+        :return: a list of serialized elements.
+        """
+
         page = PageHandler().get_page(page_id)

         elements = ElementService().get_elements(request.user, page)

-        data = [
+        return [
             element_type_registry.get_serializer(element, PublicElementSerializer).data
             for element in elements
         ]
-        return Response(data)


 class PublicDataSourcesView(APIView):

@@ -227,26 +280,48 @@ class PublicDataSourcesView(APIView):
             PageDoesNotExist: ERROR_PAGE_DOES_NOT_EXIST,
         }
     )
-    def get(self, request, page_id):
+    def get(self, request: Request, page_id: int):
         """
         Responds with a list of serialized data_sources that belong to the page if the
         user has access to it.
         """

-        page = PageHandler().get_page(page_id)
+        if PageHandler().is_published_page(page_id):
+            data = global_cache.get(
+                PageHandler.get_page_public_records_cache_key(
+                    page_id, request.user_source_user, "data_sources"
+                ),
+                default=lambda: self._get_public_page_data_sources(request, page_id),
+                timeout=BUILDER_PUBLIC_RECORDS_CACHE_TTL_SECONDS,
+            )
+        else:
+            data = self._get_public_page_data_sources(request, page_id)
+
+        return Response(data)
+
+    def _get_public_page_data_sources(self, request: Request, page_id: int):
+        """
+        Returns a list of serialized data sources that belong to the given page id.
+
+        Only requested if the public data sources cache is stale, or if the page is
+        being previewed.
+
+        :param request: the HTTP request.
+        :param page_id: the page id.
+        :return: a list of serialized data sources.
+        """
+
+        page = PageHandler().get_page(page_id)
         data_sources = DataSourceService().get_data_sources(request.user, page)

-        handler = BuilderHandler()
-        public_properties = handler.get_builder_public_properties(
+        public_properties = BuilderHandler().get_builder_public_properties(
             request.user_source_user, page.builder
         )

         allowed_fields = []
         for fields in public_properties["external"].values():
             allowed_fields.extend(fields)

-        data = [
+        return [
             service_type_registry.get_serializer(
                 data_source.service,
                 PublicDataSourceSerializer,

@@ -256,8 +331,6 @@ class PublicDataSourcesView(APIView):
             if data_source.service and data_source.service.integration_id
         ]

-        return Response(data)
-

 class PublicBuilderWorkflowActionsView(APIView):
     permission_classes = (AllowAny,)

@@ -295,14 +368,45 @@ class PublicBuilderWorkflowActionsView(APIView):
             PageDoesNotExist: ERROR_PAGE_DOES_NOT_EXIST,
         }
     )
-    def get(self, request, page_id: int):
-        page = PageHandler().get_page(page_id)
+    def get(self, request: Request, page_id: int):
+        """
+        Responds with a list of serialized workflow actions that belong to the given
+        page id.
+        """
+
+        if PageHandler().is_published_page(page_id):
+            data = global_cache.get(
+                PageHandler.get_page_public_records_cache_key(
+                    page_id, request.user_source_user, "workflow_actions"
+                ),
+                default=lambda: self._get_public_page_workflow_actions(
+                    request, page_id
+                ),
+                timeout=BUILDER_PUBLIC_RECORDS_CACHE_TTL_SECONDS,
+            )
+        else:
+            data = self._get_public_page_workflow_actions(request, page_id)
+
+        return Response(data)
+
+    def _get_public_page_workflow_actions(self, request: Request, page_id: int):
+        """
+        Returns a list of serialized workflow actions that belong to the given page id.
+
+        Only requested if the public workflow actions cache is stale, or if the page is
+        being previewed.
+
+        :param request: the HTTP request.
+        :param page_id: the page id.
+        :return: a list of serialized workflow actions.
+        """
+
+        page = PageHandler().get_page(page_id)
         workflow_actions = BuilderWorkflowActionService().get_workflow_actions(
             request.user, page
         )

-        data = [
+        return [
             builder_workflow_action_type_registry.get_serializer(
                 workflow_action,
                 BuilderWorkflowActionSerializer,

@@ -311,8 +415,6 @@ class PublicBuilderWorkflowActionsView(APIView):
             for workflow_action in workflow_actions
         ]

-        return Response(data)
-

 class PublicDispatchDataSourceView(APIView):
     permission_classes = (AllowAny,)
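Note: the recurring pattern in the three public views above is cache-aside, keyed per page and per user-source role: serve the cached serialization for published pages, recompute on a miss, and always recompute in preview. A minimal runnable sketch of that pattern (a dict with expiry stands in for the shared cache backend that Baserow's `global_cache` wraps):

```python
import time
from typing import Any, Callable

_CACHE: dict[str, tuple[Any, float]] = {}


def cache_get(key: str, default: Callable[[], Any], timeout: int) -> Any:
    entry = _CACHE.get(key)
    if entry is not None and entry[1] > time.monotonic():
        return entry[0]
    value = default()  # only computed on a miss or after expiry
    _CACHE[key] = (value, time.monotonic() + timeout)
    return value


# Usage mirroring PublicElementsView.get: compute once, then serve cached.
data = cache_get("page_7_elements", default=lambda: ["...serialized..."], timeout=3600)
assert cache_get("page_7_elements", default=list, timeout=3600) == data
```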
builder elements API serializers:

@@ -18,6 +18,9 @@ from baserow.contrib.builder.elements.models import (
     CollectionElementPropertyOptions,
     CollectionField,
     Element,
+    LinkElement,
+    MenuItemElement,
+    NavigationElementMixin,
 )
 from baserow.contrib.builder.elements.registries import (
     collection_field_type_registry,

@@ -378,3 +381,103 @@ class CollectionElementPropertyOptionsSerializer(
     class Meta:
         model = CollectionElementPropertyOptions
         fields = ["schema_property", "filterable", "sortable", "searchable"]
+
+
+class MenuItemSerializer(serializers.ModelSerializer):
+    """Serializes the MenuItemElement."""
+
+    children = serializers.ListSerializer(
+        child=serializers.DictField(),
+        required=False,
+        help_text="A MenuItemElement that is a child of this instance.",
+    )
+
+    navigation_type = serializers.ChoiceField(
+        choices=NavigationElementMixin.NAVIGATION_TYPES.choices,
+        help_text=LinkElement._meta.get_field("navigation_type").help_text,
+        required=False,
+    )
+    navigate_to_page_id = serializers.IntegerField(
+        allow_null=True,
+        default=None,
+        help_text=LinkElement._meta.get_field("navigate_to_page").help_text,
+        required=False,
+    )
+    navigate_to_url = FormulaSerializerField(
+        help_text=LinkElement._meta.get_field("navigate_to_url").help_text,
+        default="",
+        allow_blank=True,
+        required=False,
+    )
+    page_parameters = PageParameterValueSerializer(
+        many=True,
+        default=[],
+        help_text=LinkElement._meta.get_field("page_parameters").help_text,
+        required=False,
+    )
+    query_parameters = PageParameterValueSerializer(
+        many=True,
+        default=[],
+        help_text=LinkElement._meta.get_field("query_parameters").help_text,
+        required=False,
+    )
+    target = serializers.ChoiceField(
+        choices=NavigationElementMixin.TARGETS.choices,
+        help_text=LinkElement._meta.get_field("target").help_text,
+        required=False,
+    )
+
+    class Meta:
+        model = MenuItemElement
+        fields = [
+            "id",
+            "variant",
+            "type",
+            "menu_item_order",
+            "uid",
+            "name",
+            "navigation_type",
+            "navigate_to_page_id",
+            "navigate_to_url",
+            "page_parameters",
+            "query_parameters",
+            "parent_menu_item",
+            "target",
+            "children",
+        ]
+
+    def to_representation(self, instance):
+        """Recursively serializes child MenuItemElements."""
+
+        data = super().to_representation(instance)
+        all_items = self.context.get("all_items", [])
+
+        # Get children from all_items to save queries
+        children = [i for i in all_items if instance.id == i.parent_menu_item_id]
+
+        data["children"] = MenuItemSerializer(
+            children, many=True, context=self.context
+        ).data
+
+        return data
+
+
+class NestedMenuItemsMixin(serializers.Serializer):
+    menu_items = serializers.SerializerMethodField(
+        help_text="Menu items of the MenuElement."
+    )
+
+    @extend_schema_field(MenuItemSerializer)
+    def get_menu_items(self, obj):
+        """Return the serialized version of the MenuItemElement."""
+
+        # Prefetches the child MenuItemElements for performance.
+        menu_items = obj.menu_items.all()
+
+        root_items = [
+            child for child in menu_items if child.parent_menu_item_id is None
+        ]
+
+        return MenuItemSerializer(
+            root_items, many=True, context={"all_items": menu_items}
+        ).data
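Note: `MenuItemSerializer.to_representation` resolves children from the single prefetched `all_items` list instead of querying per node, which avoids an N+1 query per nesting level. A standalone sketch of the same flat-list-to-tree idea outside DRF (data shapes illustrative):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Item:
    id: int
    parent_id: Optional[int]
    name: str


def build_tree(items: list[Item], parent_id: Optional[int] = None) -> list[dict]:
    # One in-memory pass per level; the list itself is fetched only once.
    return [
        {"name": i.name, "children": build_tree(items, i.id)}
        for i in items
        if i.parent_id == parent_id
    ]


items = [Item(1, None, "Home"), Item(2, 1, "Docs"), Item(3, 1, "Blog")]
assert build_tree(items)[0]["children"][0]["name"] == "Docs"
```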
builder theme API serializers:

@@ -37,16 +37,35 @@ class DynamicConfigBlockSerializer(serializers.Serializer):
         if not isinstance(theme_config_block_type_name, list):
             theme_config_block_type_name = [theme_config_block_type_name]

-        for prop, type_name in zip(property_name, theme_config_block_type_name):
-            theme_config_block_type = theme_config_block_registry.get(type_name)
-            self.fields[prop] = theme_config_block_type.get_serializer_class(
-                request_serializer=request_serializer
-            )(**({"help_text": f"Styles overrides for {prop}"} | serializer_kwargs))
+        for prop, type_names in zip(property_name, theme_config_block_type_name):
+            if not isinstance(type_names, list):
+                type_names = [type_names]
+
+            config_blocks = (
+                theme_config_block_registry.get(type_name) for type_name in type_names
+            )
+            serializer_class = combine_theme_config_blocks_serializer_class(
+                config_blocks,
+                request_serializer=request_serializer,
+                name="SubConfigBlockSerializer",
+            )
+
+            self.fields[prop] = serializer_class(**serializer_kwargs)
+
+        all_type_names = "".join(
+            [
+                "And".join(sub.capitalize() for sub in p)
+                if isinstance(p, list)
+                else p.capitalize()
+                for p in theme_config_block_type_name
+            ]
+        )

         # Dynamically create the Meta class with ref name to prevent collision
         class DynamicMeta:
-            type_names = "".join([p.capitalize() for p in theme_config_block_type_name])
-            ref_name = f"{type_names}ConfigBlockSerializer"
+            type_names = all_type_names
+            ref_name = f"{all_type_names}ConfigBlockSerializer"
+            meta_ref_name = f"{all_type_names}ConfigBlockSerializer"

         self.Meta = DynamicMeta

@@ -72,6 +91,41 @@ def serialize_builder_theme(builder: Builder) -> dict:
     return theme


+def combine_theme_config_blocks_serializer_class(
+    theme_config_blocks,
+    request_serializer=False,
+    name="CombinedThemeConfigBlocksSerializer",
+) -> serializers.Serializer:
+    """
+    This helper function generates one single serializer that contains all the fields
+    of all the theme config blocks. The API always communicates all theme properties
+    flat in one single object.
+
+    :return: The generated serializer.
+    """
+
+    attrs = {}
+
+    for theme_config_block in theme_config_blocks:
+        serializer = theme_config_block.get_serializer_class(
+            request_serializer=request_serializer
+        )
+        serializer_fields = serializer().get_fields()
+
+        for name, field in serializer_fields.items():
+            attrs[name] = field
+
+    class Meta:
+        ref_name = "".join(t.type.capitalize() for t in theme_config_blocks) + name
+        meta_ref_name = "".join(t.type.capitalize() for t in theme_config_blocks) + name
+
+    attrs["Meta"] = Meta
+
+    class_object = type(name, (serializers.Serializer,), attrs)
+
+    return class_object
+
+
 @cache
 def get_combined_theme_config_blocks_serializer_class(
     request_serializer=False,

@@ -90,28 +144,10 @@ def get_combined_theme_config_blocks_serializer_class(
         "imported before the theme config blocks have been registered."
     )

-    attrs = {}
-
-    for theme_config_block in theme_config_block_registry.get_all():
-        serializer = theme_config_block.get_serializer_class(
-            request_serializer=request_serializer
-        )
-        serializer_fields = serializer().get_fields()
-
-        for name, field in serializer_fields.items():
-            attrs[name] = field
-
-    class Meta:
-        meta_ref_name = "combined_theme_config_blocks_serializer"
-
-    attrs["Meta"] = Meta
-
-    class_object = type(
-        "CombinedThemeConfigBlocksSerializer", (serializers.Serializer,), attrs
-    )
-
-    return class_object
+    return combine_theme_config_blocks_serializer_class(
+        theme_config_block_registry.get_all(), request_serializer=request_serializer
+    )


 CombinedThemeConfigBlocksSerializer = (
     get_combined_theme_config_blocks_serializer_class()
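Note: the extracted helper builds a serializer class at runtime with `type(name, bases, attrs)`; because the base carries DRF's serializer metaclass, the declared fields in `attrs` are collected into `_declared_fields` as usual. A minimal standalone sketch of that composition, assuming DRF is installed (the block classes and field names here are illustrative, not the real Baserow theme blocks):

```python
from rest_framework import serializers


def combine(name: str, *serializer_classes) -> type:
    attrs = {}
    for cls in serializer_classes:
        # get_fields() returns fresh field instances that can be re-declared.
        attrs.update(cls().get_fields())
    # type() defers to the base's metaclass, so this behaves like a normal
    # `class ...(serializers.Serializer)` declaration.
    return type(name, (serializers.Serializer,), attrs)


class ButtonBlock(serializers.Serializer):
    button_background_color = serializers.CharField(required=False)


class LinkBlock(serializers.Serializer):
    link_text_color = serializers.CharField(required=False)


Combined = combine("MenuStylesSerializer", ButtonBlock, LinkBlock)
assert set(Combined().get_fields()) == {"button_background_color", "link_text_color"}
```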
builder application_types.py:

@@ -7,7 +7,6 @@ from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.core.files.storage import Storage
 from django.db import transaction
-from django.db.models import Prefetch
 from django.db.transaction import Atomic
 from django.urls import include, path

@@ -175,7 +174,7 @@ class BuilderApplicationType(ApplicationType):
         pages = PageHandler().get_pages(
             builder,
-            base_queryset=Page.objects_with_shared.prefetch_related(
+            base_queryset=Page.objects.prefetch_related(
                 "element_set", "datasource_set"
             ),
         )

@@ -497,9 +496,7 @@ class BuilderApplicationType(ApplicationType):
     def enhance_queryset(self, queryset):
         queryset = queryset.select_related("favicon_file").prefetch_related(
-            "user_sources",
-            "integrations",
-            Prefetch("page_set", queryset=Page.objects_with_shared.all()),
+            "user_sources", "integrations", "page_set"
         )
         queryset = theme_config_block_registry.enhance_list_builder_queryset(queryset)
         return queryset
builder apps.py:

@@ -183,8 +183,10 @@ class BuilderConfig(AppConfig):
             ImageElementType,
             InputTextElementType,
             LinkElementType,
+            MenuElementType,
             RecordSelectorElementType,
             RepeatElementType,
+            SimpleContainerElementType,
             TableElementType,
             TextElementType,
         )

@@ -207,6 +209,8 @@ class BuilderConfig(AppConfig):
         element_type_registry.register(DateTimePickerElementType())
         element_type_registry.register(HeaderElementType())
         element_type_registry.register(FooterElementType())
+        element_type_registry.register(MenuElementType())
+        element_type_registry.register(SimpleContainerElementType())

         from .domains.domain_types import CustomDomainType, SubDomainType
         from .domains.registries import domain_type_registry

@@ -261,11 +265,11 @@ class BuilderConfig(AppConfig):
         theme_config_block_registry.register(ColorThemeConfigBlockType())
         theme_config_block_registry.register(TypographyThemeConfigBlockType())
         theme_config_block_registry.register(ButtonThemeConfigBlockType())
+        theme_config_block_registry.register(LinkThemeConfigBlockType())
         theme_config_block_registry.register(ImageThemeConfigBlockType())
         theme_config_block_registry.register(PageThemeConfigBlockType())
         theme_config_block_registry.register(InputThemeConfigBlockType())
         theme_config_block_registry.register(TableThemeConfigBlockType())
-        theme_config_block_registry.register(LinkThemeConfigBlockType())

         from .workflow_actions.registries import builder_workflow_action_type_registry
         from .workflow_actions.workflow_action_types import (
builder domains handler:

@@ -1,5 +1,5 @@
 from datetime import datetime, timezone
-from typing import Iterable, List, cast
+from typing import Iterable, List, Optional, cast

 from django.db.models import QuerySet
 from django.db.utils import IntegrityError

@@ -13,8 +13,10 @@ from baserow.contrib.builder.domains.models import Domain
 from baserow.contrib.builder.domains.registries import DomainType
 from baserow.contrib.builder.exceptions import BuilderDoesNotExist
 from baserow.contrib.builder.models import Builder
+from baserow.core.cache import global_cache
 from baserow.core.db import specific_iterator
 from baserow.core.exceptions import IdDoesNotExist
+from baserow.core.models import Workspace
 from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.core.storage import get_default_storage
 from baserow.core.trash.handler import TrashHandler

@@ -193,6 +195,28 @@ class DomainHandler:
         return full_order

+    def get_published_domain_applications(
+        self, workspace: Optional[Workspace] = None
+    ) -> QuerySet[Builder]:
+        """
+        Returns all published domain applications in a workspace, or all published
+        domain applications in the instance if no workspace is provided.
+
+        A domain application is the builder application which is associated with
+        the domain it was published to. It is not the application which the page
+        designer created their application with.
+
+        :param workspace: Only return published domain applications in this workspace.
+        :return: A queryset of published domain applications.
+        """
+
+        applications = Builder.objects.exclude(published_from=None)
+        return (
+            applications.filter(published_from__builder__workspace=workspace)
+            if workspace
+            else applications
+        )
+
     def publish(self, domain: Domain, progress: Progress | None = None):
         """
         Publishes a builder for the given domain object. If the builder was

@@ -252,4 +276,15 @@ class DomainHandler:
         domain.last_published = datetime.now(tz=timezone.utc)
         domain.save()

+        # Invalidate the public builder-by-domain cache after a new publication.
+        DomainHandler.invalidate_public_builder_by_domain_cache(domain.domain_name)
+
         return domain
+
+    @classmethod
+    def get_public_builder_by_domain_cache_key(cls, domain_name: str) -> str:
+        return f"ab_public_builder_by_domain_{domain_name}"
+
+    @classmethod
+    def invalidate_public_builder_by_domain_cache(cls, domain_name: str):
+        global_cache.invalidate(cls.get_public_builder_by_domain_cache_key(domain_name))
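Note: publishing is the write path that pairs with the cached read path in the public views above; dropping the key on publish guarantees the next request re-serializes the fresh builder. A pure-Python sketch of that flow, using the cache-key shape from the diff (a dict stands in for the shared cache):

```python
CACHE: dict[str, object] = {}


def cache_key(domain_name: str) -> str:
    # Same shape as DomainHandler.get_public_builder_by_domain_cache_key.
    return f"ab_public_builder_by_domain_{domain_name}"


def get_public_builder(domain_name: str) -> object:
    if cache_key(domain_name) not in CACHE:
        CACHE[cache_key(domain_name)] = {"serialized": f"builder for {domain_name}"}
    return CACHE[cache_key(domain_name)]


def publish(domain_name: str) -> None:
    # ... copy the builder to the domain, update last_published ...
    CACHE.pop(cache_key(domain_name), None)  # drop the stale public snapshot


get_public_builder("example.com")
publish("example.com")
assert cache_key("example.com") not in CACHE
```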
builder element types:

@@ -15,13 +15,17 @@ from typing import (
 from django.core.exceptions import ValidationError
 from django.core.validators import validate_email
-from django.db.models import IntegerField, QuerySet
+from django.db.models import IntegerField, Q, QuerySet
 from django.db.models.functions import Cast

 from rest_framework import serializers
 from rest_framework.exceptions import ValidationError as DRFValidationError

-from baserow.contrib.builder.api.elements.serializers import ChoiceOptionSerializer
+from baserow.contrib.builder.api.elements.serializers import (
+    ChoiceOptionSerializer,
+    MenuItemSerializer,
+    NestedMenuItemsMixin,
+)
 from baserow.contrib.builder.data_providers.exceptions import (
     FormDataProviderChunkInvalidException,
 )

@@ -47,13 +51,17 @@ from baserow.contrib.builder.elements.models import (
     FormContainerElement,
     HeaderElement,
     HeadingElement,
+    HorizontalAlignments,
     IFrameElement,
     ImageElement,
     InputTextElement,
     LinkElement,
+    MenuElement,
+    MenuItemElement,
     NavigationElementMixin,
     RecordSelectorElement,
     RepeatElement,
+    SimpleContainerElement,
     TableElement,
     TextElement,
     VerticalAlignments,

@@ -70,6 +78,7 @@ from baserow.contrib.builder.theme.theme_config_block_types import (
     TableThemeConfigBlockType,
 )
 from baserow.contrib.builder.types import ElementDict
+from baserow.contrib.builder.workflow_actions.models import BuilderWorkflowAction
 from baserow.core.constants import (
     DATE_FORMAT,
     DATE_FORMAT_CHOICES,

@@ -270,6 +279,17 @@ class FormContainerElementType(ContainerElementTypeMixin, ElementType):
         ]


+class SimpleContainerElementType(ContainerElementTypeMixin, ElementType):
+    type = "simple_container"
+    model_class = SimpleContainerElement
+
+    class SerializedDict(ContainerElementTypeMixin.SerializedDict):
+        pass
+
+    def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]:
+        return {}
+
+
 class TableElementType(CollectionElementWithFieldsTypeMixin, ElementType):
     type = "table"
     model_class = TableElement

@@ -303,10 +323,11 @@ class TableElementType(CollectionElementWithFieldsTypeMixin, ElementType):
             ),
             "styles": DynamicConfigBlockSerializer(
                 required=False,
-                property_name=["button", "table"],
+                property_name=["button", "table", "header_button"],
                 theme_config_block_type_name=[
                     ButtonThemeConfigBlockType.type,
                     TableThemeConfigBlockType.type,
+                    ButtonThemeConfigBlockType.type,
                 ],
                 serializer_kwargs={"required": False},
             ),

@@ -366,8 +387,11 @@ class RepeatElementType(
             **super().serializer_field_overrides,
             "styles": DynamicConfigBlockSerializer(
                 required=False,
-                property_name="button",
-                theme_config_block_type_name=ButtonThemeConfigBlockType.type,
+                property_name=["button", "header_button"],
+                theme_config_block_type_name=[
+                    ButtonThemeConfigBlockType.type,
+                    ButtonThemeConfigBlockType.type,
+                ],
                 serializer_kwargs={"required": False},
             ),
         }
@@ -1961,3 +1985,327 @@ class FooterElementType(MultiPageContainerElementType):
     type = "footer"
     model_class = FooterElement
+
+
+class MenuElementType(ElementType):
+    """
+    A Menu element that provides navigation capabilities to the application.
+    """
+
+    type = "menu"
+    model_class = MenuElement
+    serializer_field_names = ["orientation", "alignment", "menu_items"]
+    allowed_fields = ["orientation", "alignment"]
+
+    serializer_mixins = [NestedMenuItemsMixin]
+    request_serializer_mixins = []
+
+    class SerializedDict(ElementDict):
+        orientation: str
+        alignment: str
+        menu_items: List[Dict]
+
+    @property
+    def serializer_field_overrides(self) -> Dict[str, Any]:
+        from baserow.contrib.builder.api.theme.serializers import (
+            DynamicConfigBlockSerializer,
+        )
+        from baserow.contrib.builder.theme.theme_config_block_types import (
+            ButtonThemeConfigBlockType,
+            LinkThemeConfigBlockType,
+        )
+
+        overrides = {
+            **super().serializer_field_overrides,
+            "styles": DynamicConfigBlockSerializer(
+                required=False,
+                property_name="menu",
+                theme_config_block_type_name=[
+                    [ButtonThemeConfigBlockType.type, LinkThemeConfigBlockType.type]
+                ],
+                serializer_kwargs={"required": False},
+            ),
+        }
+        return overrides
+
+    @property
+    def request_serializer_field_overrides(self) -> Dict[str, Any]:
+        return {
+            **self.serializer_field_overrides,
+            "menu_items": MenuItemSerializer(many=True, required=False),
+        }
+
+    def enhance_queryset(
+        self, queryset: QuerySet[MenuItemElement]
+    ) -> QuerySet[MenuItemElement]:
+        return queryset.prefetch_related("menu_items")
+
+    def before_delete(self, instance: MenuElement) -> None:
+        """
+        Handle any clean-up needed before the MenuElement is deleted.
+
+        Deletes all related objects of this MenuElement instance, such as Menu
+        Items and Workflow actions.
+        """
+
+        self.delete_workflow_actions(instance)
+        instance.menu_items.all().delete()
+
+    def after_create(self, instance: MenuItemElement, values: Dict[str, Any]) -> None:
+        """
+        After a MenuElement is created, MenuItemElements are bulk-created
+        using the information in the "menu_items" array.
+        """
+
+        menu_items = values.get("menu_items", [])
+
+        created_menu_items = MenuItemElement.objects.bulk_create(
+            [
+                MenuItemElement(**item, menu_item_order=index)
+                for index, item in enumerate(menu_items)
+            ]
+        )
+        instance.menu_items.add(*created_menu_items)
+
+    def delete_workflow_actions(
+        self, instance: MenuElement, menu_item_uids_to_keep: Optional[List[str]] = None
+    ) -> None:
+        """
+        Deletes all Workflow actions related to a specific MenuElement instance.
+
+        :param instance: The MenuElement instance for which related Workflow
+            actions will be deleted.
+        :param menu_item_uids_to_keep: An optional list of UUIDs. If a related
+            Workflow action matches a UUID in this list, it will *not* be deleted.
+        :return: None
+        """
+
+        # Get all workflow actions associated with this menu element.
+        all_workflow_actions = BuilderWorkflowAction.objects.filter(element=instance)
+
+        # If there are menu items, only keep workflow actions that match
+        # existing menu items.
+        if menu_item_uids_to_keep:
+            workflow_actions_to_keep_query = Q()
+            for uid in menu_item_uids_to_keep:
+                workflow_actions_to_keep_query |= Q(event__startswith=uid)
+
+            # Find Workflow actions to delete (those not matching any
+            # current Menu Item).
+            workflow_actions_to_delete = all_workflow_actions.exclude(
+                workflow_actions_to_keep_query
+            )
+        else:
+            # Since there are no Menu Items, delete all Workflow actions
+            # for this element.
+            workflow_actions_to_delete = all_workflow_actions
+
+        # Delete the workflow actions that are no longer associated with
+        # any menu item.
+        if workflow_actions_to_delete.exists():
+            workflow_actions_to_delete.delete()
+
+    def after_update(self, instance: MenuElement, values, changes: Dict[str, Tuple]):
+        """
+        After the element has been updated we need to update the fields.
+
+        :param instance: The instance of the element that has been updated.
+        :param values: The values that have been updated.
+        :param changes: A dictionary containing all changes which were made to the
+            collection element prior to `after_update` being called.
+        :return: None
+        """
+
+        if "menu_items" in values:
+            instance.menu_items.all().delete()
+
+            menu_item_uids_to_keep = [item["uid"] for item in values["menu_items"]]
+            self.delete_workflow_actions(instance, menu_item_uids_to_keep)
+
+            items_to_create = []
+            child_uids_parent_uids = {}
+
+            keys_to_remove = ["parent_menu_item", "menu_item_order"]
+            for index, item in enumerate(values["menu_items"]):
+                for key in keys_to_remove:
+                    item.pop(key, None)
+
+                # Keep track of child-parent relationship via the uid
+                for child_index, child in enumerate(item.pop("children", [])):
+                    for key in keys_to_remove + ["children"]:
+                        child.pop(key, None)
+
+                    items_to_create.append(
+                        MenuItemElement(**child, menu_item_order=child_index)
+                    )
+                    child_uids_parent_uids[str(child["uid"])] = str(item["uid"])
+
+                items_to_create.append(MenuItemElement(**item, menu_item_order=index))
+
+            created_items = MenuItemElement.objects.bulk_create(items_to_create)
+            instance.menu_items.add(*created_items)
+
+            # Re-associate the child-parent
+            for item in instance.menu_items.all():
+                if parent_uid := child_uids_parent_uids.get(str(item.uid)):
+                    parent_item = instance.menu_items.filter(uid=parent_uid).first()
+                    item.parent_menu_item = parent_item
+                    item.save()
+
+        super().after_update(instance, values, changes)
+
+    def get_pytest_params(self, pytest_data_fixture):
+        return {
+            "orientation": RepeatElement.ORIENTATIONS.VERTICAL,
+            "alignment": HorizontalAlignments.LEFT,
+        }
+
+    def deserialize_property(
+        self,
+        prop_name: str,
+        value: Any,
+        id_mapping: Dict[str, Any],
+        files_zip=None,
+        storage=None,
+        cache=None,
+        **kwargs,
+    ) -> Any:
+        if prop_name == "menu_items":
+            updated_menu_items = []
+            for item in value:
+                updated = {}
+                for item_key, item_value in item.items():
+                    new_value = super().deserialize_property(
+                        item_key,
+                        NavigationElementManager().deserialize_property(
+                            item_key, item_value, id_mapping, **kwargs
+                        ),
+                        id_mapping,
+                        files_zip=files_zip,
+                        storage=storage,
+                        cache=cache,
+                        **kwargs,
+                    )
+                    updated[item_key] = new_value
+                updated_menu_items.append(updated)
+            return updated_menu_items
+
+        return super().deserialize_property(
+            prop_name,
+            value,
+            id_mapping,
+            files_zip=files_zip,
+            storage=storage,
+            cache=cache,
+            **kwargs,
+        )
+
+    def serialize_property(
+        self,
+        element: MenuElement,
+        prop_name: str,
+        files_zip=None,
+        storage=None,
+        cache=None,
+        **kwargs,
+    ) -> Any:
+        if prop_name == "menu_items":
+            return MenuItemSerializer(
+                element.menu_items.all(),
+                many=True,
+            ).data
+
+        return super().serialize_property(
+            element,
+            prop_name,
+            files_zip=files_zip,
+            storage=storage,
+            cache=cache,
+            **kwargs,
+        )
+
+    def create_instance_from_serialized(
+        self,
+        serialized_values: Dict[str, Any],
+        id_mapping,
+        files_zip=None,
+        storage=None,
+        cache=None,
+        **kwargs,
+    ) -> MenuElement:
+        menu_items = serialized_values.pop("menu_items", [])
+
+        instance = super().create_instance_from_serialized(
+            serialized_values,
+            id_mapping,
+            files_zip=files_zip,
+            storage=storage,
+            cache=cache,
+            **kwargs,
+        )
+
+        menu_items_to_create = []
+        child_uids_parent_uids = {}
+
+        ids_uids = {i["id"]: i["uid"] for i in menu_items}
+        keys_to_remove = ["id", "menu_item_order", "children"]
+        for index, item in enumerate(menu_items):
+            for key in keys_to_remove:
+                item.pop(key, None)
+
+            # Keep track of child-parent relationship via the uid
+            if parent_id := item.pop("parent_menu_item", None):
+                child_uids_parent_uids[item["uid"]] = ids_uids[parent_id]
+
+            menu_items_to_create.append(MenuItemElement(**item, menu_item_order=index))
+
+        created_menu_items = MenuItemElement.objects.bulk_create(menu_items_to_create)
+        instance.menu_items.add(*created_menu_items)
+
+        # Re-associate the child-parent
+        for item in instance.menu_items.all():
+            if parent_uid := child_uids_parent_uids.get(str(item.uid)):
+                parent_item = instance.menu_items.filter(uid=parent_uid).first()
+                item.parent_menu_item = parent_item
+                item.save()
+
+        return instance
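Note: both `after_update` above and `create_instance_from_serialized` re-link nested menu items in two passes, because primary keys are unknown until `bulk_create` returns, while `uid`s stay stable across export/import. A pure-Python sketch of the idea (data shapes illustrative, not the real serialized format):

```python
from uuid import uuid4

parent_uid, child_uid = str(uuid4()), str(uuid4())
serialized = [
    {"uid": parent_uid, "name": "Products", "parent_uid": None},
    {"uid": child_uid, "name": "Pricing", "parent_uid": parent_uid},
]

# Pass 1: "create" every item flat, as bulk_create would, keeping a
# child-uid -> parent-uid map on the side.
created = {item["uid"]: {"name": item["name"], "parent": None} for item in serialized}
child_to_parent = {
    item["uid"]: item["parent_uid"] for item in serialized if item["parent_uid"]
}

# Pass 2: re-link children, now that every item exists.
for uid, p_uid in child_to_parent.items():
    created[uid]["parent"] = created[p_uid]

assert created[child_uid]["parent"]["name"] == "Products"
```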
+    def formula_generator(
+        self, element: Element
+    ) -> Generator[str | Instance, str, None]:
+        """
+        Generator that returns formula fields for the MenuElementType.
+
+        The MenuElement has a menu_items field, which is a many-to-many
+        relationship with MenuItemElement. The MenuItemElement has navigation
+        related fields like page_parameters, yet does not have a type of its
+        own.
+
+        This method ensures that any formulas found inside MenuItemElements
+        are extracted correctly. It ensures that when a formula is declared
+        in page_parameters, etc., the resolved formula value is available
+        in the frontend.
+        """
+
+        yield from super().formula_generator(element)
+
+        for item in element.menu_items.all():
+            for index, data in enumerate(item.page_parameters or []):
+                new_formula = yield data["value"]
+                if new_formula is not None:
+                    item.page_parameters[index]["value"] = new_formula
+                    yield item
+
+            for index, data in enumerate(item.query_parameters or []):
+                new_formula = yield data["value"]
+                if new_formula is not None:
+                    item.query_parameters[index]["value"] = new_formula
+                    yield item
+
+            for formula_field in NavigationElementManager.simple_formula_fields:
+                formula = getattr(item, formula_field, "")
+                new_formula = yield formula
+                if new_formula is not None:
+                    setattr(item, formula_field, new_formula)
+                    yield item
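Note: `formula_generator` relies on the generator `send()` protocol: the caller iterates formulas and may push a replacement back, after which the generator yields the mutated item so the caller can persist it. A standalone sketch of that contract (field names illustrative):

```python
from typing import Generator, Optional


def formula_fields(item: dict) -> Generator[str, Optional[str], None]:
    # Yield each formula; if the consumer send()s a replacement, store it
    # and yield the mutated item so the consumer can persist it.
    for key in ("navigate_to_url", "value"):
        new_formula = yield item.get(key, "")
        if new_formula is not None:
            item[key] = new_formula
            yield item  # acknowledge the mutation


item = {"navigate_to_url": "get('page_parameter.id')", "value": ""}
gen = formula_fields(item)
next(gen)                   # first formula
gen.send("get('new.id')")   # replace it; generator yields the item back
next(gen)                   # advance to the next formula
assert item["navigate_to_url"] == "get('new.id')"
```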
builder elements handler:

@@ -237,7 +237,14 @@ class ElementHandler:
         """

         if specific:
-            elements = specific_iterator(base_queryset)
+            elements = specific_iterator(
+                base_queryset,
+                per_content_type_queryset_hook=(
+                    lambda element, queryset: element_type_registry.get_by_model(
+                        element
+                    ).enhance_queryset(queryset)
+                ),
+            )
         else:
             elements = base_queryset
builder elements mixins:

@@ -834,7 +834,7 @@ class MultiPageElementTypeMixin:
         if "pages" in values:
             pages = PageHandler().get_pages(
                 instance.page.builder,
-                base_queryset=Page.objects.filter(
+                base_queryset=Page.objects_without_shared.filter(
                     id__in=[p.id for p in values["pages"]]
                 ),
             )

@@ -852,7 +852,7 @@ class MultiPageElementTypeMixin:
         if "pages" in values:
             pages = PageHandler().get_pages(
                 instance.page.builder,
-                base_queryset=Page.objects.filter(
+                base_queryset=Page.objects_without_shared.filter(
                     id__in=[p.id for p in values["pages"]]
                 ),
             )
builder elements models:

@@ -9,6 +9,7 @@ from django.db.models import SET_NULL, QuerySet
 from baserow.contrib.builder.constants import (
     BACKGROUND_IMAGE_MODES,
     COLOR_FIELD_MAX_LENGTH,
+    HorizontalAlignments,
     VerticalAlignments,
 )
 from baserow.core.constants import DATE_FORMAT_CHOICES, DATE_TIME_FORMAT_CHOICES

@@ -990,3 +991,84 @@ class FooterElement(MultiPageElement, ContainerElement):
     """
     A multi-page container element positioned at the bottom of the page.
     """
+
+
+class MenuItemElement(NavigationElementMixin):
+    """
+    An item in a MenuElement.
+    """
+
+    class VARIANTS(models.TextChoices):
+        LINK = "link"
+        BUTTON = "button"
+
+    variant = models.CharField(
+        choices=VARIANTS.choices,
+        help_text="The variant of the link.",
+        max_length=10,
+        default=VARIANTS.LINK,
+    )
+
+    class TYPES(models.TextChoices):
+        BUTTON = "button"
+        LINK = "link"
+        SEPARATOR = "separator"
+        SPACER = "spacer"
+
+    type = models.CharField(
+        choices=TYPES.choices,
+        help_text="The type of the Menu Item.",
+        max_length=9,
+        default=TYPES.LINK,
+    )
+
+    name = models.CharField(
+        max_length=225,
+        help_text="The name of the Menu Item.",
+    )
+
+    menu_item_order = models.PositiveIntegerField()
+    uid = models.UUIDField(default=uuid.uuid4)
+
+    parent_menu_item = models.ForeignKey(
+        "self",
+        on_delete=models.CASCADE,
+        null=True,
+        default=None,
+        help_text="The parent MenuItemElement element, if it is a nested item.",
+        related_name="menu_item_children",
+    )
+
+    class Meta:
+        ordering = ("menu_item_order",)
+
+
+class MenuElement(Element):
+    """
+    A menu element that helps with navigating the application.
+    """
+
+    class ORIENTATIONS(models.TextChoices):
+        HORIZONTAL = "horizontal"
+        VERTICAL = "vertical"
+
+    orientation = models.CharField(
+        choices=ORIENTATIONS.choices,
+        max_length=10,
+        default=ORIENTATIONS.HORIZONTAL,
+        db_default=ORIENTATIONS.HORIZONTAL,
+    )
+
+    alignment = models.CharField(
+        choices=HorizontalAlignments.choices,
+        max_length=10,
+        default=HorizontalAlignments.LEFT,
+    )
+
+    menu_items = models.ManyToManyField(MenuItemElement)
+
+
+class SimpleContainerElement(ContainerElement):
+    """
+    A simple container to group elements
+    """
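Note: nesting lives on `MenuItemElement.parent_menu_item`, not on the `menu_items` M2M, which only attaches items to their menu. A hedged usage sketch (assumes a configured Django project with this app installed; not runnable standalone):

```python
from baserow.contrib.builder.elements.models import MenuElement, MenuItemElement


def add_items(menu: MenuElement) -> None:
    parent, child = MenuItemElement.objects.bulk_create(
        [
            MenuItemElement(name="Products", type="link", menu_item_order=0),
            MenuItemElement(name="Pricing", type="button", menu_item_order=1),
        ]
    )
    menu.menu_items.add(parent, child)
    # Nesting is expressed on the item itself, not on the M2M table.
    child.parent_menu_item = parent
    child.save(update_fields=["parent_menu_item"])
```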
builder formula_property_extractor.py:

@@ -1,7 +1,5 @@
 from typing import TYPE_CHECKING, Dict, List, Set

-from django.contrib.auth.models import AbstractUser
-
 from antlr4.tree import Tree

 from baserow.contrib.builder.data_providers.registries import (

@@ -11,6 +9,7 @@ from baserow.contrib.builder.elements.models import Element
 from baserow.contrib.builder.formula_importer import BaserowFormulaImporter
 from baserow.core.formula import BaserowFormula
 from baserow.core.formula.exceptions import InvalidBaserowFormula
+from baserow.core.user_sources.user_source_user import UserSourceUser
 from baserow.core.utils import merge_dicts_no_duplicates, to_path

 if TYPE_CHECKING:

@@ -176,10 +175,10 @@ def get_data_source_property_names(
 def get_builder_used_property_names(
-    user: AbstractUser, builder: "Builder"
+    user: UserSourceUser, builder: "Builder"
 ) -> Dict[str, Dict[int, List[str]]]:
     """
-    Given a User and a Builder, return all property names used in all the
+    Given a UserSourceUser and a Builder, return all property names used in all the
     pages.

     This involves looping over all Elements, Workflow Actions, and Data Sources
@ -1,22 +1,31 @@
|
|||
from typing import Dict, List, Optional
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.db.models.query import QuerySet
|
||||
|
||||
from baserow.contrib.builder.formula_property_extractor import (
|
||||
get_builder_used_property_names,
|
||||
)
|
||||
from baserow.contrib.builder.models import Builder
|
||||
from baserow.contrib.builder.theme.registries import theme_config_block_registry
|
||||
from baserow.core.cache import global_cache
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.utils import invalidate_versioned_cache, safe_get_or_set_cache
|
||||
from baserow.core.models import Workspace
|
||||
from baserow.core.user_sources.handler import UserSourceHandler
|
||||
from baserow.core.user_sources.models import UserSource
|
||||
from baserow.core.user_sources.user_source_user import UserSourceUser
|
||||
|
||||
User = get_user_model()
|
||||
CACHE_KEY_PREFIX = "used_properties_for_page"
|
||||
USED_PROPERTIES_CACHE_KEY_PREFIX = "used_properties_for_page"
|
||||
|
||||
# The duration of the cached public element, data source and workflow action API views.
|
||||
BUILDER_PUBLIC_RECORDS_CACHE_TTL_SECONDS = 60 * 60
|
||||
|
||||
# The duration of the cached public `get_public_builder_by_domain_name` view.
|
||||
BUILDER_PUBLIC_BUILDER_BY_DOMAIN_TTL_SECONDS = 60 * 60
|
||||
|
||||
# The duration of the cached public properties for the builder API views.
|
||||
BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS = 60
|
||||
|
||||
|
||||
SENTINEL = "__no_results__"
|
||||
|
||||
|
||||
|
@ -46,20 +55,15 @@ class BuilderHandler:
|
|||
)
|
||||
|
||||
@classmethod
|
||||
def _get_builder_version_cache(cls, builder: Builder):
|
||||
return f"{CACHE_KEY_PREFIX}_version_{builder.id}"
|
||||
def _get_builder_public_properties_version_cache(cls, builder: Builder) -> str:
|
||||
return f"{USED_PROPERTIES_CACHE_KEY_PREFIX}_version_{builder.id}"
|
||||
|
||||
def get_builder_used_properties_cache_key(
|
||||
self, user: AbstractUser, builder: Builder
|
||||
) -> Optional[str]:
|
||||
self, user: UserSourceUser, builder: Builder
|
||||
) -> str:
|
||||
"""
|
||||
Returns a cache key that can be used to key the results of making the
|
||||
expensive function call to get_builder_used_property_names().
|
||||
|
||||
If the user is a Django user, return None. This is because the Page
|
||||
Designer should always have the latest data in the Preview (e.g. when
|
||||
they are not authenticated). Also, the Django user doesn't have the role
|
||||
attribute, unlike the User Source User.
|
||||
"""
|
||||
|
||||
if user.is_anonymous or not user.role:
|
||||
|
@ -68,14 +72,16 @@ class BuilderHandler:
|
|||
else:
|
||||
role = f"_{user.role}"
|
||||
|
||||
return f"{CACHE_KEY_PREFIX}_{builder.id}{role}"
|
||||
return f"{USED_PROPERTIES_CACHE_KEY_PREFIX}_{builder.id}{role}"
|
||||
|
||||
@classmethod
|
||||
def invalidate_builder_public_properties_cache(cls, builder):
|
||||
invalidate_versioned_cache(cls._get_builder_version_cache(builder))
|
||||
def invalidate_builder_public_properties_cache(cls, builder: Builder):
|
||||
global_cache.invalidate(
|
||||
invalidate_key=cls._get_builder_public_properties_version_cache(builder)
|
||||
)
|
||||
|
||||
def get_builder_public_properties(
|
||||
self, user: AbstractUser, builder: Builder
|
||||
self, user: UserSourceUser, builder: Builder
|
||||
) -> Dict[str, Dict[int, List[str]]]:
|
||||
"""
|
||||
Return a Dict where keys are ["all", "external", "internal"] and values
|
||||
|
@ -94,13 +100,53 @@ class BuilderHandler:
|
|||
properties = get_builder_used_property_names(user, builder)
|
||||
return SENTINEL if properties is None else properties
|
||||
|
||||
result = safe_get_or_set_cache(
|
||||
result = global_cache.get(
|
||||
self.get_builder_used_properties_cache_key(user, builder),
|
||||
self._get_builder_version_cache(builder),
|
||||
default=compute_properties,
|
||||
# We want to invalidate the cache for all roles at once so we create a
|
||||
# unique key for all.
|
||||
invalidate_key=self._get_builder_public_properties_version_cache(builder),
|
||||
timeout=settings.BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS
|
||||
if builder.workspace_id
|
||||
else BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS,
|
||||
)
|
||||
|
||||
return result if result != SENTINEL else None
|
||||
|
||||
def get_published_applications(
|
||||
self, workspace: Optional[Workspace] = None
|
||||
) -> QuerySet[Builder]:
|
||||
"""
|
||||
Returns all published applications in a workspace or all published applications
|
||||
in the instance if no workspace is provided.
|
||||
|
||||
A published application is a builder application which points to one more
|
||||
published domains. The application is the one that the page designer is
|
||||
creating their application in.
|
||||
|
||||
:param workspace: Only return published applications in this workspace.
|
||||
:return: A queryset of published applications.
|
||||
"""
|
||||
|
||||
applications = Builder.objects.exclude(domains__published_to=None)
|
||||
return applications.filter(workspace=workspace) if workspace else applications
|
||||
|
||||
def aggregate_user_source_counts(
|
||||
self,
|
||||
workspace: Optional[Workspace] = None,
|
||||
) -> int:
|
||||
"""
|
||||
The builder implementation of the `UserSourceHandler.aggregate_user_counts`
|
||||
method, we need it to only count user sources in published applications.
|
||||
|
||||
:param workspace: If provided, only count user sources in published
|
||||
applications within this workspace.
|
||||
:return: The total number of user sources in published applications.
|
||||
"""
|
||||
|
||||
queryset = UserSourceHandler().get_user_sources(
|
||||
base_queryset=UserSource.objects.filter(
|
||||
application__in=self.get_published_applications(workspace)
|
||||
)
|
||||
)
|
||||
return UserSourceHandler().aggregate_user_counts(workspace, queryset)
|
||||
|
|
|
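The `BuilderHandler` hunks above move the role-scoped "used properties" cache onto a shared `global_cache` keyed with a per-builder `invalidate_key`, so every role's entry can be dropped in one call. A minimal sketch of that versioned-invalidation idea follows; the real `baserow.core.cache.global_cache` is only assumed to expose the `get(...)`/`invalidate(...)` signature visible in the diff.

```python
import time
from typing import Any, Callable, Dict, Optional


class GlobalCacheSketch:
    """Toy stand-in for a versioned cache (assumed API, not Baserow's real one).

    Entries cached under the same ``invalidate_key`` share a version number,
    so bumping that version invalidates all of them at once — e.g. every
    role-scoped "used properties" entry for a single builder.
    """

    def __init__(self) -> None:
        self._store: Dict[str, Any] = {}
        self._versions: Dict[Optional[str], int] = {}

    def get(
        self,
        key: str,
        default: Callable[[], Any],
        invalidate_key: Optional[str] = None,
        timeout: int = 60,
    ) -> Any:
        version = self._versions.get(invalidate_key, 0)
        versioned_key = f"{key}:v{version}"
        entry = self._store.get(versioned_key)
        if entry is not None and entry[1] > time.monotonic():
            return entry[0]
        value = default()  # compute on miss, then store with an expiry
        self._store[versioned_key] = (value, time.monotonic() + timeout)
        return value

    def invalidate(self, invalidate_key: str) -> None:
        # Bump the shared version; stale versioned keys simply expire.
        self._versions[invalidate_key] = self._versions.get(invalidate_key, 0) + 1


cache = GlobalCacheSketch()
cache.get("props_1_admin", default=lambda: {"all": {}}, invalidate_key="props_version_1")
cache.invalidate("props_version_1")  # drops every role's entry for builder 1
```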
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-01-15 11:59+0000\n"
"POT-Creation-Date: 2025-03-05 11:01+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"

@@ -46,18 +46,18 @@ msgstr ""
msgid "Last name"
msgstr ""

#: src/baserow/contrib/builder/data_providers/data_provider_types.py:452
#: src/baserow/contrib/builder/data_providers/data_provider_types.py:563
#, python-format
msgid "%(user_source_name)s member"
msgstr ""

#: src/baserow/contrib/builder/data_sources/service.py:154
#: src/baserow/contrib/builder/data_sources/service.py:158
msgid "Data source"
msgstr ""

#: src/baserow/contrib/builder/elements/mixins.py:578
#: src/baserow/contrib/builder/elements/mixins.py:583
#: src/baserow/contrib/builder/elements/mixins.py:588
#: src/baserow/contrib/builder/elements/mixins.py:586
#: src/baserow/contrib/builder/elements/mixins.py:591
#: src/baserow/contrib/builder/elements/mixins.py:596
#, python-format
msgid "Column %(count)s"
msgstr ""
@@ -0,0 +1,165 @@
# Generated by Django 5.0.9 on 2025-02-25 09:11

import uuid

import django.db.models.deletion
from django.db import migrations, models

import baserow.core.formula.field


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0051_alter_builderworkflowaction_options"),
    ]

    operations = [
        migrations.CreateModel(
            name="MenuItemElement",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "navigation_type",
                    models.CharField(
                        choices=[("page", "Page"), ("custom", "Custom")],
                        default="page",
                        help_text="The navigation type.",
                        max_length=10,
                        null=True,
                    ),
                ),
                (
                    "navigate_to_url",
                    baserow.core.formula.field.FormulaField(
                        default="",
                        help_text="If no page is selected, this indicates the destination of the link.",
                        null=True,
                    ),
                ),
                (
                    "page_parameters",
                    models.JSONField(
                        default=list,
                        help_text="The parameters for each parameter of the selected page if any.",
                        null=True,
                    ),
                ),
                (
                    "query_parameters",
                    models.JSONField(
                        db_default=[],
                        default=list,
                        help_text="The query parameters for each parameter of the selected page if any.",
                        null=True,
                    ),
                ),
                (
                    "target",
                    models.CharField(
                        choices=[("self", "Self"), ("blank", "Blank")],
                        default="self",
                        help_text="The target of the link when we click on it.",
                        max_length=10,
                        null=True,
                    ),
                ),
                (
                    "variant",
                    models.CharField(
                        choices=[("link", "Link"), ("button", "Button")],
                        default="link",
                        help_text="The variant of the link.",
                        max_length=10,
                    ),
                ),
                (
                    "type",
                    models.CharField(
                        choices=[
                            ("button", "Button"),
                            ("link", "Link"),
                            ("separator", "Separator"),
                            ("spacer", "Spacer"),
                        ],
                        default="link",
                        help_text="The type of the Menu Item.",
                        max_length=9,
                    ),
                ),
                (
                    "name",
                    models.CharField(
                        help_text="The name of the Menu Item.", max_length=225
                    ),
                ),
                ("menu_item_order", models.PositiveIntegerField()),
                ("uid", models.UUIDField(default=uuid.uuid4)),
                (
                    "navigate_to_page",
                    models.ForeignKey(
                        help_text=(
                            "Destination page id for this link. If null then we use the navigate_to_url property instead."
                        ),
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="builder.page",
                    ),
                ),
                (
                    "parent_menu_item",
                    models.ForeignKey(
                        default=None,
                        help_text="The parent MenuItemElement element, if it is a nested item.",
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="menu_item_children",
                        to="builder.menuitemelement",
                    ),
                ),
            ],
            options={
                "ordering": ("menu_item_order",),
            },
        ),
        migrations.CreateModel(
            name="MenuElement",
            fields=[
                (
                    "element_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="builder.element",
                    ),
                ),
                (
                    "orientation",
                    models.CharField(
                        choices=[
                            ("horizontal", "Horizontal"),
                            ("vertical", "Vertical"),
                        ],
                        db_default="horizontal",
                        default="horizontal",
                        max_length=10,
                    ),
                ),
                ("menu_items", models.ManyToManyField(to="builder.menuitemelement")),
            ],
            options={
                "abstract": False,
            },
            bases=("builder.element",),
        ),
    ]
@@ -0,0 +1,312 @@
# Generated by Django 5.0.9 on 2025-03-05 10:12

import django.db.models.deletion
from django.db import migrations, models

import baserow.core.fields


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0052_menuitemelement_menuelement"),
    ]

    operations = [
        migrations.AddField(
            model_name="buttonthemeconfigblock",
            name="button_active_background_color",
            field=models.CharField(
                blank=True,
                default="#4783db",
                help_text="The background color of buttons when active",
                max_length=255,
            ),
        ),
        migrations.AddField(
            model_name="buttonthemeconfigblock",
            name="button_active_border_color",
            field=models.CharField(
                blank=True,
                default="#275d9f",
                help_text="The border color of buttons when active",
                max_length=255,
            ),
        ),
        migrations.AddField(
            model_name="buttonthemeconfigblock",
            name="button_active_text_color",
            field=models.CharField(
                blank=True,
                default="#ffffffff",
                help_text="The text color of buttons when active",
                max_length=255,
            ),
        ),
        migrations.AddField(
            model_name="linkthemeconfigblock",
            name="link_active_text_color",
            field=models.CharField(
                blank=True,
                default="#275d9f",
                help_text="The hover color of links when active",
                max_length=255,
            ),
        ),
        migrations.AddField(
            model_name="menuelement",
            name="alignment",
            field=models.CharField(
                choices=[("left", "Left"), ("center", "Center"), ("right", "Right")],
                default="left",
                max_length=10,
            ),
        ),
        migrations.CreateModel(
            name="MenuThemeConfigBlock",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "button_font_family",
                    models.CharField(default="inter", max_length=250),
                ),
                ("button_font_size", models.SmallIntegerField(default=13)),
                (
                    "button_font_weight",
                    models.CharField(
                        choices=[
                            ("thin", "Thin"),
                            ("extra-light", "Extra Light"),
                            ("light", "Light"),
                            ("regular", "Regular"),
                            ("medium", "Medium"),
                            ("semi-bold", "Semi Bold"),
                            ("bold", "Bold"),
                            ("extra-bold", "Extra Bold"),
                            ("heavy", "Heavy"),
                            ("black", "Black"),
                            ("extra-black", "Extra Black"),
                        ],
                        db_default="regular",
                        default="regular",
                        max_length=11,
                    ),
                ),
                (
                    "button_alignment",
                    models.CharField(
                        choices=[
                            ("left", "Left"),
                            ("center", "Center"),
                            ("right", "Right"),
                        ],
                        default="left",
                        max_length=10,
                    ),
                ),
                (
                    "button_text_alignment",
                    models.CharField(
                        choices=[
                            ("left", "Left"),
                            ("center", "Center"),
                            ("right", "Right"),
                        ],
                        default="center",
                        max_length=10,
                    ),
                ),
                (
                    "button_width",
                    models.CharField(
                        choices=[("auto", "Auto"), ("full", "Full")],
                        default="auto",
                        max_length=10,
                    ),
                ),
                (
                    "button_background_color",
                    models.CharField(
                        blank=True,
                        default="primary",
                        help_text="The background color of buttons",
                        max_length=255,
                    ),
                ),
                (
                    "button_text_color",
                    models.CharField(
                        blank=True,
                        default="#ffffffff",
                        help_text="The text color of buttons",
                        max_length=255,
                    ),
                ),
                (
                    "button_border_color",
                    models.CharField(
                        blank=True,
                        default="border",
                        help_text="The border color of buttons",
                        max_length=255,
                    ),
                ),
                (
                    "button_border_size",
                    models.SmallIntegerField(default=0, help_text="Button border size"),
                ),
                (
                    "button_border_radius",
                    models.SmallIntegerField(
                        default=4, help_text="Button border radius"
                    ),
                ),
                (
                    "button_vertical_padding",
                    models.SmallIntegerField(
                        default=12, help_text="Button vertical padding"
                    ),
                ),
                (
                    "button_horizontal_padding",
                    models.SmallIntegerField(
                        default=12, help_text="Button horizontal padding"
                    ),
                ),
                (
                    "button_hover_background_color",
                    models.CharField(
                        blank=True,
                        default="#96baf6ff",
                        help_text="The background color of buttons when hovered",
                        max_length=255,
                    ),
                ),
                (
                    "button_hover_text_color",
                    models.CharField(
                        blank=True,
                        default="#ffffffff",
                        help_text="The text color of buttons when hovered",
                        max_length=255,
                    ),
                ),
                (
                    "button_hover_border_color",
                    models.CharField(
                        blank=True,
                        default="border",
                        help_text="The border color of buttons when hovered",
                        max_length=255,
                    ),
                ),
                (
                    "button_active_background_color",
                    models.CharField(
                        blank=True,
                        default="#4783db",
                        help_text="The background color of buttons when active",
                        max_length=255,
                    ),
                ),
                (
                    "button_active_text_color",
                    models.CharField(
                        blank=True,
                        default="#ffffffff",
                        help_text="The text color of buttons when active",
                        max_length=255,
                    ),
                ),
                (
                    "button_active_border_color",
                    models.CharField(
                        blank=True,
                        default="#275d9f",
                        help_text="The border color of buttons when active",
                        max_length=255,
                    ),
                ),
                ("link_font_family", models.CharField(default="inter", max_length=250)),
                ("link_font_size", models.SmallIntegerField(default=13)),
                (
                    "link_font_weight",
                    models.CharField(
                        choices=[
                            ("thin", "Thin"),
                            ("extra-light", "Extra Light"),
                            ("light", "Light"),
                            ("regular", "Regular"),
                            ("medium", "Medium"),
                            ("semi-bold", "Semi Bold"),
                            ("bold", "Bold"),
                            ("extra-bold", "Extra Bold"),
                            ("heavy", "Heavy"),
                            ("black", "Black"),
                            ("extra-black", "Extra Black"),
                        ],
                        db_default="regular",
                        default="regular",
                        max_length=11,
                    ),
                ),
                (
                    "link_text_alignment",
                    models.CharField(
                        choices=[
                            ("left", "Left"),
                            ("center", "Center"),
                            ("right", "Right"),
                        ],
                        default="left",
                        max_length=10,
                    ),
                ),
                (
                    "link_text_color",
                    models.CharField(
                        blank=True,
                        default="primary",
                        help_text="The text color of links",
                        max_length=255,
                    ),
                ),
                (
                    "link_hover_text_color",
                    models.CharField(
                        blank=True,
                        default="#96baf6ff",
                        help_text="The hover color of links when hovered",
                        max_length=255,
                    ),
                ),
                (
                    "link_active_text_color",
                    models.CharField(
                        blank=True,
                        default="#275d9f",
                        help_text="The hover color of links when active",
                        max_length=255,
                    ),
                ),
                (
                    "builder",
                    baserow.core.fields.AutoOneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="%(class)s",
                        to="builder.builder",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
    ]
@@ -0,0 +1,36 @@
# Generated by Django 5.0.9 on 2025-03-08 13:26

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        (
            "builder",
            "0053_buttonthemeconfigblock_button_active_background_color_and_more",
        ),
    ]

    operations = [
        migrations.CreateModel(
            name="SimpleContainerElement",
            fields=[
                (
                    "element_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="builder.element",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("builder.element",),
        ),
    ]
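The `element_ptr` one-to-one field with `parent_link=True` in the two migrations above is what Django generates for multi-table inheritance. A sketch of the kind of model declaration that produces it (names illustrative, and it assumes a configured Django app):

```python
from django.db import models


class Element(models.Model):
    """Base table; every concrete element row also gets a row here."""

    order = models.PositiveIntegerField(default=0)


class SimpleContainerElement(Element):
    # Multi-table inheritance: Django implicitly adds
    #   element_ptr = models.OneToOneField(
    #       Element, parent_link=True, primary_key=True, on_delete=models.CASCADE
    #   )
    # which is exactly the field spelled out in the generated migration.
    pass
```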
@@ -0,0 +1,116 @@
# Generated by Django 5.0.9 on 2025-03-18 13:03

from django.db import migrations

import baserow.core.fields


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0054_simplecontainerelement"),
    ]

    operations = [
        migrations.AddField(
            model_name="linkthemeconfigblock",
            name="link_active_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="1000",
                default="1000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="linkthemeconfigblock",
            name="link_default_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="1000",
                default="1000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="linkthemeconfigblock",
            name="link_hover_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="1000",
                default="1000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_1_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_2_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_3_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_4_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_5_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_6_text_decoration",
            field=baserow.core.fields.MultipleFlagField(
                db_default="0000",
                default="0000",
                help_text="The text decoration flags [underline, strike, uppercase, italic]",
                max_length=4,
                num_flags=4,
            ),
        ),
        migrations.DeleteModel(
            name="MenuThemeConfigBlock",
        ),
    ]
@@ -55,6 +55,10 @@ class Builder(Application):
        # but it's a more generic type
        return self.application_ptr

    @property
    def visible_pages(self):
        return self.page_set(manager="objects_without_shared")

    @cached_property
    def shared_page(self):
        from baserow.contrib.builder.pages.handler import PageHandler
@@ -40,10 +40,14 @@ from baserow.contrib.builder.types import PageDict
from baserow.contrib.builder.workflow_actions.handler import (
    BuilderWorkflowActionHandler,
)
from baserow.core.cache import global_cache
from baserow.core.exceptions import IdDoesNotExist
from baserow.core.storage import ExportZipFile
from baserow.core.user_sources.user_source_user import UserSourceUser
from baserow.core.utils import ChildProgressBuilder, MirrorDict, find_unused_name

BUILDER_PAGE_IS_PUBLISHED_CACHE_TTL_SECONDS = 60 * 60


class PageHandler:
    def get_page(self, page_id: int, base_queryset: Optional[QuerySet] = None) -> Page:

@@ -58,7 +62,7 @@ class PageHandler:
        """

        if base_queryset is None:
            base_queryset = Page.objects_with_shared
            base_queryset = Page.objects

        try:
            return base_queryset.select_related("builder__workspace").get(id=page_id)

@@ -70,7 +74,7 @@ class PageHandler:
        Returns the shared page for the given builder.
        """

        return Page.objects_with_shared.select_related("builder__workspace").get(
        return Page.objects.select_related("builder__workspace").get(
            builder=builder, shared=True
        )

@@ -80,7 +84,7 @@ class PageHandler:
        """

        if base_queryset is None:
            base_queryset = Page.objects_with_shared.all()
            base_queryset = Page.objects.all()

        return base_queryset.filter(builder=builder).select_related(
            "builder__workspace"

@@ -178,7 +182,7 @@ class PageHandler:
        self.is_page_path_unique(
            page.builder,
            path,
            base_queryset=Page.objects_with_shared.exclude(
            base_queryset=Page.objects.exclude(
                id=page.id
            ),  # We don't want to conflict with the current page
            raises=True,

@@ -220,7 +224,7 @@ class PageHandler:
        """

        if base_qs is None:
            base_qs = Page.objects.filter(builder=builder)
            base_qs = Page.objects_without_shared.filter(builder=builder)

        try:
            full_order = Page.order_objects(base_qs, order)

@@ -229,6 +233,55 @@ class PageHandler:

        return full_order

    @classmethod
    def get_page_public_records_cache_key(
        cls, page_id: int, user: UserSourceUser, record_name: str
    ):
        """
        Generates the cache key used by the public elements, data sources and workflow
        actions endpoints. If the `user` is authenticated and they have a role, we
        include the role in the cache key.

        :param page_id: the ID of the public page being requested.
        :param user: the `UserSourceUser` performing the HTTP request.
        :param record_name: one of "elements", "data_sources" or "workflow_actions".
            Used to differentiate between public view endpoints.
        :return: the cache key.
        """

        role = f"_{user.role}" if not user.is_anonymous and user.role else ""
        return f"ab_public_page_{page_id}{role}_{record_name}_records"

    def is_published_page(self, public_page_id: int) -> bool:
        """
        Returns whether this public page ID points to a published domain
        application or not.

        :param public_page_id: The ID of the public page.
        :return: whether this public page ID is published or not.
        """

        return global_cache.get(
            f"ab_public_page_{public_page_id}_published",
            default=lambda: self._is_published_application_page(public_page_id),
            timeout=BUILDER_PAGE_IS_PUBLISHED_CACHE_TTL_SECONDS,
        )

    def _is_published_application_page(self, public_page_id: int) -> bool:
        """
        Given a *public* page ID, returns whether it belongs to a published
        domain application.

        :param public_page_id: The ID of the public page.
        :return: whether the public page is part of a published domain application.
        """

        return (
            Builder.objects.filter(page__id=public_page_id)
            .exclude(published_from=None)
            .exists()
        )

    def duplicate_page(
        self, page: Page, progress_builder: Optional[ChildProgressBuilder] = None
    ):

@@ -418,7 +471,7 @@ class PageHandler:
        :return: If the path is unique
        """

        queryset = Page.objects_with_shared if base_queryset is None else base_queryset
        queryset = Page.objects if base_queryset is None else base_queryset

        existing_paths = queryset.filter(builder=builder).values_list("path", flat=True)
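A short usage sketch of the cache key helper added above; the page ID and role are illustrative.

```python
# Anonymous visitors of public page 42 share one key:
#   "ab_public_page_42_elements_records"
# while a user-source user with role "editor" gets a role-scoped key:
#   "ab_public_page_42_editor_elements_records"
key = PageHandler.get_page_public_records_cache_key(42, user, "elements")
```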
@@ -46,8 +46,8 @@ class Page(
    ALLOW_ALL_EXCEPT = "allow_all_except"
    DISALLOW_ALL_EXCEPT = "disallow_all_except"

    objects = PageWithoutSharedManager()
    objects_with_shared = models.Manager()
    objects = models.Manager()
    objects_without_shared = PageWithoutSharedManager()

    builder = models.ForeignKey("builder.Builder", on_delete=models.CASCADE)
    order = models.PositiveIntegerField()

@@ -98,7 +98,7 @@ class Page(

    @classmethod
    def get_last_order(cls, builder: "Builder"):
        queryset = Page.objects.filter(builder=builder)
        queryset = Page.objects_without_shared.filter(builder=builder)
        return cls.get_highest_order_of_queryset(queryset) + 1
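The manager swap above makes `Page.objects` include the builder's hidden shared page, while `objects_without_shared` keeps the old filtered behaviour. A minimal sketch of what such a filtered manager typically looks like (the actual `PageWithoutSharedManager` implementation is assumed):

```python
from django.db import models


class PageWithoutSharedManager(models.Manager):
    def get_queryset(self):
        # Hide the builder's single "shared" page from ordinary page listings.
        return super().get_queryset().filter(shared=False)
```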
@@ -159,7 +159,9 @@ class PageService:
            context=builder,
        )

        all_pages = self.handler.get_pages(builder, base_queryset=Page.objects)
        all_pages = self.handler.get_pages(
            builder, base_queryset=Page.objects_without_shared
        )

        user_pages = CoreHandler().filter_queryset(
            user,
@@ -8,7 +8,7 @@ from baserow.contrib.builder.constants import (
    FontWeights,
    HorizontalAlignments,
)
from baserow.core.fields import AutoOneToOneField
from baserow.core.fields import AutoOneToOneField, MultipleFlagField
from baserow.core.user_files.models import UserFile

@@ -84,6 +84,12 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_1_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    heading_2_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -103,6 +109,12 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_2_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    heading_3_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -122,6 +134,12 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_3_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    heading_4_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -141,6 +159,12 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_4_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    heading_5_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -160,6 +184,12 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_5_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    heading_6_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -179,9 +209,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
        max_length=10,
        default=HorizontalAlignments.LEFT,
    )
    heading_6_text_decoration = MultipleFlagField(
        default=[False, False, False, False],
        num_flags=4,
        db_default="0000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )


class ButtonThemeConfigBlock(ThemeConfigBlock):
class ButtonThemeConfigBlockMixin(models.Model):
    button_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -256,9 +292,34 @@ class ButtonThemeConfigBlock(ThemeConfigBlock):
        blank=True,
        help_text="The border color of buttons when hovered",
    )
    button_active_background_color = models.CharField(
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#4783db",
        blank=True,
        help_text="The background color of buttons when active",
    )
    button_active_text_color = models.CharField(
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#ffffffff",
        blank=True,
        help_text="The text color of buttons when active",
    )
    button_active_border_color = models.CharField(
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#275d9f",
        blank=True,
        help_text="The border color of buttons when active",
    )

    class Meta:
        abstract = True


class LinkThemeConfigBlock(ThemeConfigBlock):
class ButtonThemeConfigBlock(ButtonThemeConfigBlockMixin, ThemeConfigBlock):
    pass


class LinkThemeConfigBlockMixin(models.Model):
    link_font_family = models.CharField(
        max_length=250,
        default="inter",

@@ -287,6 +348,37 @@ class LinkThemeConfigBlock(ThemeConfigBlock):
        blank=True,
        help_text="The hover color of links when hovered",
    )
    link_active_text_color = models.CharField(
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#275d9f",
        blank=True,
        help_text="The hover color of links when active",
    )
    link_default_text_decoration = MultipleFlagField(
        default=[True, False, False, False],
        num_flags=4,
        db_default="1000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    link_hover_text_decoration = MultipleFlagField(
        default=[True, False, False, False],
        num_flags=4,
        db_default="1000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )
    link_active_text_decoration = MultipleFlagField(
        default=[True, False, False, False],
        num_flags=4,
        db_default="1000",
        help_text=("The text decoration flags [underline, strike, uppercase, italic]"),
    )

    class Meta:
        abstract = True


class LinkThemeConfigBlock(LinkThemeConfigBlockMixin, ThemeConfigBlock):
    pass


class ImageThemeConfigBlock(ThemeConfigBlock):
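`MultipleFlagField` pairs a Python default like `[True, False, False, False]` with a `db_default` string like `"1000"`, which suggests the four decoration toggles are persisted as a fixed-width string of `0`/`1` characters. A sketch of that encoding, under that assumption (the field's real internals are not shown in the diff):

```python
FLAG_NAMES = ["underline", "strike", "uppercase", "italic"]


def encode_flags(flags):
    """[True, False, False, False] -> "1000"."""
    return "".join("1" if flag else "0" for flag in flags)


def decode_flags(value):
    """"1000" -> {"underline": True, "strike": False, ...}."""
    return {name: char == "1" for name, char in zip(FLAG_NAMES, value)}


assert encode_flags([True, False, False, False]) == "1000"
assert decode_flags("1000")["underline"] is True
```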
@@ -32,6 +32,41 @@ class TypographyThemeConfigBlockType(ThemeConfigBlockType):
    type = "typography"
    model_class = TypographyThemeConfigBlock

    @property
    def serializer_field_overrides(self):
        return {
            "heading_1_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "heading_2_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "heading_3_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "heading_4_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "heading_5_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "heading_6_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
        }

    def import_serialized(
        self,
        parent: Any,

@@ -63,6 +98,26 @@ class LinkThemeConfigBlockType(ThemeConfigBlockType):
    type = "link"
    model_class = LinkThemeConfigBlock

    @property
    def serializer_field_overrides(self):
        return {
            "link_default_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Default text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "link_hover_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Hover text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
            "link_active_text_decoration": serializers.ListField(
                child=serializers.BooleanField(),
                help_text="Active text decoration: [underline, stroke, uppercase, italic]",
                required=False,
            ),
        }


class ImageThemeConfigBlockType(ThemeConfigBlockType):
    type = "image"
@@ -386,6 +386,10 @@ class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
        service = workflow_action.service.specific
        yield from service.get_type().formula_generator(service)

    def enhance_queryset(self, queryset):
        queryset = queryset.select_related("service")
        return super().enhance_queryset(queryset)


class UpsertRowWorkflowActionType(BuilderWorkflowServiceActionType):
    type = "upsert_row"
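The new `enhance_queryset` adds a `select_related("service")` so that touching `workflow_action.service` (as `formula_generator` does above) doesn't cost one extra query per action. An illustration of the difference, with `BuilderWorkflowAction` assumed to be the model behind these action types:

```python
# N+1: one query for the actions, plus one more per action to fetch its service.
for action in BuilderWorkflowAction.objects.all():
    _ = action.service.id  # triggers a separate query per action

# Single query: the related service row is joined in up front.
for action in BuilderWorkflowAction.objects.select_related("service"):
    _ = action.service.id  # already loaded, no extra query
```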
@@ -1,7 +1,5 @@
from django.apps import AppConfig

from baserow.core.feature_flags import FF_DASHBOARDS, feature_flag_is_enabled


class DashboardConfig(AppConfig):
    name = "baserow.contrib.dashboard"

@@ -18,105 +16,102 @@ class DashboardConfig(AppConfig):

        from .application_types import DashboardApplicationType

        if feature_flag_is_enabled(FF_DASHBOARDS):
            application_type_registry.register(DashboardApplicationType())
        application_type_registry.register(DashboardApplicationType())

            from baserow.contrib.dashboard.object_scopes import DashboardObjectScopeType
        from baserow.contrib.dashboard.object_scopes import DashboardObjectScopeType

            object_scope_type_registry.register(DashboardObjectScopeType())
        object_scope_type_registry.register(DashboardObjectScopeType())

            from baserow.contrib.dashboard.data_sources.object_scopes import (
                DashboardDataSourceObjectScopeType,
            )
        from baserow.contrib.dashboard.data_sources.object_scopes import (
            DashboardDataSourceObjectScopeType,
        )

            object_scope_type_registry.register(DashboardDataSourceObjectScopeType())
        object_scope_type_registry.register(DashboardDataSourceObjectScopeType())

            from baserow.contrib.dashboard.widgets.object_scopes import (
                WidgetObjectScopeType,
            )
        from baserow.contrib.dashboard.widgets.object_scopes import (
            WidgetObjectScopeType,
        )

            object_scope_type_registry.register(WidgetObjectScopeType())
        object_scope_type_registry.register(WidgetObjectScopeType())

            from baserow.contrib.dashboard.widgets.operations import (
                CreateWidgetOperationType,
                DeleteWidgetOperationType,
                ListWidgetsOperationType,
                ReadWidgetOperationType,
                RestoreWidgetOperationType,
                UpdateWidgetOperationType,
            )
        from baserow.contrib.dashboard.widgets.operations import (
            CreateWidgetOperationType,
            DeleteWidgetOperationType,
            ListWidgetsOperationType,
            ReadWidgetOperationType,
            RestoreWidgetOperationType,
            UpdateWidgetOperationType,
        )

            operation_type_registry.register(ListWidgetsOperationType())
            operation_type_registry.register(ReadWidgetOperationType())
            operation_type_registry.register(CreateWidgetOperationType())
            operation_type_registry.register(UpdateWidgetOperationType())
            operation_type_registry.register(DeleteWidgetOperationType())
            operation_type_registry.register(RestoreWidgetOperationType())
        operation_type_registry.register(ListWidgetsOperationType())
        operation_type_registry.register(ReadWidgetOperationType())
        operation_type_registry.register(CreateWidgetOperationType())
        operation_type_registry.register(UpdateWidgetOperationType())
        operation_type_registry.register(DeleteWidgetOperationType())
        operation_type_registry.register(RestoreWidgetOperationType())

            from baserow.contrib.dashboard.data_sources.operations import (
                CreateDashboardDataSourceOperationType,
                DeleteDashboardDataSourceOperationType,
                DispatchDashboardDataSourceOperationType,
                ListDashboardDataSourcesOperationType,
                ReadDashboardDataSourceOperationType,
                UpdateDashboardDataSourceOperationType,
            )
        from baserow.contrib.dashboard.data_sources.operations import (
            CreateDashboardDataSourceOperationType,
            DeleteDashboardDataSourceOperationType,
            DispatchDashboardDataSourceOperationType,
            ListDashboardDataSourcesOperationType,
            ReadDashboardDataSourceOperationType,
            UpdateDashboardDataSourceOperationType,
        )

            operation_type_registry.register(ListDashboardDataSourcesOperationType())
            operation_type_registry.register(CreateDashboardDataSourceOperationType())
            operation_type_registry.register(DeleteDashboardDataSourceOperationType())
            operation_type_registry.register(UpdateDashboardDataSourceOperationType())
            operation_type_registry.register(ReadDashboardDataSourceOperationType())
            operation_type_registry.register(DispatchDashboardDataSourceOperationType())
        operation_type_registry.register(ListDashboardDataSourcesOperationType())
        operation_type_registry.register(CreateDashboardDataSourceOperationType())
        operation_type_registry.register(DeleteDashboardDataSourceOperationType())
        operation_type_registry.register(UpdateDashboardDataSourceOperationType())
        operation_type_registry.register(ReadDashboardDataSourceOperationType())
        operation_type_registry.register(DispatchDashboardDataSourceOperationType())

            from baserow.contrib.dashboard.widgets.registries import (
                widget_type_registry,
            )
            from baserow.contrib.dashboard.widgets.widget_types import SummaryWidgetType
        from baserow.contrib.dashboard.widgets.registries import widget_type_registry
        from baserow.contrib.dashboard.widgets.widget_types import SummaryWidgetType

            widget_type_registry.register(SummaryWidgetType())
        widget_type_registry.register(SummaryWidgetType())

            from baserow.contrib.dashboard.widgets.trash_types import (
                WidgetTrashableItemType,
            )
        from baserow.contrib.dashboard.widgets.trash_types import (
            WidgetTrashableItemType,
        )

            trash_item_type_registry.register(WidgetTrashableItemType())
        trash_item_type_registry.register(WidgetTrashableItemType())

            from .ws.pages import DashboardPageType
        from .ws.pages import DashboardPageType

            page_registry.register(DashboardPageType())
        page_registry.register(DashboardPageType())

            from baserow.core.registries import permission_manager_type_registry
        from baserow.core.registries import permission_manager_type_registry

            from .permission_manager import AllowIfTemplatePermissionManagerType
        from .permission_manager import AllowIfTemplatePermissionManagerType

            prev_manager = permission_manager_type_registry.get(
                AllowIfTemplatePermissionManagerType.type
            )
            permission_manager_type_registry.unregister(
                AllowIfTemplatePermissionManagerType.type
            )
            permission_manager_type_registry.register(
                AllowIfTemplatePermissionManagerType(prev_manager)
            )
        prev_manager = permission_manager_type_registry.get(
            AllowIfTemplatePermissionManagerType.type
        )
        permission_manager_type_registry.unregister(
            AllowIfTemplatePermissionManagerType.type
        )
        permission_manager_type_registry.register(
            AllowIfTemplatePermissionManagerType(prev_manager)
        )

            from baserow.contrib.dashboard.data_sources.actions import (
                UpdateDashboardDataSourceActionType,
            )
            from baserow.contrib.dashboard.widgets.actions import (
                CreateWidgetActionType,
                DeleteWidgetActionType,
                UpdateWidgetActionType,
            )
        from baserow.contrib.dashboard.data_sources.actions import (
            UpdateDashboardDataSourceActionType,
        )
        from baserow.contrib.dashboard.widgets.actions import (
            CreateWidgetActionType,
            DeleteWidgetActionType,
            UpdateWidgetActionType,
        )

            from .ws.receivers import (  # noqa: F401
                dashboard_data_source_updated,
                widget_created,
                widget_deleted,
                widget_updated,
            )
        from .ws.receivers import (  # noqa: F401
            dashboard_data_source_updated,
            widget_created,
            widget_deleted,
            widget_updated,
        )

            action_type_registry.register(CreateWidgetActionType())
            action_type_registry.register(UpdateWidgetActionType())
            action_type_registry.register(DeleteWidgetActionType())
            action_type_registry.register(UpdateDashboardDataSourceActionType())
        action_type_registry.register(CreateWidgetActionType())
        action_type_registry.register(UpdateWidgetActionType())
        action_type_registry.register(DeleteWidgetActionType())
        action_type_registry.register(UpdateDashboardDataSourceActionType())
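The `ready()` body above fetches the previously registered `AllowIfTemplatePermissionManagerType`, unregisters it, and registers a new instance wrapping the old one. A self-contained sketch of that decorator-style registry swap (the registry and manager classes here are simplified stand-ins, not Baserow's real ones):

```python
class Registry:
    def __init__(self):
        self._types = {}

    def register(self, instance):
        self._types[instance.type] = instance

    def unregister(self, type_name):
        self._types.pop(type_name)

    def get(self, type_name):
        return self._types[type_name]


class AllowIfTemplate:
    type = "allow_if_template"

    def __init__(self, wrapped=None):
        # Delegate to the previously registered manager when set.
        self.wrapped = wrapped


registry = Registry()
registry.register(AllowIfTemplate())

prev = registry.get(AllowIfTemplate.type)
registry.unregister(AllowIfTemplate.type)
registry.register(AllowIfTemplate(prev))  # new instance wraps the old one
```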
@@ -1,16 +1,18 @@
from datetime import datetime, timezone
from decimal import Decimal, InvalidOperation
from typing import Any, Dict, Optional
from typing import Any, Dict

from django.core.exceptions import ValidationError

from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
from baserow.contrib.database.fields.models import (
    NUMBER_MAX_DECIMAL_PLACES,
    AutonumberField,
    BooleanField,
    CountField,
    CreatedOnField,
    DateField,
    DurationField,
    EmailField,
    Field,
    FileField,

@@ -26,9 +28,18 @@ from baserow.contrib.database.fields.models import (
    URLField,
)
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.utils.duration import D_H, H_M_S_SSS
from baserow.core.utils import get_value_at_path

from .config import AirtableImportConfig
from .constants import AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING
from .constants import (
    AIRTABLE_DURATION_FIELD_DURATION_FORMAT_MAPPING,
    AIRTABLE_MAX_DURATION_VALUE,
    AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING,
    AIRTABLE_RATING_COLOR_MAPPING,
    AIRTABLE_RATING_ICON_MAPPING,
)
from .exceptions import AirtableSkipCellValue
from .helpers import import_airtable_date_type_options, set_select_options_on_field
from .import_report import (
    ERROR_TYPE_DATA_TYPE_MISMATCH,

@@ -38,7 +49,7 @@ from .import_report import (
    AirtableImportReport,
)
from .registry import AirtableColumnType
from .utils import get_airtable_row_primary_value
from .utils import get_airtable_row_primary_value, quill_to_markdown


class TextAirtableColumnType(AirtableColumnType):

@@ -53,7 +64,7 @@ class TextAirtableColumnType(AirtableColumnType):
        elif validator_name == "email":
            return EmailField()
        else:
            return TextField()
            return TextField(text_default=raw_airtable_column.get("default", ""))

    def to_baserow_export_serialized_value(
        self,

@@ -86,6 +97,25 @@ class TextAirtableColumnType(AirtableColumnType):

        return value

    def to_baserow_export_empty_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_row,
        raw_airtable_column,
        baserow_field,
        files_to_download,
        config,
        import_report,
    ):
        # If the `text_default` is set, then we must return an empty string. If we
        # don't, the value is omitted in the export, resulting in the default value
        # automatically being set, while it's actually empty in Airtable.
        if isinstance(baserow_field, TextField) and baserow_field.text_default != "":
            return ""
        else:
            raise AirtableSkipCellValue


class MultilineTextAirtableColumnType(AirtableColumnType):
    type = "multilineText"

@@ -102,7 +132,7 @@ class RichTextTextAirtableColumnType(AirtableColumnType):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        return LongTextField()
        return LongTextField(long_text_enable_rich_text=True)

    def to_baserow_export_serialized_value(
        self,

@@ -116,37 +146,7 @@ class RichTextTextAirtableColumnType(AirtableColumnType):
        config,
        import_report,
    ):
        # We don't support rich text formatting yet, so this converts the value to
        # plain text.
        rich_values = []
        for v in value["documentValue"]:
            insert_value = v["insert"]
            if isinstance(insert_value, str):
                rich_values.append(insert_value)
            elif isinstance(insert_value, dict):
                rich_value = self._extract_value_from_airtable_rich_value_dict(
                    insert_value
                )
                if rich_value is not None:
                    rich_values.append(rich_value)

        return "".join(rich_values)

    def _extract_value_from_airtable_rich_value_dict(
        self, insert_value_dict: Dict[Any, Any]
    ) -> Optional[str]:
        """
        Airtable rich text fields can contain references to users. For now this method
        attempts to return a @userId reference string. In the future if Baserow has
        a rich text field and the ability to reference users in them we should map
        this airtable userId to the corresponding Baserow user id.
        """

        mention = insert_value_dict.get("mention")
        if isinstance(mention, dict):
            user_id = mention.get("userId")
            if user_id is not None:
                return f"@{user_id}"
        return quill_to_markdown(value["documentValue"])


class NumberAirtableColumnType(AirtableColumnType):
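`quill_to_markdown` replaces the removed plain-text extraction loop above. Airtable stores rich text as a Quill-style delta (a list of `insert` operations with optional attributes); a simplified sketch of such a converter, handling only bold/italic runs and user mentions (the real helper imported from `.utils` is assumed to cover more cases):

```python
def quill_to_markdown_sketch(document_value):
    parts = []
    for op in document_value:
        insert = op.get("insert")
        if isinstance(insert, dict):
            # Non-text inserts, e.g. a user mention object.
            mention = insert.get("mention")
            if isinstance(mention, dict) and mention.get("userId"):
                parts.append(f"@{mention['userId']}")
            continue
        attrs = op.get("attributes", {})
        if attrs.get("bold"):
            insert = f"**{insert}**"
        if attrs.get("italic"):
            insert = f"*{insert}*"
        parts.append(insert)
    return "".join(parts)


print(quill_to_markdown_sketch([
    {"insert": "Hello "},
    {"insert": "world", "attributes": {"bold": True}},
]))  # -> Hello **world**
```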
@@ -155,10 +155,51 @@ class NumberAirtableColumnType(AirtableColumnType):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        self.add_import_report_failed_if_default_is_provided(
            raw_airtable_table, raw_airtable_column, import_report
        )

        type_options = raw_airtable_column.get("typeOptions", {})
        options_format = type_options.get("format", "")

        if options_format in ["duration", "durationInDays"]:
            return self.to_duration_field(
                raw_airtable_table, raw_airtable_column, config, import_report
            )
        else:
            return self.to_number_field(
                raw_airtable_table, raw_airtable_column, config, import_report
            )

    def to_duration_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        type_options = raw_airtable_column.get("typeOptions", {})
        options_format = type_options.get("format", "")
        duration_format = type_options.get("durationFormat", "")

        if options_format == "durationInDays":
            # It looks like this option is broken in Airtable. When this is selected,
            # the exact value seems to be in seconds, but it should be in days. We
            # will therefore convert it to days when calculating the value.
            duration_format = D_H
        else:
            # Fall back to the most specific format because that leaves most of the
            # value intact.
            duration_format = AIRTABLE_DURATION_FIELD_DURATION_FORMAT_MAPPING.get(
                duration_format, H_M_S_SSS
            )

        return DurationField(duration_format=duration_format)

    def to_number_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        suffix = ""

        type_options = raw_airtable_column.get("typeOptions", {})
        options_format = type_options.get("format", "")

        if "percent" in options_format:
            suffix = "%"

@@ -173,7 +214,7 @@ class NumberAirtableColumnType(AirtableColumnType):

        if separator_format != "" and number_separator == "":
            import_report.add_failed(
                f"Number field: \"{raw_airtable_column['name']}\"",
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,

@@ -204,14 +245,40 @@ class NumberAirtableColumnType(AirtableColumnType):
        if value is None:
            return None

        type_options = raw_airtable_column.get("typeOptions", {})
        options_format = type_options.get("format", "")
        row_name = get_airtable_row_primary_value(raw_airtable_table, raw_airtable_row)

        if options_format == "durationInDays":
            # If the formatting is in days, we must multiply the raw value in seconds
            # by the number of seconds in a day.
            value = value * 60 * 60 * 24

        if "duration" in options_format:
            # If the value is higher than the maximum that the `timedelta` can handle,
            # then we can't use it, so we have to drop it. The maximum number of days
            # in `timedelta` is `999999999`, so the max number of seconds is
            # 999999999 * 24 * 60 * 60 = 86399999913600.
            if abs(value) > AIRTABLE_MAX_DURATION_VALUE:
                import_report.add_failed(
                    f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
                    SCOPE_CELL,
                    raw_airtable_table["name"],
                    ERROR_TYPE_DATA_TYPE_MISMATCH,
                    f"Cell value was left empty because the duration seconds {value} "
                    f"is outside the -86399999913600 and 86399999913600 range.",
                )
                return None

            # If the value is a duration, then we can use the same value because both
            # store it as seconds.
            return value

        try:
            value = Decimal(value)
        except InvalidOperation:
            # If the value can't be parsed as decimal, then it might be corrupt, so we
            # need to inform the user and skip the import.
            row_name = get_airtable_row_primary_value(
                raw_airtable_table, raw_airtable_row
            )
            import_report.add_failed(
                f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
                SCOPE_CELL,
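The bound checked above comes from Python's `timedelta`, which caps out at 999999999 days. A quick check of the arithmetic, plus the `durationInDays` conversion the import applies:

```python
from datetime import timedelta

# Maximum number of whole days a timedelta can represent.
max_seconds = 999999999 * 24 * 60 * 60
assert max_seconds == 86399999913600
assert timedelta(seconds=max_seconds) == timedelta(days=999999999)

# "durationInDays" raw values arrive as if they were seconds, so the import
# multiplies by the number of seconds in a day before storing them:
value = 3
value = value * 60 * 60 * 24  # 259200
```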
@ -224,8 +291,6 @@ class NumberAirtableColumnType(AirtableColumnType):
|
|||
|
||||
# Airtable stores 10% as 0.1, so we would need to multiply it by 100 so get the
|
||||
# correct value in Baserow.
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
options_format = type_options.get("format", "")
|
||||
if "percent" in options_format:
|
||||
value = value * 100
|
||||
|
||||
|
@ -241,8 +306,39 @@ class RatingAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
airtable_icon = type_options.get("icon", "")
|
||||
airtable_max = type_options.get("max", 5)
|
||||
airtable_color = type_options.get("color", "")
|
||||
|
||||
style = AIRTABLE_RATING_ICON_MAPPING.get(airtable_icon, "")
|
||||
if style == "":
|
||||
style = list(AIRTABLE_RATING_ICON_MAPPING.values())[0]
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
f"The field was imported, but the icon {airtable_icon} does not "
|
||||
f"exist, so it defaulted to {style}.",
|
||||
)
|
||||
|
||||
color = AIRTABLE_RATING_COLOR_MAPPING.get(airtable_color, "")
|
||||
if color == "":
|
||||
color = list(AIRTABLE_RATING_COLOR_MAPPING.values())[0]
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
f"The field was imported, but the color {airtable_color} does not "
|
||||
f"exist, so it defaulted to {color}.",
|
||||
)
|
||||
|
||||
return RatingField(
|
||||
max_value=raw_airtable_column.get("typeOptions", {}).get("max", 5)
|
||||
max_value=airtable_max,
|
||||
style=style,
|
||||
color=color,
|
||||
)
|
||||
|
||||
|
||||
|
@ -252,6 +348,32 @@ class CheckboxAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
self.add_import_report_failed_if_default_is_provided(
|
||||
raw_airtable_table, raw_airtable_column, import_report
|
||||
)
|
||||
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
airtable_icon = type_options.get("icon", "check")
|
||||
airtable_color = type_options.get("color", "green")
|
||||
|
||||
if airtable_icon != "check":
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
f"The field was imported, but the icon {airtable_icon} is not supported.",
|
||||
)
|
||||
|
||||
if airtable_color != "green":
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
f"The field was imported, but the color {airtable_color} is not supported.",
|
||||
)
|
||||
|
||||
return BooleanField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
|
@@ -275,6 +397,13 @@ class DateAirtableColumnType(AirtableColumnType):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        self.add_import_report_failed_if_default_is_provided(
            raw_airtable_table,
            raw_airtable_column,
            import_report,
            to_human_readable_default=lambda x: "Current date",
        )

        type_options = raw_airtable_column.get("typeOptions", {})
        # Check if a timezone is provided in the type options. If so, we might want
        # to use that timezone for the conversion later on.

@@ -283,13 +412,6 @@ class DateAirtableColumnType(AirtableColumnType):

        # date_force_timezone=None is the equivalent of airtable_timezone="client".
        if airtable_timezone == "client":
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The date field was imported, but the client timezone setting was dropped.",
            )
            airtable_timezone = None

        return DateField(
@@ -358,15 +480,6 @@ class FormulaAirtableColumnType(AirtableColumnType):
        is_last_modified = display_type == "lastModifiedTime"
        is_created = display_type == "createdTime"

        if (is_last_modified or is_created) and airtable_timezone == "client":
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The field was imported, but the client timezone setting was dropped.",
            )

        # date_force_timezone=None is the equivalent of airtable_timezone="client".
        if airtable_timezone == "client":
            airtable_timezone = None

@@ -374,6 +487,22 @@ class FormulaAirtableColumnType(AirtableColumnType):
        # The formula conversion isn't supported yet, but because the Created on and
        # Last modified fields work as a formula, we can convert those.
        if is_last_modified:
            dependencies = type_options.get("dependencies", {})
            all_column_modifications = dependencies.get(
                "dependsOnAllColumnModifications", False
            )

            if not all_column_modifications:
                import_report.add_failed(
                    raw_airtable_column["name"],
                    SCOPE_FIELD,
                    raw_airtable_table.get("name", ""),
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    "The field was imported, but the support to depend on "
                    "specific fields was dropped because that's not supported by "
                    "Baserow.",
                )

            return LastModifiedField(
                date_show_tzinfo=date_show_tzinfo,
                date_force_timezone=airtable_timezone,
@@ -421,12 +550,72 @@ class ForeignKeyAirtableColumnType(AirtableColumnType):
    ):
        type_options = raw_airtable_column.get("typeOptions", {})
        foreign_table_id = type_options.get("foreignTableId")
        relationship = type_options.get("relationship", "many")  # can be: one
        view_id_for_record_selection = type_options.get(
            "viewIdForRecordSelection", None
        )
        filters_for_record_selection = type_options.get(
            "filtersForRecordSelection", None
        )
        ai_matching_options = type_options.get("aiMatchingOptions", None)

        if relationship != "many":
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The field was imported, but support for a one to many "
                "relationship was dropped because it's not supported by Baserow.",
            )

        if view_id_for_record_selection is not None:
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The field was imported, but limiting record selection to a view "
                "was dropped because the views have not been imported.",
            )

        if filters_for_record_selection is not None:
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The field was imported, but filtering records by a condition "
                "was dropped because it's not supported by Baserow.",
            )

        if ai_matching_options is not None:
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                "The field was imported, but using AI to show top matches was "
                "dropped because it's not supported by Baserow.",
            )

        return LinkRowField(
            link_row_table_id=foreign_table_id,
            link_row_related_field_id=type_options.get("symmetricColumnId"),
        )

    def after_field_objects_prepared(
        self, field_mapping_per_table, baserow_field, raw_airtable_column
    ):
        foreign_table_id = raw_airtable_column["typeOptions"]["foreignTableId"]
        foreign_field_mapping = field_mapping_per_table[foreign_table_id]
        foreign_primary_field = next(
            field["baserow_field"]
            for field in foreign_field_mapping.values()
            if field["baserow_field"].primary
        )
        baserow_field.link_row_table_primary_field = foreign_primary_field

    def to_baserow_export_serialized_value(
        self,
        row_id_mapping,
@@ -531,12 +720,21 @@ class SelectAirtableColumnType(AirtableColumnType):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
-       field = SingleSelectField()
-       field = set_select_options_on_field(
-           field,
-           raw_airtable_column.get("id", ""),
-           raw_airtable_column.get("typeOptions", {}),
-       )
+       id_value = raw_airtable_column.get("id", "")
+       type_options = raw_airtable_column.get("typeOptions", {})
+
+       def get_default(x):
+           return get_value_at_path(type_options, f"choices.{x}.name", "")
+
+       self.add_import_report_failed_if_default_is_provided(
+           raw_airtable_table,
+           raw_airtable_column,
+           import_report,
+           to_human_readable_default=get_default,
+       )
+
+       field = SingleSelectField()
+       field = set_select_options_on_field(field, id_value, type_options)
        return field
@@ -562,12 +760,27 @@ class MultiSelectAirtableColumnType(AirtableColumnType):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
-       field = MultipleSelectField()
-       field = set_select_options_on_field(
-           field,
-           raw_airtable_column.get("id", ""),
-           raw_airtable_column.get("typeOptions", {}),
-       )
+       id_value = raw_airtable_column.get("id", "")
+       type_options = raw_airtable_column.get("typeOptions", {})
+
+       def get_default(default):
+           default = default or []
+           return ", ".join(
+               [
+                   get_value_at_path(type_options, f"choices.{v}.name", "")
+                   for v in default
+               ]
+           )
+
+       self.add_import_report_failed_if_default_is_provided(
+           raw_airtable_table,
+           raw_airtable_column,
+           import_report,
+           to_human_readable_default=get_default,
+       )
+
+       field = MultipleSelectField()
+       field = set_select_options_on_field(field, id_value, type_options)
        return field
@@ -631,3 +844,12 @@ class CountAirtableColumnType(AirtableColumnType):
        import_report,
    ):
        return None


class AutoNumberAirtableColumnType(AirtableColumnType):
    type = "autoNumber"

    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        return AutonumberField()
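Each column type class above carries a `type` attribute matching Airtable's raw column type string, which is what the registry dispatches on. A minimal sketch of that dispatch, using a toy dict registry rather than Baserow's actual `airtable_column_type_registry` (the class below is a stand-in, not the real API):

```python
# Toy registry keyed by the Airtable column "type" string.
class AutoNumberColumnType:
    type = "autoNumber"

    def to_baserow_field(self, raw_column):
        return {"kind": "autonumber", "name": raw_column["name"]}

REGISTRY = {cls.type: cls() for cls in [AutoNumberColumnType]}

def convert_column(raw_column):
    column_type = REGISTRY.get(raw_column["type"])
    if column_type is None:
        return None  # unknown types are reported and skipped by the importer
    return column_type.to_baserow_field(raw_column)

print(convert_column({"type": "autoNumber", "name": "ID"}))
print(convert_column({"type": "button", "name": "Open"}))  # None -> skipped
```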
@@ -0,0 +1,423 @@
from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.core.utils import get_value_at_path

from .exceptions import AirtableSkipFilter
from .helpers import to_import_select_option_id
from .registry import AirtableFilterOperator
from .utils import (
    airtable_date_filter_value_to_baserow,
    skip_filter_if_type_duration_and_value_too_high,
)


class AirtableContainsOperator(AirtableFilterOperator):
    type = "contains"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["foreignKey"]:
            return view_filter_type_registry.get("link_row_contains"), value

        return view_filter_type_registry.get("contains"), value


class AirtableDoesNotContainOperator(AirtableFilterOperator):
    type = "doesNotContain"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["foreignKey"]:
            return view_filter_type_registry.get("link_row_not_contains"), value

        if raw_airtable_column["type"] in ["multiSelect"]:
            value = [f"{raw_airtable_column['id']}_{v}" for v in value]
            value = ",".join(value)
            return view_filter_type_registry.get("multiple_select_has_not"), value

        return view_filter_type_registry.get("contains_not"), value


class AirtableEqualOperator(AirtableFilterOperator):
    type = "="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "text",
            "multilineText",
            "number",
            "rating",
            "phone",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("equal"), str(value)

        if raw_airtable_column["type"] in ["checkbox"]:
            return (
                view_filter_type_registry.get("boolean"),
                "true" if value else "false",
            )

        if raw_airtable_column["type"] in ["select"]:
            value = to_import_select_option_id(raw_airtable_column["id"], value)
            return view_filter_type_registry.get("single_select_equal"), value

        if raw_airtable_column["type"] in ["multiSelect"]:
            value = [f"{raw_airtable_column['id']}_{v}" for v in value]
            value = ",".join(value)
            return view_filter_type_registry.get("multiple_select_has"), value

        if raw_airtable_column["type"] in ["collaborator"]:
            return view_filter_type_registry.get("multiple_collaborators_has"), value

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is"), value

        if raw_airtable_column["type"] in ["foreignKey"]:
            if isinstance(value, list):
                if len(value) > 1:
                    raise AirtableSkipFilter
                foreign_table_id = get_value_at_path(
                    raw_airtable_column, "typeOptions.foreignTableId"
                )
                table_row_id_mapping = row_id_mapping.get(foreign_table_id, {})
                value = [
                    str(table_row_id_mapping.get(v))
                    for v in value
                    if v in table_row_id_mapping
                ]
                value = ",".join(value)
            return view_filter_type_registry.get("link_row_has"), value

        raise AirtableSkipFilter


class AirtableNotEqualOperator(AirtableFilterOperator):
    type = "!="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "text",
            "multilineText",
            "number",
            "rating",
            "phone",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("not_equal"), str(value)

        if raw_airtable_column["type"] in ["select"]:
            value = to_import_select_option_id(raw_airtable_column["id"], value)
            return view_filter_type_registry.get("single_select_not_equal"), value

        if raw_airtable_column["type"] in ["collaborator"]:
            return (
                view_filter_type_registry.get("multiple_collaborators_has_not"),
                value,
            )

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_not"), value

        raise AirtableSkipFilter


class AirtableIsEmptyOperator(AirtableFilterOperator):
    type = "isEmpty"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        return view_filter_type_registry.get("empty"), ""


class AirtableIsNotEmptyOperator(AirtableFilterOperator):
    type = "isNotEmpty"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        return view_filter_type_registry.get("not_empty"), ""


class AirtableFilenameOperator(AirtableFilterOperator):
    type = "filename"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        return view_filter_type_registry.get("filename_contains"), value


class AirtableFiletypeOperator(AirtableFilterOperator):
    type = "filetype"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if value == "image":
            value = "image"
        elif value == "text":
            value = "document"
        else:
            raise AirtableSkipFilter

        return view_filter_type_registry.get("has_file_type"), value


class AirtableIsAnyOfOperator(AirtableFilterOperator):
    type = "isAnyOf"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["select"]:
            value = [
                to_import_select_option_id(raw_airtable_column["id"], v) for v in value
            ]
            value = ",".join(value)
            return view_filter_type_registry.get("single_select_is_any_of"), value

        raise AirtableSkipFilter


class AirtableIsNoneOfOperator(AirtableFilterOperator):
    type = "isNoneOf"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["select"]:
            value = [
                to_import_select_option_id(raw_airtable_column["id"], v) for v in value
            ]
            value = ",".join(value)
            return view_filter_type_registry.get("single_select_is_none_of"), value

        raise AirtableSkipFilter


class AirtableHasAnyOfOperator(AirtableFilterOperator):
    type = "|"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        raise AirtableSkipFilter


class AirtableHasAllOfOperator(AirtableFilterOperator):
    type = "&"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        raise AirtableSkipFilter


class AirtableLessThanOperator(AirtableFilterOperator):
    type = "<"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("lower_than"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_before"), value

        raise AirtableSkipFilter


class AirtableMoreThanOperator(AirtableFilterOperator):
    type = ">"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("higher_than"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_after"), value

        raise AirtableSkipFilter


class AirtableLessThanOrEqualOperator(AirtableFilterOperator):
    type = "<="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("lower_than_or_equal"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_on_or_before"), value

        raise AirtableSkipFilter


class AirtableMoreThanOrEqualOperator(AirtableFilterOperator):
    type = ">="

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in [
            "number",
            "rating",
            "autoNumber",
        ]:
            skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value)
            return view_filter_type_registry.get("higher_than_or_equal"), str(value)

        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_on_or_after"), value

        raise AirtableSkipFilter


class AirtableIsWithinOperator(AirtableFilterOperator):
    type = "isWithin"

    def to_baserow_filter_and_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_column,
        baserow_field,
        import_report,
        value,
    ):
        if raw_airtable_column["type"] in ["date"]:
            value = airtable_date_filter_value_to_baserow(value)
            return view_filter_type_registry.get("date_is_within"), value

        raise AirtableSkipFilter
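Several operators above namespace `multiSelect` choice ids with the column id before joining them, because Airtable choice ids are only unique per column. Shown standalone with made-up ids:

```python
# Standalone illustration of the multiSelect value translation: choice ids
# are prefixed with the column id and joined into one comma-separated string.
def multi_select_filter_value(column_id: str, choice_ids: list) -> str:
    return ",".join(f"{column_id}_{choice_id}" for choice_id in choice_ids)

print(multi_select_filter_value("fldA1B2", ["selX", "selY"]))
# -> "fldA1B2_selX,fldA1B2_selY"
```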
@@ -0,0 +1,56 @@
from baserow.contrib.database.airtable.registry import AirtableViewType
from baserow.contrib.database.views.models import GridView, GridViewFieldOptions
from baserow.contrib.database.views.view_types import GridViewType
from baserow.core.utils import get_value_at_path


class GridAirtableViewType(AirtableViewType):
    type = "grid"
    baserow_view_type = GridViewType.type

    def prepare_view_object(
        self,
        field_mapping,
        view: GridView,
        raw_airtable_table,
        raw_airtable_view,
        raw_airtable_view_data,
        config,
        import_report,
    ):
        # Airtable doesn't have this option, and by default it is count.
        view.row_identifier_type = GridView.RowIdentifierTypes.count.value

        # Set the row height if the value size is available. Baserow doesn't support
        # `xlarge`, so we're falling back on `large` in that case.
        row_height_mapping = {v: v for v in GridView.RowHeightSizes.values}
        row_height_mapping["xlarge"] = GridView.RowHeightSizes.large.value
        row_height = get_value_at_path(
            raw_airtable_view_data, "metadata.grid.rowHeight"
        )
        view.row_height_size = row_height_mapping.get(
            row_height, GridView.RowHeightSizes.small.value
        )

        # Map the columnOrder entries to the matching `GridViewFieldOptions`,
        # and set that as `get_field_options`, so that it's correctly serialized
        # when exported.
        field_options = []
        column_orders = raw_airtable_view_data.get("columnOrder", None) or []
        for index, column_order in enumerate(column_orders):
            if column_order["columnId"] not in field_mapping:
                continue

            field_options.append(
                GridViewFieldOptions(
                    id=f"{raw_airtable_view['id']}_columnOrder_{index}",
                    grid_view_id=view.id,
                    field_id=column_order["columnId"],
                    width=column_order.get("width", 200),
                    hidden=not column_order.get("visibility", True),
                    order=index + 1,
                )
            )
        view.get_field_options = lambda *args, **kwargs: field_options

        return view
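The row height conversion builds an identity mapping over the supported sizes and adds one alias for `xlarge`. A standalone sketch with plain strings standing in for `GridView.RowHeightSizes` (the concrete enum values here are assumptions):

```python
# Sketch of the row height fallback above.
SIZES = ["small", "medium", "large"]
row_height_mapping = {v: v for v in SIZES}
row_height_mapping["xlarge"] = "large"  # Baserow has no xlarge

def to_baserow_row_height(airtable_value):
    # Unknown or missing values fall back to the default, "small".
    return row_height_mapping.get(airtable_value, "small")

assert to_baserow_row_height("xlarge") == "large"
assert to_baserow_row_height(None) == "small"
```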
@@ -1,17 +1,54 @@
from baserow.contrib.database.fields.utils.duration import (
    H_M,
    H_M_S,
    H_M_S_S,
    H_M_S_SS,
    H_M_S_SSS,
)

AIRTABLE_MAX_DURATION_VALUE = 86399999913600
AIRTABLE_BASE_URL = "https://airtable.com"
AIRTABLE_API_BASE_URL = f"{AIRTABLE_BASE_URL}/v0.3"
AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE = "downloading-base"
AIRTABLE_EXPORT_JOB_CONVERTING = "converting"
AIRTABLE_EXPORT_JOB_DOWNLOADING_FILES = "downloading-files"
AIRTABLE_BASEROW_COLOR_MAPPING = {
-   "blue": "blue",
-   "cyan": "light-blue",
-   "teal": "light-green",
-   "green": "green",
-   "yellow": "light-orange",
-   "orange": "orange",
+   "blue": "light-blue",
+   "cyan": "light-cyan",
+   "teal": "light-pink",  # Baserow doesn't have teal, so we're using the left-over color
+   "green": "light-green",
+   "yellow": "light-yellow",
+   "orange": "light-orange",
    "red": "light-red",
    "pink": "red",
-   "purple": "dark-blue",
+   "purple": "light-purple",
    "gray": "light-gray",
    "blueMedium": "blue",
    "cyanMedium": "cyan",
    "tealMedium": "pink",
    "greenMedium": "green",
    "yellowMedium": "yellow",
    "orangeMedium": "orange",
    "redMedium": "red",
    "purpleMedium": "purple",
    "grayMedium": "gray",
    "blueDark": "dark-blue",
    "cyanDark": "dark-cyan",
    "tealDark": "dark-pink",
    "greenDark": "dark-green",
    "yellowDark": "dark-yellow",
    "orangeDark": "dark-orange",
    "redDark": "dark-red",
    "purpleDark": "dark-purple",
    "grayDark": "dark-gray",
    "blueDarker": "darker-blue",
    "cyanDarker": "darker-cyan",
    "tealDarker": "darker-pink",
    "greenDarker": "darker-green",
    "yellowDarker": "darker-yellow",
    "orangeDarker": "darker-orange",
    "redDarker": "darker-red",
    "purpleDarker": "darker-purple",
    "grayDarker": "darker-gray",
}
AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING = {
    "commaPeriod": "COMMA_PERIOD",
@@ -19,3 +56,51 @@ AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING = {
    "spaceComma": "SPACE_COMMA",
    "spacePeriod": "SPACE_PERIOD",
}
AIRTABLE_DURATION_FIELD_DURATION_FORMAT_MAPPING = {
    "h:mm": H_M,
    "h:mm:ss": H_M_S,
    "h:mm:ss.s": H_M_S_S,
    "h:mm:ss.ss": H_M_S_SS,
    "h:mm:ss.sss": H_M_S_SSS,
}
# All colors from the rating field in Airtable: yellow, orange, red, pink, purple,
# blue, cyan, teal, green, gray. We're only mapping the ones that we have an
# alternative for.
AIRTABLE_RATING_COLOR_MAPPING = {
    "blue": "dark-blue",
    "green": "dark-green",
    "orange": "dark-orange",
    "red": "dark-red",
}
# All icons from Airtable: star, heart, thumbsUp, flag, dot. We're only mapping the
# ones that we have an alternative for.
AIRTABLE_RATING_ICON_MAPPING = {
    "star": "star",
    "heart": "heart",
    "thumbsUp": "thumbs-up",
    "flag": "flag",
}
AIRTABLE_ASCENDING_MAP = {
    "ascending": True,
    "descending": False,
}
AIRTABLE_DATE_FILTER_VALUE_MAP = {
    "daysAgo": "{timeZone}?{numberOfDays}?nr_days_ago",
    "daysFromNow": "{timeZone}?{numberOfDays}?nr_days_from_now",
    "exactDate": "{timeZone}?{exactDate}?exact_date",
    "nextMonth": "{timeZone}??next_month",
    "nextNumberOfDays": "{timeZone}?{numberOfDays}?nr_days_from_now",
    "nextWeek": "{timeZone}??next_week",
    "oneMonthAgo": "{timeZone}??one_month_ago",
    "oneWeekAgo": "{timeZone}?1?nr_weeks_ago",
    "oneMonthFromNow": "{timeZone}?1?nr_months_from_now",
    "oneWeekFromNow": "{timeZone}?1?nr_weeks_from_now",
    "pastMonth": "{timeZone}?1?nr_months_ago",
    "pastNumberOfDays": "{timeZone}?{numberOfDays}?nr_days_ago",
    "pastWeek": "{timeZone}?1?nr_weeks_ago",
    "pastYear": "{timeZone}?1?nr_years_ago",
    "thisCalendarYear": "{timeZone}?0?nr_years_ago",
    "today": "{timeZone}??today",
    "tomorrow": "{timeZone}??tomorrow",
    "yesterday": "{timeZone}??yesterday",
}
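The `AIRTABLE_DATE_FILTER_VALUE_MAP` values are `str.format` templates for Baserow's date filter value syntax. A sketch of how such a template could be resolved; the payload keys (`mode`, `timeZone`, `numberOfDays`) mirror the placeholders but the exact Airtable payload shape is an assumption here, and the real conversion lives in `airtable_date_filter_value_to_baserow`:

```python
# Hypothetical resolution of the date filter templates above.
AIRTABLE_DATE_FILTER_VALUE_MAP = {
    "daysAgo": "{timeZone}?{numberOfDays}?nr_days_ago",
    "exactDate": "{timeZone}?{exactDate}?exact_date",
}

def to_baserow_date_filter_value(airtable_value: dict) -> str:
    # `mode` selects the template; unused placeholders are simply ignored.
    template = AIRTABLE_DATE_FILTER_VALUE_MAP[airtable_value["mode"]]
    return template.format(
        timeZone=airtable_value.get("timeZone", "") or "",
        numberOfDays=airtable_value.get("numberOfDays", "") or "",
        exactDate=airtable_value.get("exactDate", "") or "",
    )

print(to_baserow_date_filter_value(
    {"mode": "daysAgo", "timeZone": "Europe/Amsterdam", "numberOfDays": 7}
))  # Europe/Amsterdam?7?nr_days_ago
```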
@@ -8,3 +8,16 @@ class AirtableShareIsNotABase(Exception):

class AirtableImportNotRespectingConfig(Exception):
    """Raised when the Airtable import is not respecting the `AirtableImportConfig`."""


class AirtableSkipCellValue(Exception):
    """
    Raised when an Airtable cell value must be skipped, and be omitted from the
    export.
    """


class AirtableSkipFilter(Exception):
    """
    Raised when an Airtable filter is not compatible and must be skipped.
    """
@@ -7,12 +7,15 @@ from typing import Dict, List, Optional, Tuple, Union
from zipfile import ZIP_DEFLATED, ZipFile

from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.files.storage import Storage

import requests
from requests import Response

from baserow.contrib.database.airtable.constants import (
    AIRTABLE_API_BASE_URL,
    AIRTABLE_BASE_URL,
    AIRTABLE_EXPORT_JOB_CONVERTING,
    AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE,
    AIRTABLE_EXPORT_JOB_DOWNLOADING_FILES,

@@ -20,25 +23,29 @@ from baserow.contrib.database.airtable.constants import (
from baserow.contrib.database.airtable.registry import (
    AirtableColumnType,
    airtable_column_type_registry,
    airtable_view_type_registry,
)
from baserow.contrib.database.application_types import DatabaseApplicationType
from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
from baserow.contrib.database.fields.field_types import FieldType, field_type_registry
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.models import Database
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.views.registries import view_type_registry
from baserow.core.export_serialized import CoreExportSerializedStructure
from baserow.core.handler import CoreHandler
from baserow.core.models import Workspace
from baserow.core.registries import ImportExportConfig
-from baserow.core.utils import ChildProgressBuilder, remove_invalid_surrogate_characters
+from baserow.core.utils import (
+    ChildProgressBuilder,
+    Progress,
+    remove_invalid_surrogate_characters,
+)

from .config import AirtableImportConfig
from .exceptions import (
    AirtableBaseNotPublic,
    AirtableImportNotRespectingConfig,
    AirtableShareIsNotABase,
    AirtableSkipCellValue,
)
from .import_report import (
    ERROR_TYPE_UNSUPPORTED_FEATURE,

@@ -48,6 +55,7 @@ from .import_report import (
    SCOPE_VIEW,
    AirtableImportReport,
)
from .utils import parse_json_and_remove_invalid_surrogate_characters

User = get_user_model()
@@ -79,7 +87,7 @@ class AirtableHandler:
        :return: The request ID, initial data and the cookies of the response.
        """

-       url = f"https://airtable.com/{share_id}"
+       url = f"{AIRTABLE_BASE_URL}/{share_id}"
        response = requests.get(url, headers=BASE_HEADERS)  # nosec B113

        if not response.ok:
@@ -102,6 +110,44 @@ class AirtableHandler:

        return request_id, init_data, cookies

    @staticmethod
    def make_airtable_request(init_data: dict, request_id: str, **kwargs) -> Response:
        """
        Helper method to make a valid request to Airtable with the correct headers
        and params.

        :param init_data: The init_data returned by the initially requested shared base.
        :param request_id: The request_id returned by the initially requested shared
            base.
        :param kwargs: The kwargs that must be passed into the `requests.get` method.
        :return: The requests Response object related to the request.
        """

        application_id = list(init_data["rawApplications"].keys())[0]
        client_code_version = init_data["codeVersion"]
        page_load_id = init_data["pageLoadId"]
        access_policy = json.loads(init_data["accessPolicy"])

        params = kwargs.get("params", {})
        params["accessPolicy"] = json.dumps(access_policy)
        params["request_id"] = request_id

        return requests.get(
            headers={
                "x-airtable-application-id": application_id,
                "x-airtable-client-queue-time": "45",
                "x-airtable-inter-service-client": "webClient",
                "x-airtable-inter-service-client-code-version": client_code_version,
                "x-airtable-page-load-id": page_load_id,
                "X-Requested-With": "XMLHttpRequest",
                "x-time-zone": "Europe/Amsterdam",
                "x-user-locale": "en",
                **BASE_HEADERS,
            },
            timeout=3 * 60,  # it can take quite a while for Airtable to respond
            **kwargs,
        )  # nosec

    @staticmethod
    def fetch_table_data(
        table_id: str,
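`make_airtable_request` mainly exists to assemble the headers Airtable's private API expects from the `init_data` of the shared base. A sketch of that assembly in isolation, with fabricated ids and no network call:

```python
# Standalone sketch of the header assembly done by make_airtable_request.
# The init_data keys mirror the ones used above; all values are fabricated.
import json

def build_airtable_headers(init_data: dict) -> dict:
    return {
        "x-airtable-application-id": list(init_data["rawApplications"].keys())[0],
        "x-airtable-inter-service-client-code-version": init_data["codeVersion"],
        "x-airtable-page-load-id": init_data["pageLoadId"],
        "X-Requested-With": "XMLHttpRequest",
    }

init_data = {
    "rawApplications": {"appXXXXXXXXXXXXXX": {}},
    "codeVersion": "abc123",
    "pageLoadId": "pglXXXXXXXXXXXXXX",
    "accessPolicy": json.dumps({}),
}
print(build_airtable_headers(init_data)["x-airtable-application-id"])
```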
@@ -135,43 +181,63 @@ class AirtableHandler:
        """

        application_id = list(init_data["rawApplications"].keys())[0]
-       client_code_version = init_data["codeVersion"]
-       page_load_id = init_data["pageLoadId"]
-
        stringified_object_params = {
            "includeDataForViewIds": None,
            "shouldIncludeSchemaChecksum": True,
            "mayOnlyIncludeRowAndCellDataForIncludedViews": False,
        }
-       access_policy = json.loads(init_data["accessPolicy"])

        if fetch_application_structure:
            stringified_object_params["includeDataForTableIds"] = [table_id]
-           url = f"https://airtable.com/v0.3/application/{application_id}/read"
+           url = f"{AIRTABLE_API_BASE_URL}/application/{application_id}/read"
        else:
-           url = f"https://airtable.com/v0.3/table/{table_id}/readData"
+           url = f"{AIRTABLE_API_BASE_URL}/table/{table_id}/readData"

-       response = requests.get(
+       response = AirtableHandler.make_airtable_request(
+           init_data,
+           request_id,
            url=url,
            stream=stream,
            params={
                "stringifiedObjectParams": json.dumps(stringified_object_params),
-               "requestId": request_id,
-               "accessPolicy": json.dumps(access_policy),
            },
-           headers={
-               "x-airtable-application-id": application_id,
-               "x-airtable-client-queue-time": "45",
-               "x-airtable-inter-service-client": "webClient",
-               "x-airtable-inter-service-client-code-version": client_code_version,
-               "x-airtable-page-load-id": page_load_id,
-               "X-Requested-With": "XMLHttpRequest",
-               "x-time-zone": "Europe/Amsterdam",
-               "x-user-locale": "en",
-               **BASE_HEADERS,
-           },
            cookies=cookies,
-       )  # nosec B113
+       )
        return response

    @staticmethod
    def fetch_view_data(
        view_id: str,
        init_data: dict,
        request_id: str,
        cookies: dict,
        stream=True,
    ) -> Response:
        """
        :param view_id: The Airtable view id that must be fetched. The id starts with
            `viw`.
        :param init_data: The init_data returned by the initially requested shared base.
        :param request_id: The request_id returned by the initially requested shared
            base.
        :param cookies: The cookies dict returned by the initially requested shared
            base.
        :param stream: Indicates whether the request should be streamed. This could be
            useful if we want to show a progress bar. It will directly be passed into
            the `requests` request.
        :return: The `requests` response containing the result.
        """

        stringified_object_params = {}
        url = f"{AIRTABLE_API_BASE_URL}/view/{view_id}/readData"

        response = AirtableHandler.make_airtable_request(
            init_data,
            request_id,
            url=url,
            stream=stream,
            params={"stringifiedObjectParams": json.dumps(stringified_object_params)},
            cookies=cookies,
        )
        return response

    @staticmethod
@@ -247,6 +313,7 @@ class AirtableHandler:
            baserow_field.pk = 0
            baserow_field.name = column["name"]
            baserow_field.order = order
+           baserow_field.description = column.get("description", None) or None
            baserow_field.primary = (
                baserow_field_type.can_be_primary_field(baserow_field)
                and table["primaryColumnId"] == column["id"]
@@ -305,25 +372,42 @@ class AirtableHandler:
        # Some empty rows don't have the `cellValuesByColumnId` property because it
        # doesn't contain values, hence the fallback to prevent failing hard.
        cell_values = row.get("cellValuesByColumnId", {})
-       for column_id, column_value in cell_values.items():
-           if column_id not in column_mapping:
-               continue
-
-           mapping_values = column_mapping[column_id]
-           baserow_serialized_value = mapping_values[
-               "airtable_column_type"
-           ].to_baserow_export_serialized_value(
+       for column_id, mapping_values in column_mapping.items():
+           airtable_column_type = mapping_values["airtable_column_type"]
+           args = [
                row_id_mapping,
                table,
                row,
                mapping_values["raw_airtable_column"],
                mapping_values["baserow_field"],
-               column_value,
+               cell_values.get(column_id, None),
                files_to_download,
                config,
                import_report,
-           )
-           exported_row[f"field_{column_id}"] = baserow_serialized_value
+           ]
+
+           try:
+               # The column_id typically doesn't exist in the `cell_values` if the
+               # value is empty in Airtable.
+               if column_id in cell_values:
+                   baserow_serialized_value = (
+                       airtable_column_type.to_baserow_export_serialized_value(*args)
+                   )
+               else:
+                   # Remove the cell value because that one is not accepted in the
+                   # args of this method.
+                   args.pop(5)
+                   baserow_serialized_value = (
+                       airtable_column_type.to_baserow_export_empty_value(*args)
+                   )
+               exported_row[f"field_{column_id}"] = baserow_serialized_value
+           except AirtableSkipCellValue:
+               # If `AirtableSkipCellValue` is raised, then the cell value must
+               # not be included in the export. This is the default behavior for
+               # `to_baserow_export_empty_value`, but in some cases, a specific empty
+               # value must be returned.
+               pass

        return exported_row
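The reworked row export iterates over the field mapping rather than over the present cell values, so empty cells also get a chance to produce a value, and a sentinel exception lets a converter omit a cell from the export entirely. The control flow in miniature:

```python
# Pure-Python sketch of the filled-vs-empty dispatch above.
class SkipCellValue(Exception):
    pass

def export_row(cell_values: dict, column_ids: list) -> dict:
    exported = {}
    for column_id in column_ids:
        try:
            if column_id in cell_values:
                value = str(cell_values[column_id])  # "serialize" the value
            else:
                raise SkipCellValue  # default: omit empty cells
            exported[f"field_{column_id}"] = value
        except SkipCellValue:
            pass
    return exported

print(export_row({"colA": 1}, ["colA", "colB"]))  # {'field_colA': '1'}
```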
@@ -375,6 +459,243 @@ class AirtableHandler:

        return files_buffer

    @classmethod
    def _parse_table_fields(
        cls,
        schema: dict,
        converting_progress: Progress,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ):
        field_mapping_per_table = {}
        for table_index, table in enumerate(schema["tableSchemas"]):
            field_mapping = {}

            # Loop over all the columns in the table and try to convert them to Baserow
            # format.
            primary = None
            for column in table["columns"]:
                (
                    baserow_field,
                    baserow_field_type,
                    airtable_column_type,
                ) = cls.to_baserow_field(table, column, config, import_report)
                converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)

                # None means that none of the field types know how to parse this field,
                # so we must ignore it.
                if baserow_field is None:
                    import_report.add_failed(
                        column["name"],
                        SCOPE_FIELD,
                        table["name"],
                        ERROR_TYPE_UNSUPPORTED_FEATURE,
                        f"""Field "{column['name']}" with field type {column["type"]} was not imported because it is not supported.""",
                    )
                    continue

                # The `baserow_field` is returned in its specific form, but it doesn't
                # have the `content_type` property yet. This breaks all the `.specific`
                # behavior because an `id` is also not set.
                baserow_field.content_type = ContentType.objects.get_for_model(
                    baserow_field
                )

                # Construct a mapping where the Airtable column id is the key and the
                # value contains the raw Airtable column values, Baserow field and
                # the Baserow field type object for later use.
                field_mapping[column["id"]] = {
                    "baserow_field": baserow_field,
                    "baserow_field_type": baserow_field_type,
                    "raw_airtable_column": column,
                    "airtable_column_type": airtable_column_type,
                }
                if baserow_field.primary:
                    primary = baserow_field

            # There is always a primary field, but it could be that it's not compatible
            # with Baserow. In that case, we need to find an alternative field, or
            # create a new one.
            if primary is None:
                # First check if another field can act as the primary field type.
                found_existing_field = False
                for value in field_mapping.values():
                    if field_type_registry.get_by_model(
                        value["baserow_field"]
                    ).can_be_primary_field(value["baserow_field"]):
                        value["baserow_field"].primary = True
                        found_existing_field = True
                        import_report.add_failed(
                            value["baserow_field"].name,
                            SCOPE_FIELD,
                            table["name"],
                            ERROR_TYPE_UNSUPPORTED_FEATURE,
                            f"""Changed primary field to "{value["baserow_field"].name}" because the original primary field is incompatible.""",
                        )
                        break

                # If none of the existing fields can be primary, we will add a new
                # text field.
                if not found_existing_field:
                    airtable_column = {
                        "id": "primary_field",
                        "name": "Primary field (auto created)",
                        "type": "text",
                    }
                    (
                        baserow_field,
                        baserow_field_type,
                        airtable_column_type,
                    ) = cls.to_baserow_field(
                        table, airtable_column, config, import_report
                    )
                    baserow_field.primary = True
                    baserow_field.content_type = ContentType.objects.get_for_model(
                        baserow_field
                    )
                    field_mapping["primary_id"] = {
                        "baserow_field": baserow_field,
                        "baserow_field_type": baserow_field_type,
                        "raw_airtable_column": airtable_column,
                        "airtable_column_type": airtable_column_type,
                    }
                    import_report.add_failed(
                        baserow_field.name,
                        SCOPE_FIELD,
                        table["name"],
                        ERROR_TYPE_UNSUPPORTED_FEATURE,
                        f"""Created new primary field "{baserow_field.name}" because none of the provided fields are compatible.""",
                    )

            field_mapping_per_table[table["id"]] = field_mapping

        # Loop over all created fields, and post process them if needed. This is for
        # example needed for the link row field where the object must be enhanced with
        # the primary field of the related tables.
        for table_index, table in enumerate(schema["tableSchemas"]):
            field_mapping = field_mapping_per_table[table["id"]]

            for field_object in field_mapping.values():
                field_object["airtable_column_type"].after_field_objects_prepared(
                    field_mapping_per_table,
                    field_object["baserow_field"],
                    field_object["raw_airtable_column"],
                )

        return field_mapping_per_table

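The primary field fallback in `_parse_table_fields` in miniature: prefer an existing compatible field, otherwise append a stand-in text field. The dict-based field objects are a simplification of Baserow's models:

```python
# Sketch of the primary field fallback used by _parse_table_fields.
def choose_primary(fields: list) -> list:
    # `fields` is a list of dicts with "name" and "can_be_primary" keys.
    if any(f.get("primary") for f in fields):
        return fields
    for field in fields:
        if field["can_be_primary"]:
            field["primary"] = True  # promote an existing compatible field
            return fields
    # No compatible field exists, so create a stand-in text field.
    fields.append(
        {"name": "Primary field (auto created)", "can_be_primary": True, "primary": True}
    )
    return fields

print(choose_primary([{"name": "Button", "can_be_primary": False}]))
```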
    @classmethod
    def _parse_rows_and_views(
        cls,
        schema: dict,
        tables: list,
        converting_progress: Progress,
        row_id_mapping: Dict[str, int],
        field_mapping_per_table: dict,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ):
        # A list containing all the exported tables in Baserow format.
        exported_tables = []

        # A dict containing all the user files that must be downloaded and added to a
        # zip file.
        files_to_download = {}

        # Loop over the tables one more time to export the fields, rows, and views to
        # the serialized format. This must be done last, after all the data has been
        # prepared correctly.
        for table_index, table in enumerate(schema["tableSchemas"]):
            field_mapping = field_mapping_per_table[table["id"]]
            files_to_download_for_table = {}

            # Loop over all the fields and convert them to Baserow serialized format.
            exported_fields = [
                value["baserow_field_type"].export_serialized(value["baserow_field"])
                for value in field_mapping.values()
            ]

            # Loop over all the rows in the table and convert them to Baserow format. We
            # need to provide the `row_id_mapping` and `field_mapping` because there
            # could be references to other rows and fields. The
            # `files_to_download_for_table` is needed because every value could be
            # depending on additional files that must later be downloaded.
            exported_rows = []
            for row_index, row in enumerate(tables[table["id"]]["rows"]):
                exported_rows.append(
                    cls.to_baserow_row_export(
                        table,
                        row_id_mapping,
                        field_mapping,
                        row,
                        row_index,
                        files_to_download_for_table,
                        config,
                        import_report,
                    )
                )
                converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)

            # Loop over all the views and convert them to the serialized format.
            # Views of an unsupported type are added as failed to the import report.
            exported_views = []
            for view in table["views"]:
                table_data = tables[table["id"]]
                view_data = next(
                    (
                        view_data
                        for view_data in table_data["viewDatas"]
                        if view_data["id"] == view["id"]
                    )
                )
                serialized_view = (
                    airtable_view_type_registry.from_airtable_view_to_serialized(
                        field_mapping,
                        row_id_mapping,
                        table,
                        view,
                        view_data,
                        config,
                        import_report,
                    )
                )

                if serialized_view is None:
                    import_report.add_failed(
                        view["name"],
                        SCOPE_VIEW,
                        table["name"],
                        ERROR_TYPE_UNSUPPORTED_FEATURE,
                        f"View \"{view['name']}\" was not imported because "
                        f"{view['type']} is not supported.",
                    )
                    continue

                exported_views.append(serialized_view)

            exported_table = DatabaseExportSerializedStructure.table(
                id=table["id"],
                name=table["name"],
                order=table_index,
                fields=exported_fields,
                views=exported_views,
                rows=exported_rows,
                data_sync=None,
            )
            exported_tables.append(exported_table)
            converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)

            # Airtable has a mapping of signed URLs for the uploaded files. The
            # mapping is provided in the table payload, and if it exists, we need
            # that URL for download instead of the one originally provided.
            signed_user_content_urls = tables[table["id"]]["signedUserContentUrls"]
            for file_name, url in files_to_download_for_table.items():
                if url in signed_user_content_urls:
                    url = signed_user_content_urls[url]
                files_to_download[file_name] = url

        return exported_tables, files_to_download

    @classmethod
    def to_baserow_database_export(
        cls,

@@ -389,9 +710,6 @@ class AirtableHandler:
        Converts the provided raw Airtable database dict to a Baserow export format and
        an in memory zip file containing all the downloaded user files.

-       @TODO add the views.
-       @TODO preserve the order of least one view.
-
        :param init_data: The init_data, extracted from the initial page related to the
            shared base.
        :param schema: An object containing the schema of the Airtable base.
@@ -430,13 +748,6 @@ class AirtableHandler:
            ),
        )

-       # A list containing all the exported table in Baserow format.
-       exported_tables = []
-
-       # A dict containing all the user files that must be downloaded and added to a
-       # zip file.
-       files_to_download = {}
-
        # A mapping containing the Airtable table id as key and as value another mapping
        # containing with the key as Airtable row id and the value as new Baserow row
        # id. This mapping is created because Airtable has string row id that look like
@@ -451,151 +762,18 @@ class AirtableHandler:
            row["id"] = new_id
            converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)

-       view_id = 0
-       for table_index, table in enumerate(schema["tableSchemas"]):
-           field_mapping = {}
-           files_to_download_for_table = {}
-
-           # Loop over all the columns in the table and try to convert them to Baserow
-           # format.
-           primary = None
-           for column in table["columns"]:
-               (
-                   baserow_field,
-                   baserow_field_type,
-                   airtable_column_type,
-               ) = cls.to_baserow_field(table, column, config, import_report)
-               converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
-
-               # None means that none of the field types know how to parse this field,
-               # so we must ignore it.
-               if baserow_field is None:
-                   import_report.add_failed(
-                       column["name"],
-                       SCOPE_FIELD,
-                       table["name"],
-                       ERROR_TYPE_UNSUPPORTED_FEATURE,
-                       f"""Field "{column['name']}" with field type {column["type"]} was not imported because it is not supported.""",
-                   )
-                   continue
-
-               # Construct a mapping where the Airtable column id is the key and the
-               # value contains the raw Airtable column values, Baserow field and
-               # the Baserow field type object for later use.
-               field_mapping[column["id"]] = {
-                   "baserow_field": baserow_field,
-                   "baserow_field_type": baserow_field_type,
-                   "raw_airtable_column": column,
-                   "airtable_column_type": airtable_column_type,
-               }
-               if baserow_field.primary:
-                   primary = baserow_field
-
-           if primary is None:
-               # First check if another field can act as the primary field type.
-               found_existing_field = False
-               for value in field_mapping.values():
-                   if field_type_registry.get_by_model(
-                       value["baserow_field"]
-                   ).can_be_primary_field(value["baserow_field"]):
-                       value["baserow_field"].primary = True
-                       found_existing_field = True
-                       break
-
-               # If none of the existing fields can be primary, we will add a new
-               # text field.
-               if not found_existing_field:
-                   airtable_column = {
-                       "id": "primary_field",
-                       "name": "Primary field (auto created)",
-                       "type": "text",
-                   }
-                   (
-                       baserow_field,
-                       baserow_field_type,
-                       airtable_column_type,
-                   ) = cls.to_baserow_field(
-                       table, airtable_column, config, import_report
-                   )
-                   baserow_field.primary = True
-                   field_mapping["primary_id"] = {
-                       "baserow_field": baserow_field,
-                       "baserow_field_type": baserow_field_type,
-                       "raw_airtable_column": airtable_column,
-                       "airtable_column_type": airtable_column_type,
-                   }
-
-           # Loop over all the fields and convert them to Baserow serialized format.
-           exported_fields = [
-               value["baserow_field_type"].export_serialized(value["baserow_field"])
-               for value in field_mapping.values()
-           ]
-
-           # Loop over all the rows in the table and convert them to Baserow format. We
-           # need to provide the `row_id_mapping` and `field_mapping` because there
-           # could be references to other rows and fields. The
-           # `files_to_download_for_table` is needed because every value could be
-           # depending on additional files that must later be downloaded.
-           exported_rows = []
-           for row_index, row in enumerate(tables[table["id"]]["rows"]):
-               exported_rows.append(
-                   cls.to_baserow_row_export(
-                       table,
-                       row_id_mapping,
-                       field_mapping,
-                       row,
-                       row_index,
-                       files_to_download_for_table,
-                       config,
-                       import_report,
-                   )
-               )
-               converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
-
-           # Create an empty grid view because the importing of views doesn't work
-           # yet. It's a bit quick and dirty, but it will be replaced soon.
-           grid_view = GridView(pk=0, id=None, name="Grid", order=1)
-           grid_view.get_field_options = lambda *args, **kwargs: []
-           grid_view_type = view_type_registry.get_by_model(grid_view)
-           empty_serialized_grid_view = grid_view_type.export_serialized(
-               grid_view, None, None, None
-           )
-           view_id += 1
-           empty_serialized_grid_view["id"] = view_id
-           exported_views = [empty_serialized_grid_view]
-
-           # Loop over all the views to add them as failed to the import report
-           # because the views are not yet supported.
-           for view in table["views"]:
-               import_report.add_failed(
-                   view["name"],
-                   SCOPE_VIEW,
-                   table["name"],
-                   ERROR_TYPE_UNSUPPORTED_FEATURE,
-                   f"View \"{view['name']}\" was not imported because views are not "
-                   f"yet supported during import.",
-               )
-
-           exported_table = DatabaseExportSerializedStructure.table(
-               id=table["id"],
-               name=table["name"],
-               order=table_index,
-               fields=exported_fields,
-               views=exported_views,
-               rows=exported_rows,
-               data_sync=None,
-           )
-           exported_tables.append(exported_table)
-           converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
-
-           # Airtable has a mapping of signed URLs for the uploaded files. The
-           # mapping is provided in the table payload, and if it exists, we need
-           # that URL for download instead of the one originally provided.
-           signed_user_content_urls = tables[table["id"]]["signedUserContentUrls"]
-           for file_name, url in files_to_download_for_table.items():
-               if url in signed_user_content_urls:
-                   url = signed_user_content_urls[url]
-               files_to_download[file_name] = url
+       field_mapping_per_table = AirtableHandler._parse_table_fields(
+           schema, converting_progress, config, import_report
+       )
+       exported_tables, files_to_download = AirtableHandler._parse_rows_and_views(
+           schema,
+           tables,
+           converting_progress,
+           row_id_mapping,
+           field_mapping_per_table,
+           config,
+           import_report,
+       )

        # Just to be really clear that the automations and interfaces are not included.
        import_report.add_failed(
@@ -643,6 +821,98 @@ class AirtableHandler:

        return exported_database, user_files_zip

    @classmethod
    def fetch_and_combine_airtable_data(
        cls,
        share_id: str,
        progress_builder: Optional[ChildProgressBuilder] = None,
    ) -> Tuple[dict, dict, list]:
        """
        @TODO docs

        :param share_id: The shared Airtable ID of which the data must be fetched.
        :param progress_builder: If provided will be used to build a child progress bar
            and report on this method's progress to the parent of the progress_builder.
        :return: The fetched init_data, schema, and list of tables enriched with all
            the row and view data.
        """

        progress = ChildProgressBuilder.build(progress_builder, child_total=100)

        # Execute the initial request to obtain the initial data that's needed to
        # make the request.
        request_id, init_data, cookies = cls.fetch_publicly_shared_base(share_id)
        progress.increment(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE)

        # Loop over all the tables and make a request for each table to obtain the raw
        # Airtable table data.
        tables = []
        raw_tables = list(
            init_data["singleApplicationScaffoldingData"]["tableById"].keys()
        )
        for index, table_id in enumerate(
            progress.track(
                represents_progress=49,
                state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE,
                iterable=raw_tables,
            )
        ):
            response = cls.fetch_table_data(
                table_id=table_id,
                init_data=init_data,
                request_id=request_id,
                cookies=cookies,
                # At least one request must also fetch the application structure that
                # contains the schema of all the tables, so we do this for the first
                # table.
                fetch_application_structure=index == 0,
                stream=False,
            )
            json_decoded_content = parse_json_and_remove_invalid_surrogate_characters(
                response
            )

            tables.append(json_decoded_content)

        # Split the database schema from the tables because we need this to be
        # separated later on.
        schema, tables = cls.extract_schema(tables)

        # Collect for which views the data is missing, so that they can be
        # fetched while respecting the progress afterward.
        view_data_to_fetch = []
        for table in schema["tableSchemas"]:
            existing_view_data = [
                view_data["id"] for view_data in tables[table["id"]]["viewDatas"]
            ]
            for view in table["views"]:
                # Skip the view data that has already been loaded.
                if view["id"] in existing_view_data:
                    continue

                view_data_to_fetch.append((table["id"], view["id"]))

        # Fetch the missing view data, and add it to the table object so that we have
        # a complete object.
        for table_id, view_id in progress.track(
            represents_progress=50,
            state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE,
            iterable=view_data_to_fetch,
        ):
            response = cls.fetch_view_data(
                view_id=view_id,
                init_data=init_data,
                request_id=request_id,
                cookies=cookies,
                stream=False,
            )
            json_decoded_content = parse_json_and_remove_invalid_surrogate_characters(
                response
            )
            tables[table_id]["viewDatas"].append(json_decoded_content["data"])

        return init_data, schema, tables
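The progress budget in `fetch_and_combine_airtable_data` adds up to the declared child total of 100: 1 for the initial shared base request, 49 for the table data, 50 for the missing view data. A toy illustration of that budgeting (Baserow's `ChildProgressBuilder`/`Progress` API is richer than this stand-in):

```python
# Toy progress tracker; only the budgeting idea is carried over.
class Progress:
    def __init__(self, total):
        self.total, self.done = total, 0

    def increment(self, amount=1):
        self.done += amount
        print(f"{self.done}/{self.total}")

progress = Progress(total=100)
progress.increment(1)            # initial shared base request
for _ in range(7):               # e.g. 7 tables share the 49-point budget
    progress.increment(49 / 7)
progress.increment(50)           # remaining view data
```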
|
||||
@classmethod
|
||||
def import_from_airtable_to_workspace(
|
||||
cls,
|
||||
|
@@ -650,7 +920,7 @@ class AirtableHandler:
        share_id: str,
        storage: Optional[Storage] = None,
        progress_builder: Optional[ChildProgressBuilder] = None,
-        download_files_buffer: Union[None, IOBase] = None,
+        download_files_buffer: Optional[IOBase] = None,
        config: Optional[AirtableImportConfig] = None,
    ) -> Database:
        """

@@ -674,52 +944,10 @@ class AirtableHandler:

        progress = ChildProgressBuilder.build(progress_builder, child_total=1000)

-        # Execute the initial request to obtain the initial data that's needed to
-        # make the request.
-        request_id, init_data, cookies = cls.fetch_publicly_shared_base(share_id)
-        progress.increment(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE)
-
-        # Loop over all the tables and make a request for each table to obtain the raw
-        # Airtable table data.
-        tables = []
-        raw_tables = list(
-            init_data["singleApplicationScaffoldingData"]["tableById"].keys()
+        init_data, schema, tables = AirtableHandler.fetch_and_combine_airtable_data(
+            share_id,
+            progress.create_child_builder(represents_progress=100),
        )
-        for index, table_id in enumerate(
-            progress.track(
-                represents_progress=99,
-                state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE,
-                iterable=raw_tables,
-            )
-        ):
-            response = cls.fetch_table_data(
-                table_id=table_id,
-                init_data=init_data,
-                request_id=request_id,
-                cookies=cookies,
-                # At least one request must also fetch the application structure that
-                # contains the schema of all the tables, so we do this for the first
-                # table.
-                fetch_application_structure=index == 0,
-                stream=False,
-            )
-            try:
-                decoded_content = remove_invalid_surrogate_characters(
-                    response.content, response.encoding
-                )
-                json_decoded_content = json.loads(decoded_content)
-            except json.decoder.JSONDecodeError:
-                # In some cases, the `remove_invalid_surrogate_characters` results in
-                # invalid JSON. It's not completely clear why that is, but this
-                # fallback can still produce valid JSON to import in most cases if
-                # the original json didn't contain invalid surrogate characters.
-                json_decoded_content = response.json()
-
-            tables.append(json_decoded_content)
-
-        # Split database schema from the tables because we need this to be separated
-        # later on.
-        schema, tables = cls.extract_schema(tables)

        # Convert the raw Airtable data to Baserow export format so we can import that
        # later.
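
# A minimal sketch (not part of the diff) of how the new split between
# downloading and importing could be driven, assuming a configured Baserow
# backend environment; the share id is hypothetical. The weighting mirrors the
# code above: `fetch_and_combine_airtable_data` spends 1 + 49 + 50 = 100 child
# progress points, and `import_from_airtable_to_workspace` reserves 100 of its
# 1000 points for that download phase.
from baserow.contrib.database.airtable.handler import AirtableHandler
from baserow.core.utils import Progress

progress = Progress(1000)
init_data, schema, tables = AirtableHandler.fetch_and_combine_airtable_data(
    "shrXxmp0WmqsTkFWTz",  # hypothetical public share id
    progress.create_child_builder(represents_progress=100),
)
print(f"Downloaded {len(schema['tableSchemas'])} tables")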
@@ -17,6 +17,10 @@ def import_airtable_date_type_options(type_options) -> dict:
    }


def to_import_select_option_id(field_id, choice_id):
    return f"{field_id}_{choice_id}"


def import_airtable_choices(field_id: str, type_options: dict) -> List[SelectOption]:
    order = type_options.get("choiceOrder", [])
    choices = type_options.get("choices", [])

@@ -24,7 +28,7 @@ def import_airtable_choices(field_id: str, type_options: dict) -> List[SelectOption]:
        SelectOption(
            # Combine the select id with the choice id, because the choice id is not
            # guaranteed to be unique across tables.
-            id=f"{field_id}_{choice['id']}",
+            id=to_import_select_option_id(field_id, choice["id"]),
            value=choice["name"],
            color=AIRTABLE_BASEROW_COLOR_MAPPING.get(
                # The color isn't always provided, hence the fallback to an empty
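
# Quick, standalone illustration of the combined id format introduced above;
# the field and choice ids are hypothetical Airtable-style identifiers.
assert to_import_select_option_id("fldTn9S25HGSm2QyK", "selB7aGJLM") == (
    "fldTn9S25HGSm2QyK_selB7aGJLM"
)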
@@ -16,13 +16,41 @@ from baserow.core.constants import BASEROW_COLORS
SCOPE_FIELD = SelectOption(id="scope_field", value="Field", color="light-blue", order=1)
SCOPE_CELL = SelectOption(id="scope_cell", value="Cell", color="light-green", order=2)
SCOPE_VIEW = SelectOption(id="scope_view", value="View", color="light-cyan", order=3)
SCOPE_VIEW_SORT = SelectOption(
    id="scope_view_sort", value="View sort", color="light-red", order=4
)
SCOPE_VIEW_GROUP_BY = SelectOption(
    id="scope_view_group_by", value="View group by", color="light-brown", order=5
)
SCOPE_VIEW_FILTER = SelectOption(
    id="scope_view_filter", value="View filter", color="light-pink", order=6
)
SCOPE_VIEW_COLOR = SelectOption(
    id="scope_view_color", value="View color", color="light-gray", order=7
)
SCOPE_VIEW_FIELD_OPTIONS = SelectOption(
    id="scope_view_field_options",
    value="View field options",
    color="light-purple",
    order=8,
)
SCOPE_AUTOMATIONS = SelectOption(
-    id="scope_automations", value="Automations", color="light-orange", order=4
+    id="scope_automations", value="Automations", color="light-orange", order=9
)
SCOPE_INTERFACES = SelectOption(
-    id="scope_interfaces", value="Interfaces", color="light-yellow", order=5
+    id="scope_interfaces", value="Interfaces", color="light-yellow", order=10
)
-ALL_SCOPES = [SCOPE_FIELD, SCOPE_CELL, SCOPE_VIEW, SCOPE_AUTOMATIONS, SCOPE_INTERFACES]
+ALL_SCOPES = [
+    SCOPE_FIELD,
+    SCOPE_CELL,
+    SCOPE_VIEW,
+    SCOPE_VIEW_SORT,
+    SCOPE_VIEW_GROUP_BY,
+    SCOPE_VIEW_FILTER,
+    SCOPE_VIEW_COLOR,
+    SCOPE_AUTOMATIONS,
+    SCOPE_INTERFACES,
+]

ERROR_TYPE_UNSUPPORTED_FEATURE = SelectOption(
    id="error_type_unsupported_feature",


@@ -1,9 +1,55 @@
from datetime import tzinfo
-from typing import Any, Dict, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union

from baserow_premium.views.decorator_types import LeftBorderColorDecoratorType
from baserow_premium.views.decorator_value_provider_types import (
    ConditionalColorValueProviderType,
    SelectColorValueProviderType,
)

from baserow.contrib.database.airtable.config import AirtableImportConfig
-from baserow.contrib.database.airtable.import_report import AirtableImportReport
from baserow.contrib.database.airtable.constants import (
    AIRTABLE_ASCENDING_MAP,
    AIRTABLE_BASEROW_COLOR_MAPPING,
)
from baserow.contrib.database.airtable.exceptions import (
    AirtableSkipCellValue,
    AirtableSkipFilter,
)
from baserow.contrib.database.airtable.import_report import (
    ERROR_TYPE_DATA_TYPE_MISMATCH,
    ERROR_TYPE_UNSUPPORTED_FEATURE,
    SCOPE_FIELD,
    SCOPE_VIEW_COLOR,
    SCOPE_VIEW_FILTER,
    SCOPE_VIEW_GROUP_BY,
    SCOPE_VIEW_SORT,
    AirtableImportReport,
)
from baserow.contrib.database.airtable.utils import (
    get_airtable_column_name,
    unknown_value_to_human_readable,
)
from baserow.contrib.database.fields.field_filters import (
    FILTER_TYPE_AND,
    FILTER_TYPE_OR,
)
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.views.models import (
    DEFAULT_SORT_TYPE_KEY,
    SORT_ORDER_ASC,
    SORT_ORDER_DESC,
    View,
    ViewDecoration,
    ViewFilter,
    ViewFilterGroup,
    ViewGroupBy,
    ViewSort,
)
from baserow.contrib.database.views.registries import (
    ViewFilterType,
    ViewType,
    view_type_registry,
)
from baserow.core.registry import Instance, Registry


@@ -12,7 +58,6 @@ class AirtableColumnType(Instance):
        self,
        raw_airtable_table: dict,
        raw_airtable_column: dict,
        timezone: tzinfo,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ) -> Union[Field, None]:

@@ -34,6 +79,18 @@ class AirtableColumnType(Instance):

        raise NotImplementedError("The `to_baserow_field` must be implemented.")

    def after_field_objects_prepared(
        self,
        field_mapping_per_table: Dict[str, Dict[str, Any]],
        baserow_field: Field,
        raw_airtable_column: dict,
    ):
        """
        Hook that is called after the field objects of all tables have been
        prepared. This allows doing some post-processing on the fields in case
        they depend on each other.
        """

    def to_baserow_export_serialized_value(
        self,
        row_id_mapping: Dict[str, Dict[str, int]],

@@ -70,6 +127,40 @@ class AirtableColumnType(Instance):

        return value

    def to_baserow_export_empty_value(
        self,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_row: dict,
        raw_airtable_column: dict,
        baserow_field: Field,
        files_to_download: Dict[str, str],
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ):
        # By default, raise `AirtableSkipCellValue` so that the value is not
        # included in the export.
        raise AirtableSkipCellValue

    def add_import_report_failed_if_default_is_provided(
        self,
        raw_airtable_table: dict,
        raw_airtable_column: dict,
        import_report: AirtableImportReport,
        to_human_readable_default=(lambda x: x),
    ):
        default = raw_airtable_column.get("default", "")
        if default:
            default = to_human_readable_default(default)
            import_report.add_failed(
                raw_airtable_column["name"],
                SCOPE_FIELD,
                raw_airtable_table.get("name", ""),
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                f"The field was imported, but the default value "
                f"{default} was dropped because that's not supported in Baserow.",
            )

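# A minimal sketch of a concrete column type using the new
# `after_field_objects_prepared` hook; the class name and the description it
# writes are hypothetical and only illustrate the contract documented above.
from baserow.contrib.database.airtable.registry import AirtableColumnType


class HypotheticalAirtableColumnType(AirtableColumnType):
    type = "hypothetical"

    def after_field_objects_prepared(
        self, field_mapping_per_table, baserow_field, raw_airtable_column
    ):
        # Every table's field objects exist at this point, so state that depends
        # on a field in another table can safely be resolved here.
        baserow_field.description = f"Imported from {raw_airtable_column['id']}"
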
class AirtableColumnTypeRegistry(Registry):
    name = "airtable_column"

@@ -109,6 +200,699 @@ class AirtableColumnTypeRegistry(Registry):
        return None, None


class AirtableViewType(Instance):
    baserow_view_type: Optional[str] = None

    def get_sorts(
        self,
        field_mapping: dict,
        view_type: ViewType,
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        import_report: AirtableImportReport,
    ) -> List[ViewSort]:
        """
        Maps the sorts from the raw Airtable view data to a list of Baserow
        compatible ViewSort objects.
        """

        last_sorts_applied = raw_airtable_view_data.get("lastSortsApplied", None)

        if not view_type.can_sort or last_sorts_applied is None:
            return []

        sort_set = last_sorts_applied.get("sortSet", None) or []

        view_sorts = []
        for sort in sort_set:
            if sort["columnId"] not in field_mapping:
                column_name = get_airtable_column_name(
                    raw_airtable_table, sort["columnId"]
                )
                import_report.add_failed(
                    f'View "{raw_airtable_view["name"]}", Field ID "{column_name}"',
                    SCOPE_VIEW_SORT,
                    raw_airtable_table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f'The sort on field "{column_name}" was ignored in view'
                    f' {raw_airtable_view["name"]} because the field is not imported.',
                )
                continue

            mapping_entry = field_mapping[sort["columnId"]]
            baserow_field_type = mapping_entry["baserow_field_type"]
            baserow_field = mapping_entry["baserow_field"]
            can_order_by = baserow_field_type.check_can_order_by(
                baserow_field, DEFAULT_SORT_TYPE_KEY
            )

            if not can_order_by:
                import_report.add_failed(
                    f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
                    SCOPE_VIEW_SORT,
                    raw_airtable_table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f'The sort on field "{baserow_field.name}" was ignored in view'
                    f' {raw_airtable_view["name"]} because it\'s not possible to '
                    f"order by that field type.",
                )
                continue

            view_sort = ViewSort(
                id=sort["id"],
                field_id=sort["columnId"],
                order=SORT_ORDER_ASC if sort["ascending"] else SORT_ORDER_DESC,
            )
            view_sorts.append(view_sort)

        return view_sorts

    def get_group_bys(
        self,
        field_mapping: dict,
        view_type: ViewType,
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        import_report: AirtableImportReport,
    ) -> List[ViewGroupBy]:
        """
        Maps the group bys from the raw Airtable view data to a list of Baserow
        compatible ViewGroupBy objects.
        """

        group_levels = raw_airtable_view_data.get("groupLevels", None)

        if not view_type.can_sort or group_levels is None:
            return []

        view_group_by = []
        for group in group_levels:
            if group["columnId"] not in field_mapping:
                column_name = get_airtable_column_name(
                    raw_airtable_table, group["columnId"]
                )
                import_report.add_failed(
                    f'View "{raw_airtable_view["name"]}", Field ID "{column_name}"',
                    SCOPE_VIEW_GROUP_BY,
                    raw_airtable_table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f'The group by on field "{column_name}" was ignored in view'
                    f' {raw_airtable_view["name"]} because the field was not imported.',
                )
                continue

            mapping_entry = field_mapping[group["columnId"]]
            baserow_field_type = mapping_entry["baserow_field_type"]
            baserow_field = mapping_entry["baserow_field"]
            can_group_by = baserow_field_type.check_can_group_by(
                baserow_field, DEFAULT_SORT_TYPE_KEY
            )

            if not can_group_by:
                import_report.add_failed(
                    f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
                    SCOPE_VIEW_GROUP_BY,
                    raw_airtable_table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f'The group by on field "{baserow_field.name}" was ignored in '
                    f'view {raw_airtable_view["name"]} because it\'s not possible '
                    f"to group by that field type.",
                )
                continue

            ascending = AIRTABLE_ASCENDING_MAP.get(group["order"], None)

            if ascending is None:
                import_report.add_failed(
                    f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
                    SCOPE_VIEW_GROUP_BY,
                    raw_airtable_table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f'The group by on field "{baserow_field.name}" was ignored in '
                    f'view {raw_airtable_view["name"]} because the order '
                    f'{group["order"]} is incompatible.',
                )
                continue

            view_group = ViewGroupBy(
                id=group["id"],
                field_id=group["columnId"],
                order=SORT_ORDER_ASC if ascending else SORT_ORDER_DESC,
            )
            view_group_by.append(view_group)

        return view_group_by

    def get_filter(
        self,
        field_mapping: dict,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_view: dict,
        raw_airtable_table: dict,
        import_report: AirtableImportReport,
        filter_object: dict,
        parent_group: Optional[ViewFilterGroup] = None,
    ):
        """
        Converts a raw Airtable filter object into a Baserow filter object that's
        ready for the export system.
        """

        # If it's not a group, then it's an individual filter, and it must be
        # parsed accordingly.
        if filter_object["columnId"] not in field_mapping:
            column_name = get_airtable_column_name(
                raw_airtable_table, filter_object["columnId"]
            )
            filter_value = unknown_value_to_human_readable(filter_object["value"])
            import_report.add_failed(
                f'View "{raw_airtable_view["name"]}", Field ID "{column_name}"',
                SCOPE_VIEW_FILTER,
                raw_airtable_table["name"],
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                f'The "{filter_object["operator"]}" filter with value '
                f'"{filter_value}" on field "{column_name}" was ignored '
                f'in view {raw_airtable_view["name"]} because the field was not '
                f"imported.",
            )
            return None

        mapping_entry = field_mapping[filter_object["columnId"]]
        baserow_field_type = mapping_entry["baserow_field_type"]
        baserow_field = mapping_entry["baserow_field"]
        raw_airtable_column = mapping_entry["raw_airtable_column"]
        can_filter_by = baserow_field_type.check_can_filter_by(baserow_field)

        if not can_filter_by:
            filter_value = unknown_value_to_human_readable(filter_object["value"])
            import_report.add_failed(
                f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
                SCOPE_VIEW_FILTER,
                raw_airtable_table["name"],
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                f'The "{filter_object["operator"]}" filter with value '
                f'"{filter_value}" on field "{baserow_field.name}" was '
                f'ignored in view {raw_airtable_view["name"]} because it\'s not '
                f"possible to filter by that field type.",
            )
            return None

        try:
            filter_operator = airtable_filter_operator_registry.get(
                filter_object["operator"]
            )
            filter_type, value = filter_operator.to_baserow_filter_and_value(
                row_id_mapping,
                raw_airtable_table,
                raw_airtable_column,
                baserow_field,
                import_report,
                filter_object["value"],
            )

            if not filter_type.field_is_compatible(baserow_field):
                raise AirtableSkipFilter
        except (
            airtable_filter_operator_registry.does_not_exist_exception_class,
            # If the `AirtableSkipFilter` exception is raised, then the Airtable
            # filter exists, but is not compatible with the Baserow filters. It can
            # be raised in `to_baserow_filter_and_value`, but also if the filter
            # appears not to be compatible afterward.
            AirtableSkipFilter,
        ):
            filter_value = unknown_value_to_human_readable(filter_object["value"])
            import_report.add_failed(
                f'View "{raw_airtable_view["name"]}", Field "{baserow_field.name}"',
                SCOPE_VIEW_FILTER,
                raw_airtable_table["name"],
                ERROR_TYPE_UNSUPPORTED_FEATURE,
                f'The "{filter_object["operator"]}" filter with value '
                f'"{filter_value}" on field "{baserow_field.name}" was '
                f'ignored in view {raw_airtable_view["name"]} because no '
                f"compatible filter exists.",
            )
            return None

        return ViewFilter(
            id=filter_object["id"],
            type=filter_type.type,
            value=value,
            field_id=filter_object["columnId"],
            view_id=raw_airtable_view["id"],
            group_id=parent_group.id if parent_group else None,
        )

    def get_filters(
        self,
        field_mapping: dict,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_view: dict,
        raw_airtable_table: dict,
        import_report: AirtableImportReport,
        filter_object: dict,
        filter_groups: Optional[List[ViewFilterGroup]] = None,
        parent_group: Optional[ViewFilterGroup] = None,
    ) -> Tuple[List[ViewFilter], List[ViewFilterGroup]]:
        """
        Recursive method that loops over the filters in the `filter_object` and
        converts them to two flat lists containing the Baserow ViewFilter and
        ViewFilterGroup objects.
        """

        if filter_groups is None:
            filter_groups = []

        filters = []
        conjunction = filter_object.get("conjunction", None)
        filter_set = filter_object.get("filterSet", None)
        column_id = filter_object.get("columnId", None)

        if conjunction and filter_set:
            # The filter_object is a nested structure, where if the `conjunction` and
            # `filterSet` are in the object, it means that it's a filter group.
            view_group = ViewFilterGroup(
                # Specifically keep the id `None` for the root group because that
                # doesn't exist in Baserow.
                id=filter_object.get("id", None),
                parent_group=parent_group,
                filter_type=FILTER_TYPE_OR if conjunction == "or" else FILTER_TYPE_AND,
                view_id=raw_airtable_view["id"],
            )

            if view_group not in filter_groups:
                filter_groups.append(view_group)

            for child_filter in filter_set:
                child_filters, _ = self.get_filters(
                    field_mapping,
                    row_id_mapping,
                    raw_airtable_view,
                    raw_airtable_table,
                    import_report,
                    child_filter,
                    filter_groups,
                    view_group,
                )
                filters.extend(child_filters)

            return filters, filter_groups
        elif column_id:
            baserow_filter = self.get_filter(
                field_mapping,
                row_id_mapping,
                raw_airtable_view,
                raw_airtable_table,
                import_report,
                filter_object,
                parent_group,
            )

            if baserow_filter is None:
                return [], []
            else:
                return [baserow_filter], []

        return [], []

    def get_select_column_decoration(
        self,
        field_mapping: dict,
        view_type: ViewType,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        import_report: AirtableImportReport,
    ) -> Optional[ViewDecoration]:
        color_config = raw_airtable_view_data["colorConfig"]
        select_column_id = color_config["selectColumnId"]

        if select_column_id not in field_mapping:
            column_name = get_airtable_column_name(raw_airtable_table, select_column_id)
            import_report.add_failed(
                raw_airtable_view["name"],
                SCOPE_VIEW_COLOR,
                raw_airtable_table["name"],
                ERROR_TYPE_DATA_TYPE_MISMATCH,
                f'The select field coloring was ignored in {raw_airtable_view["name"]} '
                f"because {column_name} does not exist.",
            )
            return None

        return ViewDecoration(
            id=f"{raw_airtable_view['id']}_decoration",
            view_id=raw_airtable_view["id"],
            type=LeftBorderColorDecoratorType.type,
            value_provider_type=SelectColorValueProviderType.type,
            value_provider_conf={"field_id": select_column_id},
            order=1,
        )

    def get_color_definitions_decoration(
        self,
        field_mapping: dict,
        view_type: ViewType,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        import_report: AirtableImportReport,
    ) -> Optional[ViewDecoration]:
        color_config = raw_airtable_view_data["colorConfig"]
        color_definitions = color_config["colorDefinitions"]
        default_color = AIRTABLE_BASEROW_COLOR_MAPPING.get(
            color_config.get("defaultColor", ""),
            "",
        )
        baserow_colors = []

        for color_definition in color_definitions:
            filters, filter_groups = self.get_filters(
                field_mapping,
                row_id_mapping,
                raw_airtable_view,
                raw_airtable_table,
                import_report,
                color_definition,
            )
            # Pop the first group because that shouldn't be in Baserow, and the type
            # is defined on the view.
            if len(filter_groups) > 0:
                root_group = filter_groups.pop(0)
            color = AIRTABLE_BASEROW_COLOR_MAPPING.get(
                color_definition.get("color", ""),
                "blue",
            )
            baserow_colors.append(
                {
                    "filter_groups": [
                        {
                            "id": filter_group.id,
                            "filter_type": filter_group.filter_type,
                            "parent_group": (
                                None
                                if filter_group.parent_group_id == root_group.id
                                else filter_group.parent_group_id
                            ),
                        }
                        for filter_group in filter_groups
                    ],
                    "filters": [
                        {
                            "id": filter_object.id,
                            "type": filter_object.type,
                            "field": filter_object.field_id,
                            "group": (
                                None
                                if filter_object.group_id == root_group.id
                                else filter_object.group_id
                            ),
                            "value": filter_object.value,
                        }
                        for filter_object in filters
                    ],
                    "operator": root_group.filter_type,
                    "color": color,
                }
            )

        if default_color != "":
            baserow_colors.append(
                {
                    "filter_groups": [],
                    "filters": [],
                    "operator": "AND",
                    "color": default_color,
                }
            )

        return ViewDecoration(
            id=f"{raw_airtable_view['id']}_decoration",
            view_id=raw_airtable_view["id"],
            type=LeftBorderColorDecoratorType.type,
            value_provider_type=ConditionalColorValueProviderType.type,
            value_provider_conf={"colors": baserow_colors},
            order=1,
        )

    def get_decorations(
        self,
        field_mapping: dict,
        view_type: ViewType,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        import_report: AirtableImportReport,
    ) -> List[ViewDecoration]:
        """
        Converts the raw Airtable color config into matching Baserow view decorations.
        """

        color_config = raw_airtable_view_data.get("colorConfig", None)

        if not view_type.can_decorate or color_config is None:
            return []

        color_config_type = color_config.get("type", "")
        decoration = None

        if color_config_type == "selectColumn":
            decoration = self.get_select_column_decoration(
                field_mapping,
                view_type,
                row_id_mapping,
                raw_airtable_table,
                raw_airtable_view,
                raw_airtable_view_data,
                import_report,
            )
        elif color_config_type == "colorDefinitions":
            decoration = self.get_color_definitions_decoration(
                field_mapping,
                view_type,
                row_id_mapping,
                raw_airtable_table,
                raw_airtable_view,
                raw_airtable_view_data,
                import_report,
            )

        if decoration:
            return [decoration]
        else:
            return []

    def to_serialized_baserow_view(
        self,
        field_mapping,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_view,
        raw_airtable_view_data,
        config,
        import_report,
    ):
        if self.baserow_view_type is None:
            raise NotImplementedError(
                "The `baserow_view_type` must be implemented for the AirtableViewType."
            )

        view_type = view_type_registry.get(self.baserow_view_type)
        view = view_type.model_class(
            id=raw_airtable_view["id"],
            pk=raw_airtable_view["id"],
            name=raw_airtable_view["name"],
            order=raw_airtable_table["viewOrder"].index(raw_airtable_view["id"]) + 1,
        )

        filters_object = raw_airtable_view_data.get("filters", None)
        filters = []
        filter_groups = []
        if view_type.can_filter and filters_object is not None:
            filters, filter_groups = self.get_filters(
                field_mapping,
                row_id_mapping,
                raw_airtable_view,
                raw_airtable_table,
                import_report,
                filters_object,
            )
            # Pop the first group because that shouldn't be in Baserow, and the type
            # is defined on the view.
            if len(filter_groups) > 0:
                view.filter_type = filter_groups.pop(0).filter_type

        sorts = self.get_sorts(
            field_mapping,
            view_type,
            raw_airtable_table,
            raw_airtable_view,
            raw_airtable_view_data,
            import_report,
        )
        group_bys = self.get_group_bys(
            field_mapping,
            view_type,
            raw_airtable_table,
            raw_airtable_view,
            raw_airtable_view_data,
            import_report,
        )
        decorations = self.get_decorations(
            field_mapping,
            view_type,
            row_id_mapping,
            raw_airtable_table,
            raw_airtable_view,
            raw_airtable_view_data,
            import_report,
        )

        view.get_field_options = lambda *args, **kwargs: []
        view._prefetched_objects_cache = {
            "viewfilter_set": filters,
            "filter_groups": filter_groups,
            "viewsort_set": sorts,
            "viewgroupby_set": group_bys,
            "viewdecoration_set": decorations,
        }
        view = self.prepare_view_object(
            field_mapping,
            view,
            raw_airtable_table,
            raw_airtable_view,
            raw_airtable_view_data,
            config,
            import_report,
        )
        serialized = view_type.export_serialized(view)

        return serialized

    def prepare_view_object(
        self,
        field_mapping: dict,
        view: View,
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ) -> Optional[View]:
        """
        Prepares the given view object before it's passed into the view type specific
        `export_serialized` method. This should be used to set any properties that
        are needed for the view specific export operations.

        Note that common properties like the name, filters, sorts, etc. are added by
        default, depending on the Baserow view's support for them.

        :param field_mapping: A dict containing all the imported fields.
        :param view: The view object that must be prepared.
        :param raw_airtable_table: The raw Airtable table data related to the view.
        :param raw_airtable_view: The raw Airtable view values that must be
            converted, containing the name, for example.
        :param raw_airtable_view_data: The Airtable view data. This contains the
            filters, sorts, etc.
        :param config: Additional configuration related to the import.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :return: The prepared Baserow view. If None is provided, then the view is
            ignored in the conversion.
        """

        raise NotImplementedError("The `prepare_view_object` must be implemented.")


class AirtableViewTypeRegistry(Registry):
    name = "airtable_view"

    def from_airtable_view_to_serialized(
        self,
        field_mapping: dict,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_view: dict,
        raw_airtable_view_data: dict,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ) -> Optional[dict]:
        """
        Tries to find a Baserow view that matches the raw Airtable view data. If
        None is returned, the view is not compatible with Baserow and must be
        ignored.

        :param field_mapping: A dict containing all the imported fields.
        :param row_id_mapping: A dict mapping the Airtable row IDs to Baserow row
            IDs per table ID.
        :param raw_airtable_table: The raw Airtable table data related to the view.
        :param raw_airtable_view: The raw Airtable view data that must be imported.
        :param raw_airtable_view_data: The raw Airtable view data containing filters,
            sorts, etc.
        :param config: Additional configuration related to the import.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :return: The serialized Baserow view, or None if no compatible view type
            exists.
        """

        try:
            type_name = raw_airtable_view.get("type", "")
            airtable_view_type = self.get(type_name)
            serialized_view = airtable_view_type.to_serialized_baserow_view(
                field_mapping,
                row_id_mapping,
                raw_airtable_table,
                raw_airtable_view,
                raw_airtable_view_data,
                config,
                import_report,
            )

            return serialized_view
        except self.does_not_exist_exception_class:
            # Returning `None` because it's okay to not import the incompatible
            # views. They will be added to the `import_later` list by the handler.
            return None


class AirtableFilterOperator(Instance):
    def to_baserow_filter_and_value(
        self,
        row_id_mapping: Dict[str, Dict[str, int]],
        raw_airtable_table: dict,
        raw_airtable_column: dict,
        baserow_field: Field,
        import_report: AirtableImportReport,
        value: str,
    ) -> Tuple[ViewFilterType, str]:
        """
        Converts the given Airtable value into the matching Baserow filter type and
        correct value.

        :param row_id_mapping: A dict mapping the Airtable row IDs to Baserow row
            IDs per table ID.
        :param raw_airtable_table: The raw Airtable table data related to the filter.
        :param raw_airtable_column: The raw Airtable column data related to the filter.
        :param baserow_field: The Baserow field related to the filter.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :param value: The value that must be converted.
        :raises AirtableSkipFilter: If no compatible Baserow filter can be found.
        :return: The matching Baserow filter type and value.
        """

        raise NotImplementedError(
            f"The `to_baserow_filter_and_value` must be implemented for {self.type}."
        )


class AirtableFilterOperatorRegistry(Registry):
    name = "airtable_filter_operator"


# A default Airtable column type registry is created here; this is the one used
# throughout the whole Baserow application to register a new Airtable column type.
airtable_column_type_registry = AirtableColumnTypeRegistry()
airtable_view_type_registry = AirtableViewTypeRegistry()
airtable_filter_operator_registry = AirtableFilterOperatorRegistry()
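
# Illustrative sketch of the nested Airtable `filter_object` shape that
# `AirtableViewType.get_filters` flattens; the ids, operators, and values are
# made up. The root group carries no "id" on purpose, matching the comment in
# `get_filters` about the root group not existing in Baserow.
example_filter_object = {
    "conjunction": "and",  # root group; its type ends up on the view itself
    "filterSet": [
        {"id": "flt1", "columnId": "fldA", "operator": "contains", "value": "x"},
        {
            "id": "fltGrp1",
            "conjunction": "or",  # nested group, becomes a ViewFilterGroup
            "filterSet": [
                {"id": "flt2", "columnId": "fldB", "operator": "=", "value": 1},
            ],
        },
    ],
}
# For this input, get_filters(...) returns two flat lists: the ViewFilter
# objects [flt1, flt2] and the ViewFilterGroup objects [root, fltGrp1]; callers
# then pop the root group and store its conjunction as the view's filter_type.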
@@ -1,4 +1,15 @@
import json
import re
from typing import Any, Optional, Union

from requests import Response

from baserow.contrib.database.airtable.constants import (
    AIRTABLE_DATE_FILTER_VALUE_MAP,
    AIRTABLE_MAX_DURATION_VALUE,
)
from baserow.contrib.database.airtable.exceptions import AirtableSkipFilter
from baserow.core.utils import get_value_at_path, remove_invalid_surrogate_characters


def extract_share_id_from_url(public_base_url: str) -> str:

@@ -39,3 +50,250 @@ def get_airtable_row_primary_value(table, row):
        primary_value = row["id"]

    return primary_value


def get_airtable_column_name(raw_airtable_table, column_id) -> str:
    """
    Tries to extract the name of the column from the provided Airtable table.

    :param raw_airtable_table: The table to get the column name from.
    :param column_id: The column ID to get the name for.
    :return: The found column name, or the column_id if not found.
    """

    for column in raw_airtable_table["columns"]:
        if column["id"] == column_id:
            return column["name"]

    return column_id

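# Runnable illustration of the lookup and its fallback, using a hypothetical
# table payload with Airtable-style ids.
example_table = {"columns": [{"id": "fldA", "name": "Name"}]}
assert get_airtable_column_name(example_table, "fldA") == "Name"
assert get_airtable_column_name(example_table, "fldMissing") == "fldMissing"
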
def unknown_value_to_human_readable(value: Any) -> str:
    """
    If a value can't be converted to a human-readable value, then this function can
    be used to generate something user-friendly.

    :param value: The value that must be converted.
    :return: The human-readable string value.
    """

    if value is None:
        return ""
    if isinstance(value, list):
        value_len = len(value)
        return "1 item" if value_len == 1 else f"{value_len} items"
    if isinstance(value, str) and value.startswith("usr"):
        return "1 item"
    return str(value)

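# A few concrete input/output pairs for the helper above; the "usr" prefix is
# how Airtable identifies collaborator references.
assert unknown_value_to_human_readable(None) == ""
assert unknown_value_to_human_readable(["a"]) == "1 item"
assert unknown_value_to_human_readable(["a", "b", "c"]) == "3 items"
assert unknown_value_to_human_readable("usr12345") == "1 item"
assert unknown_value_to_human_readable(42) == "42"
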
def parse_json_and_remove_invalid_surrogate_characters(response: Response) -> dict:
    """
    The response from Airtable can sometimes contain invalid surrogate characters.
    This helper removes them and parses the content to JSON.

    :param response: The response from the request to Airtable.
    :return: The parsed JSON from the response.
    """

    try:
        decoded_content = remove_invalid_surrogate_characters(
            response.content, response.encoding
        )
        json_decoded_content = json.loads(decoded_content)
    except json.decoder.JSONDecodeError:
        # In some cases, the `remove_invalid_surrogate_characters` results in
        # invalid JSON. It's not completely clear why that is, but this
        # fallback can still produce valid JSON to import in most cases if
        # the original json didn't contain invalid surrogate characters.
        json_decoded_content = response.json()

    return json_decoded_content


def quill_parse_inline(insert, attributes):
    if "bold" in attributes:
        insert = f"**{insert}**"
    if "italic" in attributes:
        insert = f"_{insert}_"
    if "strike" in attributes:
        insert = f"~{insert}~"
    if "code" in attributes:
        insert = f"`{insert}`"
    if "link" in attributes:
        insert = f"[{insert}]({attributes['link']})"
    if isinstance(insert, dict) and "mention" in insert:
        insert = f"@{insert['mention'].get('userId', '')}"

    return insert


def quill_wrap_block(attributes):
    prepend = ""
    append = ""
    multi_line = False
    if "header" in attributes:
        prepend = "#" * attributes["header"] + " "
    if "list" in attributes:
        list_type = attributes["list"]
        prepend = " " * attributes.get("indent", 0) * 4
        if list_type == "ordered":
            prepend += "1. "
        elif list_type == "bullet":
            prepend += "- "
        elif list_type == "unchecked":
            prepend += "- [ ] "
        elif list_type == "checked":
            prepend += "- [x] "
    if "blockquote" in attributes:
        prepend = "> "
    if "≈≈" in attributes:
        prepend = "> "
    if "code-block" in attributes:
        prepend = "```\n"
        append = "```\n"
        multi_line = True
    return prepend, append, multi_line


def quill_split_with_newlines(value):
    parts = re.split(r"(\n)", value)
    if parts and parts[0] == "":
        parts.pop(0)
    if parts and parts[-1] == "":
        parts.pop()
    return parts


def quill_to_markdown(ops: list) -> str:
    """
    Airtable uses the QuillJS editor for its rich text field, and there is no
    library that converts the delta format to Baserow-compatible markdown. This is
    a simple, custom-written function that does that conversion.

    The format is a bit odd because a newline entry can define how the preceding
    line should have been formatted at block level, making it a bit tricky because
    it's not sequential.

    See the `test_quill_to_markdown_airtable_example` test for an example.

    :param ops: The QuillJS delta object that must be converted to markdown.
    :return: The converted markdown string.
    """

    md_output = []
    # Holds everything that must be written as a line. Each entry in the ops can
    # add to it until a "\n" character is detected.
    current_object = ""
    # Temporarily holds markdown that has a start and ending block, like with code
    # "```", for example. The prepend and append values must be stored temporarily,
    # so that we can keep adding to the block if it consists of multiple lines.
    current_multi_line = None

    def flush_line():
        nonlocal md_output
        nonlocal current_object
        if current_object != "":
            md_output.append(current_object)
            current_object = ""

    def flush_multi_line(current_prepend, current_append):
        nonlocal current_object
        nonlocal current_multi_line
        if current_multi_line is not None and current_multi_line != (
            current_prepend,
            current_append,
        ):
            current_object = (
                current_multi_line[0] + current_object + current_multi_line[1]
            )
            flush_line()
            current_multi_line = None

    for index, op in enumerate(ops):
        raw_insert = op.get("insert", "")
        attributes = op.get("attributes", {})

        if isinstance(raw_insert, str):
            insert_lines = quill_split_with_newlines(raw_insert)
        else:
            insert_lines = [raw_insert]

        # Break the insert by "\n" because the block formatting options should only
        # refer to the previous line.
        for insert_line in insert_lines:
            is_new_line = insert_line == "\n"

            if is_new_line:
                prepend, append, multi_line = quill_wrap_block(attributes)
                flush_multi_line(prepend, append)

                # Starting a new multi-line block. All the following lines will be
                # enclosed by the prepend and append.
                if multi_line and current_multi_line is None:
                    current_multi_line = (prepend, append)

            parsed_insert = quill_parse_inline(insert_line, attributes)
            current_object += parsed_insert

            if is_new_line and not multi_line:
                current_object = prepend + current_object + append
                flush_line()

    flush_multi_line(None, None)
    flush_line()

    return "".join(md_output).strip()

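# A small worked example of the delta-to-markdown conversion above; the delta
# mirrors the shape QuillJS produces, where block attributes arrive on the
# trailing "\n" op rather than on the text itself.
example_ops = [
    {"insert": "Title"},
    {"insert": "\n", "attributes": {"header": 2}},
    {"insert": "bold", "attributes": {"bold": True}},
    {"insert": " text\n"},
]
assert quill_to_markdown(example_ops) == "## Title\n**bold** text"
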
def airtable_date_filter_value_to_baserow(value: Optional[Union[dict, str]]) -> str:
    """
    Converts the provided Airtable filter date value to the Baserow-compatible
    date value string.

    :param value: A dict or string containing the Airtable date value.
    :return: e.g. Europe/Amsterdam?2025-01-01?exact_date
    """

    if value is None:
        return ""

    # If the value is a string, then it contains an exact date. This is the old
    # format of Airtable. In that case, we can convert it to the correct format.
    if isinstance(value, str):
        value = {
            "mode": "exactDate",
            "exactDate": value,
            "timeZone": "",  # it's okay to leave the timezone empty in Baserow.
        }

    mode = value["mode"]
    if "exactDate" in value:
        # By default, Airtable adds the time, but that is not needed in Baserow.
        value["exactDate"] = value["exactDate"][:10]
    date_string = AIRTABLE_DATE_FILTER_VALUE_MAP[mode]
    return date_string.format(**value)

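# Worked example, assuming the exact-date entry of AIRTABLE_DATE_FILTER_VALUE_MAP
# renders as "{timeZone}?{exactDate}?exact_date" (matching the docstring's
# example output); the time portion is stripped before formatting.
example_value = {
    "mode": "exactDate",
    "exactDate": "2025-01-01T12:00:00.000Z",
    "timeZone": "Europe/Amsterdam",
}
assert airtable_date_filter_value_to_baserow(example_value) == (
    "Europe/Amsterdam?2025-01-01?exact_date"
)
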
def skip_filter_if_type_duration_and_value_too_high(raw_airtable_column, value):
    """
    If the provided Airtable column is a number with duration formatting, and the
    value exceeds the maximum we can process, then `AirtableSkipFilter` is raised.

    :param raw_airtable_column: The related raw Airtable column.
    :param value: The value that must be checked.
    :raises: AirtableSkipFilter
    """

    is_duration = (
        get_value_at_path(raw_airtable_column, "typeOptions.format") == "duration"
    )

    if not is_duration:
        return

    try:
        value = int(value)
        if abs(value) > AIRTABLE_MAX_DURATION_VALUE:
            raise AirtableSkipFilter
    except ValueError:
        pass


@@ -38,8 +38,8 @@ ERROR_ORDER_BY_FIELD_NOT_FOUND = (
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE = (
    "ERROR_ORDER_BY_FIELD_NOT_POSSIBLE",
    HTTP_400_BAD_REQUEST,
-    "It is not possible to order by {e.field_name} because the field type "
-    "{e.field_type} does not support filtering.",
+    "It is not possible to order by {e.field_name} using sort type {e.sort_type} "
+    "because the field type {e.field_type} does not support it.",
)
ERROR_FILTER_FIELD_NOT_FOUND = (
    "ERROR_FILTER_FIELD_NOT_FOUND",


@@ -1,9 +1,71 @@
from django.utils.functional import lazy

from rest_framework import serializers
from rest_framework.exceptions import ValidationError

from baserow.contrib.database.api.data_sync.serializers import DataSyncSerializer
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.models import Table


class TableImportConfiguration(serializers.Serializer):
    """
    Additional table import configuration.
    """

    upsert_fields = serializers.ListField(
        child=serializers.IntegerField(min_value=1),
        min_length=1,
        allow_null=True,
        allow_empty=True,
        default=None,
        help_text=lazy(
            lambda: (
                "A list of field IDs in the table used to generate a value for "
                "identifying a row during the upsert process in file import. Each "
                "field ID must reference an existing field in the table, which will "
                "be used to match provided values against existing ones to determine "
                "whether a row should be inserted or updated.\n "
                "Field types that can be used in upsert fields: "
                f"{','.join([f.type for f in field_type_registry.get_all() if f.can_upsert])}. "
                "If specified, `upsert_values` should also be provided."
            )
        ),
    )
    upsert_values = serializers.ListField(
        allow_empty=True,
        allow_null=True,
        default=None,
        child=serializers.ListField(
            min_length=1,
        ),
        help_text=(
            "A list of values that identify rows in the imported data.\n "
            "The number of rows in `upsert_values` should be equal to the number of "
            "rows in the imported data. Each row in `upsert_values` should contain "
            "a list of values that match the number and field types of the fields "
            "selected in `upsert_fields`. Based on `upsert_fields`, similar upsert "
            "values will be calculated for each row in the table.\n "
            "There's no guarantee of uniqueness of the row identification calculated "
            "from `upsert_values` nor from the table. Repeated upsert values are "
            "compared in order with matching values in the table. The imported data "
            "must be in the same order as the table rows for correct matching."
        ),
    )

    def validate(self, attrs):
        if attrs.get("upsert_fields") and not len(attrs.get("upsert_values") or []):
            raise ValidationError(
                {
                    "upsert_values": (
                        "upsert_values must not be empty "
                        "when upsert_fields are provided."
                    )
                }
            )
        return attrs

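# A hypothetical request body for the file import endpoint showing how the new
# `configuration` pairs with `data`; the field id and values are made up. Each
# row in `upsert_values` lines up 1:1 with a row in `data`, and the single
# upsert field here would be matched against the first column.
example_import_payload = {
    "data": [
        ["Alice", "alice@example.com"],
        ["Bob", "bob@example.com"],
    ],
    "configuration": {
        "upsert_fields": [101],  # hypothetical id of the table's "Name" field
        "upsert_values": [["Alice"], ["Bob"]],
    },
}
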
class TableSerializer(serializers.ModelSerializer):
    data_sync = DataSyncSerializer()

@@ -74,10 +136,26 @@ class TableImportSerializer(serializers.Serializer):
            "for adding two rows to a table with two writable fields."
        ),
    )
    configuration = TableImportConfiguration(required=False, default=None)

    class Meta:
        fields = ("data",)

    def validate(self, attrs):
        if attrs.get("configuration"):
            if attrs["configuration"].get("upsert_values"):
                if len(attrs["configuration"].get("upsert_values")) != len(
                    attrs["data"]
                ):
                    msg = (
                        "`data` and `configuration.upsert_values` "
                        "should have the same length."
                    )
                    raise ValidationError(
                        {"data": msg, "configuration": {"upsert_values": msg}}
                    )
        return attrs


class TableUpdateSerializer(serializers.ModelSerializer):
    class Meta:


@@ -489,14 +489,14 @@ class AsyncTableImportView(APIView):
            workspace=table.database.workspace,
            context=table,
        )

        configuration = data.get("configuration")
        data = data["data"]

        file_import_job = JobHandler().create_and_start_job(
            request.user,
            "file_import",
            data=data,
            table=table,
            configuration=configuration,
        )

        serializer = job_type_registry.get_serializer(file_import_job, JobSerializer)


@@ -57,7 +57,7 @@ ERROR_VIEW_SORT_FIELD_ALREADY_EXISTS = (
ERROR_VIEW_SORT_FIELD_NOT_SUPPORTED = (
    "ERROR_VIEW_SORT_FIELD_NOT_SUPPORTED",
    HTTP_400_BAD_REQUEST,
-    "The field does not support view sorting.",
+    "The field does not support view sorting on the given type.",
)
ERROR_VIEW_GROUP_BY_DOES_NOT_EXIST = (
    "ERROR_VIEW_GROUP_BY_DOES_NOT_EXIST",


@@ -220,14 +220,14 @@ class UpdateViewFilterGroupSerializer(serializers.ModelSerializer):
class ViewSortSerializer(serializers.ModelSerializer):
    class Meta:
        model = ViewSort
-        fields = ("id", "view", "field", "order")
+        fields = ("id", "view", "field", "order", "type")
        extra_kwargs = {"id": {"read_only": True}}


class CreateViewSortSerializer(serializers.ModelSerializer):
    class Meta:
        model = ViewSort
-        fields = ("field", "order")
+        fields = ("field", "order", "type")
        extra_kwargs = {
            "order": {"default": ViewSort._meta.get_field("order").default},
        }

@@ -236,11 +236,12 @@ class CreateViewSortSerializer(serializers.ModelSerializer):
class UpdateViewSortSerializer(serializers.ModelSerializer):
    class Meta(CreateViewFilterSerializer.Meta):
        model = ViewSort
-        fields = ("field", "order")
+        fields = ("field", "order", "type")
        extra_kwargs = {
            "field": {"required": False},
            "order": {"required": False},
            "width": {"required": False},
            "type": {"required": False},
        }


@@ -253,6 +254,7 @@ class ViewGroupBySerializer(serializers.ModelSerializer):
            "field",
            "order",
            "width",
            "type",
        )
        extra_kwargs = {"id": {"read_only": True}}

@@ -264,10 +266,12 @@ class CreateViewGroupBySerializer(serializers.ModelSerializer):
            "field",
            "order",
            "width",
            "type",
        )
        extra_kwargs = {
            "order": {"default": ViewGroupBy._meta.get_field("order").default},
            "width": {"default": ViewGroupBy._meta.get_field("width").default},
            "type": {"default": ViewGroupBy._meta.get_field("type").default},
        }


@@ -278,11 +282,13 @@ class UpdateViewGroupBySerializer(serializers.ModelSerializer):
            "field",
            "order",
            "width",
            "type",
        )
        extra_kwargs = {
            "field": {"required": False},
            "order": {"required": False},
            "width": {"required": False},
            "type": {"required": False},
        }


@@ -540,7 +546,7 @@ class PublicViewSortSerializer(serializers.ModelSerializer):

    class Meta:
        model = ViewSort
-        fields = ("id", "view", "field", "order")
+        fields = ("id", "view", "field", "order", "type")
        extra_kwargs = {"id": {"read_only": True}}


@@ -555,6 +561,7 @@ class PublicViewGroupBySerializer(serializers.ModelSerializer):
            "field",
            "order",
            "width",
            "type",
        )
        extra_kwargs = {"id": {"read_only": True}}


@@ -1540,7 +1540,11 @@ class ViewSortingsView(APIView):
        field = FieldHandler().get_field(data["field"])

        view_sort = action_type_registry.get_by_type(CreateViewSortActionType).do(
-            request.user, view, field, data["order"]
+            request.user,
+            view,
+            field,
+            data["order"],
+            data.get("type"),
        )

        serializer = ViewSortSerializer(view_sort)

@@ -1645,6 +1649,7 @@ class ViewSortView(APIView):
            view_sort,
            data.get("field"),
            data.get("order"),
            data.get("type"),
        )

        serializer = ViewSortSerializer(view_sort)

@@ -2219,7 +2224,7 @@ class ViewGroupBysView(APIView):

        view_group_by = action_type_registry.get_by_type(
            CreateViewGroupByActionType
-        ).do(request.user, view, field, data["order"], data["width"])
+        ).do(request.user, view, field, data["order"], data["width"], data.get("type"))

        serializer = ViewGroupBySerializer(view_group_by)
        return Response(serializer.data)

@@ -2326,6 +2331,7 @@ class ViewGroupByView(APIView):
            data.get("field"),
            data.get("order"),
            data.get("width"),
            data.get("type"),
        )

        serializer = ViewGroupBySerializer(view_group_by)


@@ -26,8 +26,15 @@ class TableWebhookEventConfig(serializers.Serializer):
        choices=webhook_event_type_registry.get_types(),
    )
    fields = serializers.ListField(
        required=False,
        child=serializers.IntegerField(),
        help_text="A list of field IDs that are related to the event.",
        allow_empty=True,
    )
    views = serializers.ListField(
        required=False,
        child=serializers.IntegerField(),
        help_text="A list of view IDs that are related to the event.",
    )

@@ -186,13 +193,15 @@ class TableWebhookSerializer(serializers.ModelSerializer):

    @extend_schema_field(TableWebhookEventConfig(many=True))
    def get_event_config(self, instance):
-        events = [
-            {
-                "event_type": event.event_type,
-                "fields": [f.id for f in event.fields.all()],
-            }
-            for event in instance.events.all()
-        ]
+        events = []
+        for event in instance.events.all():
+            evt = {"event_type": event.event_type}
+            if fields := [f.id for f in event.fields.all()]:
+                evt["fields"] = fields
+            if views := [v.id for v in event.views.all()]:
+                evt["views"] = views
+            events.append(evt)

        return [TableWebhookEventConfig(event).data for event in events]

    @extend_schema_field(OpenApiTypes.OBJECT)
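
# A hypothetical `event_config` payload as produced by get_event_config above;
# the `fields`/`views` keys only appear when non-empty, per the walrus-guarded
# logic, and the ids are made up.
example_event_config = [
    {"event_type": "rows.created"},
    {"event_type": "rows.updated", "fields": [42], "views": [7]},
]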
@ -33,12 +33,12 @@ from baserow.core.registries import (
|
|||
)
|
||||
from baserow.core.storage import ExportZipFile
|
||||
from baserow.core.trash.handler import TrashHandler
|
||||
from baserow.core.utils import ChildProgressBuilder, grouper
|
||||
from baserow.core.utils import ChildProgressBuilder, Progress, grouper
|
||||
|
||||
from .constants import (
|
||||
EXPORT_SERIALIZED_EXPORTING_TABLE,
|
||||
IMPORT_SERIALIZED_IMPORTING,
|
||||
IMPORT_SERIALIZED_IMPORTING_TABLE,
|
||||
IMPORT_SERIALIZED_IMPORTING_TABLE_DATA,
|
||||
IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE,
|
||||
)
|
||||
from .data_sync.registries import data_sync_type_registry
|
||||
from .db.atomic import read_repeatable_single_database_atomic_transaction
|
||||
|
@ -293,7 +293,7 @@ class DatabaseApplicationType(ApplicationType):
|
|||
import_export_config: ImportExportConfig,
|
||||
external_table_fields_to_import: List[Tuple[Table, Dict[str, Any]]],
|
||||
deferred_fk_update_collector: DeferredForeignKeyUpdater,
|
||||
progress: ChildProgressBuilder,
|
||||
progress: Progress,
|
||||
) -> ImportedFields:
|
||||
"""
|
||||
Import the fields from the serialized data in the correct order based on their
|
||||
|
@ -308,6 +308,10 @@ class DatabaseApplicationType(ApplicationType):
|
|||
also be imported. These fields will be imported into the existing table
|
||||
provided in the first item in the tuple, the second being the serialized
|
||||
field to import.
|
||||
:param deferred_fk_update_collector: A collector that collects all the foreign
|
||||
keys to update them later when the model with all the fields is created.
|
||||
:param progress: A progress used to report progress of the import.
|
||||
:return: The imported fields.
|
||||
"""
|
||||
|
||||
field_cache = FieldCache()
|
||||
|
@ -348,7 +352,10 @@ class DatabaseApplicationType(ApplicationType):
|
|||
if table_instance not in table_fields_by_name:
|
||||
table_fields_by_name[table_instance] = {}
|
||||
table_fields_by_name[table_instance][field_instance.name] = field_instance
|
||||
progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
|
||||
table_name = serialized_table["name"]
|
||||
progress.increment(
|
||||
state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
|
||||
)
|
||||
return field_instance
|
||||
|
||||
fields_without_dependencies: List[Field] = []
|
||||
|
@ -404,7 +411,7 @@ class DatabaseApplicationType(ApplicationType):
|
|||
deferred_fk_update_collector,
|
||||
)
|
||||
SearchHandler.after_field_created(external_field)
|
||||
progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
|
||||
progress.increment()
|
||||
|
||||
deferred_fk_update_collector.run_deferred_fk_updates(
|
||||
id_mapping, "database_fields"
|
||||
|
@ -537,7 +544,9 @@ class DatabaseApplicationType(ApplicationType):
|
|||
self._create_table_schema(
|
||||
serialized_table, already_created_through_table_names
|
||||
)
|
||||
progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
|
||||
progress.increment(
|
||||
state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{serialized_table['name']}"
|
||||
)
|
||||
|
||||
# Now that everything is in place we can start filling the table with the rows
|
||||
# in an efficient matter by using the bulk_create functionality.
|
||||
|
@@ -594,9 +603,9 @@ class DatabaseApplicationType(ApplicationType):
        user_email_mapping: Dict[str, Any],
        deferred_fk_update_collector: DeferredForeignKeyUpdater,
        id_mapping: Dict[str, Any],
        files_zip: Optional[ZipFile] = None,
        storage: Optional[Storage] = None,
        progress: Optional[ChildProgressBuilder] = None,
        files_zip: ZipFile | None,
        storage: Storage | None,
        progress: Progress,
    ):
        """
        Imports the rows of a table from the serialized data in an efficient manner.
@@ -610,7 +619,7 @@ class DatabaseApplicationType(ApplicationType):
            imported files from
        :param storage: An optional place to persist any user files if importing files
            from the above files_zip.
        :param progress: A progress builder used to report progress of the import.
        :param progress: A progress instance used to report the progress of the import.
        """

        table_cache: Dict[str, Any] = {}
@@ -668,7 +677,7 @@ class DatabaseApplicationType(ApplicationType):

                rows_to_be_inserted.append(row_instance)
                progress.increment(
                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE}{serialized_table['id']}"
                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_DATA}{serialized_table['name']}"
                )

            # We want to insert the rows in bulk because there could potentially be
@@ -678,7 +687,7 @@ class DatabaseApplicationType(ApplicationType):
                table_model.objects.bulk_create(chunk, batch_size=512)
                progress.increment(
                    len(chunk),
                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE}{serialized_table['id']}",
                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_DATA}{serialized_table['name']}",
                )

            # Every row import can have additional objects that must be inserted,
@@ -811,7 +820,7 @@ class DatabaseApplicationType(ApplicationType):
                field_type.after_rows_imported(
                    field, field_cache=imported_fields.field_cache
                )
                progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
                progress.increment()

    def _create_table_schema(
        self, serialized_table, already_created_through_table_names
@@ -869,10 +878,13 @@ class DatabaseApplicationType(ApplicationType):
        """

        table = serialized_table["_object"]
        table_name = serialized_table["name"]
        for serialized_view in serialized_table["views"]:
            view_type = view_type_registry.get(serialized_view["type"])
            view_type.import_serialized(table, serialized_view, id_mapping, files_zip)
            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
            progress.increment(
                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
            )

    def _import_tables(
        self,
@@ -906,7 +918,10 @@ class DatabaseApplicationType(ApplicationType):
            serialized_table["_object"] = table_instance
            serialized_table["field_instances"] = []
            imported_tables.append(table_instance)
            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
            table_name = serialized_table["name"]
            progress.increment(
                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
            )

        return imported_tables

@@ -924,7 +939,7 @@ class DatabaseApplicationType(ApplicationType):
        Imports a database application exported by the `export_serialized` method.
        """

        database_progress, table_progress = 1, 99
        database_progress, table_progress = 1, len(serialized_values["tables"])
        progress = ChildProgressBuilder.build(
            progress_builder, child_total=database_progress + table_progress
        )
@@ -940,10 +955,7 @@ class DatabaseApplicationType(ApplicationType):
        )

        database = application.specific

        if not serialized_values["tables"]:
            progress.increment(state=IMPORT_SERIALIZED_IMPORTING, by=table_progress)
        else:
        if serialized_values["tables"]:
            self.import_tables_serialized(
                database,
                serialized_values["tables"],
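The hunks above change the import's progress accounting from a fixed `1 + 99` split to `1 + len(tables)` child steps with a named state per table. A minimal sketch of the idea with a stand-in `Progress` class (the real `ChildProgressBuilder`/`Progress` live in `baserow.core.utils`; the class below is hypothetical and only mirrors the calls visible in this diff):

```python
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Progress:
    """Stand-in for baserow.core.utils.Progress: counts child steps."""

    total: int
    done: int = 0
    states: List[Optional[str]] = field(default_factory=list)

    def increment(self, by: int = 1, state: Optional[str] = None) -> None:
        self.done += by
        self.states.append(state)

    @property
    def percentage(self) -> float:
        return self.done / self.total * 100


# One step for the database itself plus one step per table, instead of the
# old fixed 1 + 99 split that over- or under-weighted small imports.
tables = ["customers", "orders", "invoices"]
progress = Progress(total=1 + len(tables))
progress.increment(state="importing")  # the database application step
for name in tables:
    progress.increment(state=f"importing-table-structure-{name}")
assert progress.percentage == 100.0
```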
@@ -158,7 +158,11 @@ class DatabaseConfig(AppConfig):
        action_type_registry.register(UpdateDataSyncTableActionType())
        action_type_registry.register(SyncDataSyncTableActionType())

        from .airtable.registry import airtable_column_type_registry
        from .airtable.registry import (
            airtable_column_type_registry,
            airtable_filter_operator_registry,
            airtable_view_type_registry,
        )
        from .data_sync.registries import data_sync_type_registry
        from .export.registries import table_exporter_registry
        from .fields.registries import (
@@ -616,6 +620,7 @@ class DatabaseConfig(AppConfig):
        webhook_event_type_registry.register(ViewDeletedEventType())

        from .airtable.airtable_column_types import (
            AutoNumberAirtableColumnType,
            CheckboxAirtableColumnType,
            CountAirtableColumnType,
            DateAirtableColumnType,
@@ -646,6 +651,49 @@ class DatabaseConfig(AppConfig):
        airtable_column_type_registry.register(MultipleAttachmentAirtableColumnType())
        airtable_column_type_registry.register(RichTextTextAirtableColumnType())
        airtable_column_type_registry.register(CountAirtableColumnType())
        airtable_column_type_registry.register(AutoNumberAirtableColumnType())

        from .airtable.airtable_view_types import GridAirtableViewType

        airtable_view_type_registry.register(GridAirtableViewType())

        from .airtable.airtable_filter_operators import (
            AirtableContainsOperator,
            AirtableDoesNotContainOperator,
            AirtableEqualOperator,
            AirtableFilenameOperator,
            AirtableFiletypeOperator,
            AirtableHasAllOfOperator,
            AirtableHasAnyOfOperator,
            AirtableIsAnyOfOperator,
            AirtableIsEmptyOperator,
            AirtableIsNoneOfOperator,
            AirtableIsNotEmptyOperator,
            AirtableIsWithinOperator,
            AirtableLessThanOperator,
            AirtableLessThanOrEqualOperator,
            AirtableMoreThanOperator,
            AirtableMoreThanOrEqualOperator,
            AirtableNotEqualOperator,
        )

        airtable_filter_operator_registry.register(AirtableContainsOperator())
        airtable_filter_operator_registry.register(AirtableDoesNotContainOperator())
        airtable_filter_operator_registry.register(AirtableEqualOperator())
        airtable_filter_operator_registry.register(AirtableNotEqualOperator())
        airtable_filter_operator_registry.register(AirtableIsEmptyOperator())
        airtable_filter_operator_registry.register(AirtableIsNotEmptyOperator())
        airtable_filter_operator_registry.register(AirtableFilenameOperator())
        airtable_filter_operator_registry.register(AirtableFiletypeOperator())
        airtable_filter_operator_registry.register(AirtableIsAnyOfOperator())
        airtable_filter_operator_registry.register(AirtableIsNoneOfOperator())
        airtable_filter_operator_registry.register(AirtableHasAnyOfOperator())
        airtable_filter_operator_registry.register(AirtableHasAllOfOperator())
        airtable_filter_operator_registry.register(AirtableLessThanOperator())
        airtable_filter_operator_registry.register(AirtableMoreThanOperator())
        airtable_filter_operator_registry.register(AirtableLessThanOrEqualOperator())
        airtable_filter_operator_registry.register(AirtableMoreThanOrEqualOperator())
        airtable_filter_operator_registry.register(AirtableIsWithinOperator())

        from .data_sync.data_sync_types import (
            ICalCalendarDataSyncType,
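All of these `register()` calls follow Baserow's registry pattern: each instance exposes a `type` string, and the registry maps Airtable's operator names onto Baserow behaviour for later lookup. A simplified, self-contained sketch of that pattern (the real `Registry` in `baserow.core.registry` is richer; the classes and the `to_baserow_filter` helper below are illustrative stand-ins, not the actual API):

```python
class Instance:
    type: str


class Registry:
    """Simplified stand-in for baserow.core.registry.Registry."""

    def __init__(self):
        self._registry = {}

    def register(self, instance: Instance) -> None:
        if instance.type in self._registry:
            raise ValueError(f"{instance.type} is already registered")
        self._registry[instance.type] = instance

    def get(self, type_name: str) -> Instance:
        return self._registry[type_name]


class AirtableContainsOperator(Instance):
    # Hypothetical mapping of Airtable's "contains" onto a Baserow filter.
    type = "contains"

    def to_baserow_filter(self, value):
        return ("contains", value)


airtable_filter_operator_registry = Registry()
airtable_filter_operator_registry.register(AirtableContainsOperator())
assert airtable_filter_operator_registry.get("contains").to_baserow_filter("x") == (
    "contains",
    "x",
)
```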
@@ -947,6 +995,10 @@ class DatabaseConfig(AppConfig):
        from baserow.contrib.database.views.notification_types import (
            FormSubmittedNotificationType,
        )
        from baserow.contrib.database.webhooks.notification_types import (
            WebhookDeactivatedNotificationType,
            WebhookPayloadTooLargeNotificationType,
        )
        from baserow.core.notifications.registries import notification_type_registry

        notification_type_registry.register(CollaboratorAddedToRowNotificationType())
@@ -954,6 +1006,8 @@ class DatabaseConfig(AppConfig):
            UserMentionInRichTextFieldNotificationType()
        )
        notification_type_registry.register(FormSubmittedNotificationType())
        notification_type_registry.register(WebhookDeactivatedNotificationType())
        notification_type_registry.register(WebhookPayloadTooLargeNotificationType())

        # The signals must always be imported last because they use the registries
        # which need to be filled first.
@@ -970,6 +1024,7 @@ class DatabaseConfig(AppConfig):
        import baserow.contrib.database.rows.tasks  # noqa: F401
        import baserow.contrib.database.search.tasks  # noqa: F401
        import baserow.contrib.database.table.receivers  # noqa: F401
        import baserow.contrib.database.views.receivers  # noqa: F401
        import baserow.contrib.database.views.tasks  # noqa: F401

        # date/datetime min/max year handling - we need that for psycopg 3.x only
@@ -1,5 +1,6 @@
IMPORT_SERIALIZED_IMPORTING = "importing"
IMPORT_SERIALIZED_IMPORTING_TABLE = "importing-table-"
IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE = "importing-table-structure-"
IMPORT_SERIALIZED_IMPORTING_TABLE_DATA = "importing-table-data-"
EXPORT_SERIALIZED_EXPORTING = "exporting"
EXPORT_SERIALIZED_EXPORTING_TABLE = "exporting-table-"
EXPORT_WORKSPACE_CREATE_ARCHIVE = "create-archive"
@@ -47,6 +47,7 @@ class FileWriter(abc.ABC):
        self,
        queryset: QuerySet,
        write_row: Callable[[Any, bool], None],
        progress_weight: int,
    ):
        """
        A specialized method which knows how to write an entire queryset to the file
@@ -54,6 +55,10 @@ class FileWriter(abc.ABC):
        :param queryset: The queryset to write to the file.
        :param write_row: A callable function which takes each row from the queryset in
            turn and writes to the file.
        :param progress_weight: Indicates how much of the progress should count for
            writing the rows in total. This can be used to reduce the total
            percentage if there is some post-processing after writing the rows
            that must use some of the progress.
        """

    def get_csv_dict_writer(self, headers, **kwargs):
@@ -74,13 +79,16 @@ class PaginatedExportJobFileWriter(FileWriter):
        self.job = job
        self.last_check = None

    def update_check(self):
        self.last_check = time.perf_counter()

    def write_bytes(self, value: bytes):
        self._file.write(value)

    def write(self, value: str, encoding="utf-8"):
        self._file.write(value.encode(encoding))

    def write_rows(self, queryset, write_row):
    def write_rows(self, queryset, write_row, progress_weight=100):
        """
        Writes the queryset to the file using the provided write_row callback.
        Every EXPORT_JOB_UPDATE_FREQUENCY_SECONDS will check if the job has been
@@ -92,19 +100,27 @@ class PaginatedExportJobFileWriter(FileWriter):
        :param queryset: The queryset to write to the file.
        :param write_row: A callable function which takes each row from the queryset in
            turn and writes to the file.
        :param progress_weight: Indicates how much of the progress should count for
            writing the rows in total. This can be used to reduce the total
            percentage if there is some post-processing after writing the rows
            that must use some of the progress.
        """

        self.last_check = time.perf_counter()
        self.update_check()
        paginator = Paginator(queryset.all(), 2000)
        i = 0
        results = []
        for page in paginator.page_range:
            for row in paginator.page(page).object_list:
                i = i + 1
                is_last_row = i == paginator.count
                write_row(row, is_last_row)
                self._check_and_update_job(i, paginator.count)
                result = write_row(row, is_last_row)
                if result is not None:
                    results.append(result)
                self._check_and_update_job(i, paginator.count, progress_weight)
        return results
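`write_rows` now collects and returns the non-`None` values produced by `write_row`, so an exporter can gather per-row artifacts (for example, bytes written per row) while streaming. A small standalone sketch of that contract, using a plain list instead of a Django `Paginator`:

```python
def write_rows(rows, write_row, progress_weight=100):
    """Mimics the new contract: collect non-None write_row results."""
    results = []
    total = len(rows)
    for i, row in enumerate(rows, start=1):
        result = write_row(row, i == total)
        if result is not None:
            results.append(result)
        # The real writer would also update job progress here, scaled by
        # progress_weight: min(i / total * progress_weight, 100).
    return results


# Hypothetical usage: the callback reports how many bytes each row took.
sizes = write_rows(["a", "bb", "ccc"], lambda row, is_last: len(row))
assert sizes == [1, 2, 3]
```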
    def _check_and_update_job(self, current_row, total_rows):
    def _check_and_update_job(self, current_row, total_rows, progress_weight=100):
        """
        Checks if enough time has passed and if so checks the state of the job and
        updates its progress percentage.

@@ -124,12 +140,17 @@ class PaginatedExportJobFileWriter(FileWriter):
        )
        is_last_row = current_row == total_rows
        if enough_time_has_passed or is_last_row:
            self.last_check = time.perf_counter()
            self.update_check()
            self.job.refresh_from_db()
            if self.job.is_cancelled_or_expired():
                raise ExportJobCanceledException()
            else:
                self.job.progress_percentage = current_row / total_rows * 100
                # min is used here because in case of files we get total size from
                # files, but for progress measurement we use size of chunks that might
                # be slightly bigger than the total size of the files
                self.job.progress_percentage = min(
                    current_row / total_rows * progress_weight, 100
                )
                self.job.save()

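The weighted formula is easiest to see with numbers: with `progress_weight=80` the row-writing phase can only ever account for 80% of the job, and the `min(..., 100)` clamp guards the default case where chunk-based counters slightly overshoot the declared total:

```python
# Halfway through 1000 rows at weight 80: well below the cap.
assert min(500 / 1000 * 80, 100) == 40.0
# All rows written: 80%, leaving 20% for post-processing (e.g. zipping).
assert min(1000 / 1000 * 80, 100) == 80.0
# Default weight with a slight overshoot of the measured total: clamped.
assert min(1050 / 1000 * 100, 100) == 100
```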
@@ -144,6 +165,7 @@ class QuerysetSerializer(abc.ABC):
    def __init__(self, queryset, ordered_field_objects):
        self.queryset = queryset
        self.field_serializers = [lambda row: ("id", "id", row.id)]
        self.ordered_field_objects = ordered_field_objects

        for field_object in ordered_field_objects:
            self.field_serializers.append(self._get_field_serializer(field_object))
@@ -34,9 +34,13 @@ class DatabaseExportSerializedStructure:
        }

    @staticmethod
    def file_field_value(name, visible_name, original_name):
        return {
    def file_field_value(name, visible_name, original_name, size=None):
        data = {
            "name": name,
            "visible_name": visible_name,
            "original_name": original_name,
        }

        if size is not None:
            data["size"] = size
        return data
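The optional `size` keeps exports backwards compatible: old entries without the key still deserialize, while new entries let the importer learn each file's byte size without re-reading it. For example (file names hypothetical):

```python
from baserow.contrib.database.export_serialized import (
    DatabaseExportSerializedStructure,
)

# Old-style value, still produced when the size is unknown:
value = DatabaseExportSerializedStructure.file_field_value(
    name="abc123.png", visible_name="logo.png", original_name="abc123.png"
)
assert "size" not in value

# New-style value carries the byte size along:
value = DatabaseExportSerializedStructure.file_field_value(
    name="abc123.png",
    visible_name="logo.png",
    original_name="abc123.png",
    size=2048,
)
assert value["size"] == 2048
```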
@@ -30,6 +30,7 @@ BASEROW_BOOLEAN_FIELD_FALSE_VALUES = [
    "unchecked",
    False
]
SINGLE_SELECT_SORT_BY_ORDER = "order"


class DeleteFieldStrategyEnum(Enum):
@@ -86,9 +86,17 @@ class OrderByFieldNotFound(Exception):
class OrderByFieldNotPossible(Exception):
    """Raised when it is not possible to order by a field."""

    def __init__(self, field_name=None, field_type=None, *args, **kwargs):
    def __init__(
        self,
        field_name: str = None,
        field_type: str = None,
        sort_type: str = None,
        *args: list,
        **kwargs: dict,
    ):
        self.field_name = field_name
        self.field_type = field_type
        self.sort_type = sort_type
        super().__init__(*args, **kwargs)

@@ -38,6 +38,42 @@ def parse_ids_from_csv_string(value: str) -> list[int]:
        return []


def map_ids_from_csv_string(
    value_string: str, mapping: Optional[dict] = None
) -> list[Union[str, int]]:
    """
    Parses the provided value if needed and returns a list of ids.

    :param value_string: The value that has been provided by the user.
    :param mapping: The key is the given option id, and the value is the target
        option id.
    :return: A list of ids.
    """

    # There is a small chance the value is an int in case a raw ID was provided in
    # the row coloring, where the filters are stored as JSON. Cast it to a string to
    # make it compatible.
    if not isinstance(value_string, str):
        value_string = str(value_string)

    parsed_values = []
    for value in value_string.split(","):
        # In some cases, the select option ID is a string, like with the Airtable
        # import. If the value can be found in the mapping, then we'll directly use
        # that value.
        if value in mapping:
            parsed_values.append(str(mapping[value]))
            continue

        if value.strip().isdigit():
            # Convert to int because the serialized value can be a string, but the key
            # in the mapping is an int.
            value = int(value)
            if value in mapping:
                parsed_values.append(str(mapping[value]))

    return parsed_values

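A usage sketch with a hypothetical mapping, mixing an Airtable-style string id with a stringified integer id. Note that although `mapping` defaults to `None`, the body dereferences it unconditionally, so callers are expected to always pass a dict:

```python
from baserow.contrib.database.fields.field_filters import map_ids_from_csv_string

# Keys may be Airtable string ids or integer ids from an earlier export;
# values are the newly created target option ids.
mapping = {"selabc": 10, 3: 30}

assert map_ids_from_csv_string("selabc,3", mapping) == ["10", "30"]
# Unknown ids are silently dropped:
assert map_ids_from_csv_string("selabc,999", mapping) == ["10"]
```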

class AnnotatedQ:
    """
    A simple wrapper class combining the params for a Queryset.annotate call with a
@@ -1,7 +1,11 @@
from typing import Any, Dict, List
from typing import Any, Dict, List, Optional

from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.utils.duration import D_H_M
from baserow.core.import_export.utils import file_chunk_generator
from baserow.core.storage import ExportZipFile, Storage
from baserow.core.user_files.handler import UserFileHandler


def construct_all_possible_field_kwargs(
@@ -295,3 +299,64 @@ def construct_all_possible_field_kwargs(
        all_interesting_field_kwargs.keys()
    ), "Please add the new field type to the testing dictionary of interesting kwargs"
    return all_interesting_field_kwargs


def prepare_files_for_export(
    records: List[Dict[str, Any]],
    cache: Dict[str, Any],
    files_zip: Optional[ExportZipFile] = None,
    storage: Optional[Storage] = None,
    name_prefix: str = "",
) -> List[Dict[str, Any]]:
    """
    Prepares file field values for export by either adding them to a zip file or
    returning the serialized file data.

    :param records: List of file records containing file metadata.
    :param cache: A dictionary used to track which files have already been processed
        to avoid duplicates.
    :param files_zip: Optional ExportZipFile to add the actual file contents to.
    :param storage: Optional storage backend to read the file contents from when adding
        to the zip.
    :param name_prefix: Optional prefix to prepend to file names in the export.
    :return: List of serialized file metadata.
    """

    file_names = []
    user_file_handler = UserFileHandler()

    for record in records:
        # Check if the user file object is already in the cache and if not,
        # it must be fetched and added to it.
        file_name = f"{name_prefix}{record['name']}"
        cache_entry = f"user_file_{file_name}"
        if cache_entry not in cache:
            if files_zip is not None and file_name not in [
                item["name"] for item in files_zip.info_list()
            ]:
                file_path = user_file_handler.user_file_path(record["name"])
                # Create a chunk generator for the file content and add it to the zip
                # stream. That file will be read when the zip stream is being
                # written to the final zip file.
                chunk_generator = file_chunk_generator(storage, file_path)
                files_zip.add(chunk_generator, file_name)

            # This is just used to avoid writing the same file twice.
            cache[cache_entry] = True

        if files_zip is None:
            # If the zip file is `None`, it means we're duplicating this row. To
            # avoid unnecessary queries, we just add the complete file, and will
            # use that during import instead of fetching the user file object.
            file_names.append(record)
        else:
            file_names.append(
                DatabaseExportSerializedStructure.file_field_value(
                    name=file_name,
                    visible_name=record["visible_name"],
                    original_name=record["name"],
                    size=record["size"],
                )
            )

    return file_names
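A sketch of how a field type calls this helper during an export run. The `cache` dict is shared across rows and fields so each physical file lands in the zip only once; the record below is hypothetical, and passing `files_zip=None` exercises the row-duplication path that returns the raw records untouched:

```python
from baserow.contrib.database.fields.field_helpers import prepare_files_for_export

cache = {}  # shared across the whole export run

row_files = prepare_files_for_export(
    [{"name": "abc123.png", "visible_name": "logo.png", "size": 2048}],
    cache,
    files_zip=None,  # duplicating a row: records are passed through as-is
    storage=None,
)
assert row_files[0]["visible_name"] == "logo.png"
```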
@@ -93,7 +93,6 @@ from baserow.contrib.database.api.views.errors import (
    ERROR_VIEW_NOT_IN_TABLE,
)
from baserow.contrib.database.db.functions import RandomUUID
from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
from baserow.contrib.database.fields.filter_support.formula import (
    FormulaFieldTypeArrayFilterSupport,
)

@@ -132,7 +131,11 @@ from baserow.contrib.database.types import SerializedRowHistoryFieldMetadata
from baserow.contrib.database.validators import UnicodeRegexValidator
from baserow.contrib.database.views.exceptions import ViewDoesNotExist, ViewNotInTable
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.models import OWNERSHIP_TYPE_COLLABORATIVE, View
from baserow.contrib.database.views.models import (
    DEFAULT_SORT_TYPE_KEY,
    OWNERSHIP_TYPE_COLLABORATIVE,
    View,
)
from baserow.core.db import (
    CombinedForeignKeyAndManyToManyMultipleFieldPrefetch,
    collate_expression,
@@ -143,7 +146,6 @@ from baserow.core.fields import SyncedDateTimeField
from baserow.core.formula import BaserowFormulaException
from baserow.core.formula.parser.exceptions import FormulaFunctionTypeDoesNotExist
from baserow.core.handler import CoreHandler
from baserow.core.import_export.utils import file_chunk_generator
from baserow.core.models import UserFile, WorkspaceUser
from baserow.core.registries import ImportExportConfig
from baserow.core.storage import ExportZipFile, get_default_storage

@@ -154,6 +156,7 @@ from baserow.core.utils import list_to_comma_separated_string
from .constants import (
    BASEROW_BOOLEAN_FIELD_FALSE_VALUES,
    BASEROW_BOOLEAN_FIELD_TRUE_VALUES,
    SINGLE_SELECT_SORT_BY_ORDER,
    UPSERT_OPTION_DICT_KEY,
    DeleteFieldStrategyEnum,
)

@@ -190,6 +193,7 @@ from .field_filters import (
    filename_contains_filter,
    parse_ids_from_csv_string,
)
from .field_helpers import prepare_files_for_export
from .field_sortings import OptionallyAnnotatedOrderBy
from .fields import BaserowExpressionField, BaserowLastModifiedField
from .fields import DurationField as DurationModelField
@@ -260,7 +264,7 @@ if TYPE_CHECKING:

class CollationSortMixin:
    def get_order(
        self, field, field_name, order_direction, table_model=None
        self, field, field_name, order_direction, sort_type, table_model=None
    ) -> OptionallyAnnotatedOrderBy:
        field_expr = collate_expression(F(field_name))

@@ -408,6 +412,8 @@ class TextFieldType(CollationSortMixin, FieldType):
    serializer_field_names = ["text_default"]
    _can_group_by = True

    can_upsert = True

    def get_serializer_field(self, instance, **kwargs):
        required = kwargs.get("required", False)
        return serializers.CharField(

@@ -452,8 +458,9 @@ class LongTextFieldType(CollationSortMixin, FieldType):
    model_class = LongTextField
    allowed_fields = ["long_text_enable_rich_text"]
    serializer_field_names = ["long_text_enable_rich_text"]
    can_upsert = True

    def check_can_group_by(self, field: Field) -> bool:
    def check_can_group_by(self, field: Field, sort_type: str) -> bool:
        return not field.long_text_enable_rich_text

    def can_be_primary_field(self, field_or_values: Union[Field, dict]) -> bool:

@@ -566,6 +573,7 @@ class NumberFieldType(FieldType):
    }
    _can_group_by = True
    _db_column_fields = ["number_decimal_places"]
    can_upsert = True

    def prepare_value_for_db(self, instance: NumberField, value):
        if value is None:
@@ -807,6 +815,7 @@ class RatingFieldType(FieldType):
    serializer_field_names = ["max_value", "color", "style"]
    _can_group_by = True
    _db_column_fields = []
    can_upsert = True

    def prepare_value_for_db(self, instance, value):
        if not value:

@@ -932,6 +941,7 @@ class BooleanFieldType(FieldType):
    type = "boolean"
    model_class = BooleanField
    _can_group_by = True
    can_upsert = True

    def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
        """

@@ -1021,6 +1031,7 @@ class DateFieldType(FieldType):
    }
    _can_group_by = True
    _db_column_fields = ["date_include_time"]
    can_upsert = True

    def can_represent_date(self, field):
        return True
@@ -1619,7 +1630,7 @@ class LastModifiedByFieldType(ReadOnlyFieldType):
        return user.email if user else None

    def get_order(
        self, field, field_name, order_direction, table_model=None
        self, field, field_name, order_direction, sort_type, table_model=None
    ) -> OptionallyAnnotatedOrderBy:
        """
        If the user wants to sort the results they expect them to be ordered

@@ -1627,7 +1638,7 @@ class LastModifiedByFieldType(ReadOnlyFieldType):
        """

        order = collate_expression(
            self.get_sortable_column_expression(field, field_name)
            self.get_sortable_column_expression(field, field_name, sort_type)
        )

        if order_direction == "ASC":

@@ -1694,7 +1705,10 @@ class LastModifiedByFieldType(ReadOnlyFieldType):
        )

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        return F(f"{field_name}__first_name")

@@ -1833,7 +1847,7 @@ class CreatedByFieldType(ReadOnlyFieldType):
        return user.email if user else None

    def get_order(
        self, field, field_name, order_direction, table_model=None
        self, field, field_name, order_direction, sort_type, table_model=None
    ) -> OptionallyAnnotatedOrderBy:
        """
        If the user wants to sort the results they expect them to be ordered

@@ -1841,7 +1855,7 @@ class CreatedByFieldType(ReadOnlyFieldType):
        """

        order = collate_expression(
            self.get_sortable_column_expression(field, field_name)
            self.get_sortable_column_expression(field, field_name, sort_type)
        )

        if order_direction == "ASC":

@@ -1908,7 +1922,10 @@ class CreatedByFieldType(ReadOnlyFieldType):
        )

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        return F(f"{field_name}__first_name")

@@ -1923,9 +1940,10 @@ class DurationFieldType(FieldType):
    serializer_field_names = ["duration_format"]
    _can_group_by = True
    _db_column_fields = []
    can_upsert = True

    def get_model_field(self, instance: DurationField, **kwargs):
        return DurationModelField(instance.duration_format, null=True)
        return DurationModelField(instance.duration_format, null=True, **kwargs)

    def get_serializer_field(self, instance: DurationField, **kwargs):
        return DurationFieldSerializer(

@@ -2070,7 +2088,10 @@ class DurationFieldType(FieldType):
        setattr(row, field_name, value)

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        return F(f"{field_name}")

@@ -2190,16 +2211,18 @@ class LinkRowFieldType(
        return field.specific.link_row_table_primary_field

    def _check_related_field_can_order_by(
        self, related_primary_field: Type[Field]
        self,
        related_primary_field: Type[Field],
        order_type: str,
    ) -> bool:
        related_primary_field_type = field_type_registry.get_by_model(
            related_primary_field.specific_class
        )
        return related_primary_field_type.check_can_order_by(
            related_primary_field.specific
            related_primary_field.specific, order_type
        )

    def check_can_group_by(self, field):
    def check_can_group_by(self, field, sort_type):
        related_primary_field = self._get_related_table_primary_field(field)
        if related_primary_field is None:
            return False

@@ -2207,7 +2230,9 @@ class LinkRowFieldType(
        related_primary_field_type = field_type_registry.get_by_model(
            related_primary_field
        )
        return related_primary_field_type.check_can_group_by(related_primary_field)
        return related_primary_field_type.check_can_group_by(
            related_primary_field, sort_type
        )

    def _get_group_by_agg_expression(self, field_name: str) -> dict:
        return ArrayAgg(
@@ -2221,11 +2246,13 @@ class LinkRowFieldType(
            distinct=True,
        )

    def check_can_order_by(self, field: Field) -> bool:
    def check_can_order_by(self, field: Field, sort_type: str) -> bool:
        related_primary_field = self._get_related_table_primary_field(field)
        if related_primary_field is None:
            return False
        return self._check_related_field_can_order_by(related_primary_field.specific)
        return self._check_related_field_can_order_by(
            related_primary_field.specific, sort_type
        )

    def get_value_for_filter(self, row: "GeneratedTableModel", field):
        related_primary_field = self._get_related_table_primary_field(

@@ -2241,7 +2268,9 @@ class LinkRowFieldType(
            row, related_primary_field
        )

    def get_order(self, field, field_name, order_direction, table_model=None):
    def get_order(
        self, field, field_name, order_direction, sort_type, table_model=None
    ):
        related_primary_field = self._get_related_table_primary_field(
            field, table_model
        )

@@ -2249,7 +2278,9 @@ class LinkRowFieldType(
            raise ValueError("Cannot find the related primary field.")

        related_primary_field = related_primary_field.specific
        if not self._check_related_field_can_order_by(related_primary_field):
        if not self._check_related_field_can_order_by(
            related_primary_field, DEFAULT_SORT_TYPE_KEY
        ):
            raise ValueError(
                "The primary field for the related table cannot be ordered by."
            )

@@ -2260,6 +2291,7 @@ class LinkRowFieldType(
            related_primary_field_type.get_sortable_column_expression(
                related_primary_field,
                f"{field_name}__{related_primary_field.db_column}",
                sort_type,
            )
        )

@@ -3434,6 +3466,9 @@ class LinkRowFieldType(
                **already_serialized_linked_rows,
                **new_serialized_linked_rows,
            },
            "linked_table_id": field.link_row_table_id,
            "linked_field_id": field.link_row_related_field_id,
            "primary_value": str(row),
        }

    def are_row_values_equal(self, value1: any, value2: any) -> bool:

@@ -3461,6 +3496,7 @@ class LinkRowFieldType(
class EmailFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):
    type = "email"
    model_class = EmailField
    can_upsert = True

    @property
    def regex(self):
@@ -3496,7 +3532,7 @@ class FileFieldType(FieldType):
    model_class = FileField
    can_be_in_form_view = True
    can_get_unique_values = False
    _can_order_by = False
    _can_order_by_types = []

    def to_baserow_formula_type(self, field) -> BaserowFormulaType:
        return BaserowFormulaArrayType(BaserowFormulaSingleFileType(nullable=True))
@@ -3731,43 +3767,15 @@ class FileFieldType(FieldType):
        cache: Dict[str, Any],
        files_zip: Optional[ExportZipFile] = None,
        storage: Optional[Storage] = None,
        name_prefix: str = "",
    ) -> List[Dict[str, Any]]:
        file_names = []
        user_file_handler = UserFileHandler()

        for file in self.get_internal_value_from_db(row, field_name):
            # Check if the user file object is already in the cache and if not,
            # it must be fetched and added to it.
            cache_entry = f"user_file_{file['name']}"
            if cache_entry not in cache:
                if files_zip is not None and file["name"] not in [
                    item["name"] for item in files_zip.info_list()
                ]:
                    file_path = user_file_handler.user_file_path(file["name"])
                    # Create a chunk generator for the file content and add it to the zip
                    # stream. That file will be read when the zip stream is being
                    # written to the final zip file.
                    chunk_generator = file_chunk_generator(storage, file_path)
                    files_zip.add(chunk_generator, file["name"])

                # This is just used to avoid writing the same file twice.
                cache[cache_entry] = True

            if files_zip is None:
                # If the zip file is `None`, it means we're duplicating this row. To
                # avoid unnecessary queries, we just add the complete file, and will
                # use that during import instead of fetching the user file object.
                file_names.append(file)
            else:
                file_names.append(
                    DatabaseExportSerializedStructure.file_field_value(
                        name=file["name"],
                        visible_name=file["visible_name"],
                        original_name=file["name"],
                    )
                )

        return file_names
        return prepare_files_for_export(
            self.get_internal_value_from_db(row, field_name),
            cache,
            files_zip,
            storage,
            name_prefix,
        )

    def set_import_serialized_value(
        self,
@@ -3877,7 +3885,10 @@ class SelectOptionBaseFieldType(FieldType):
        return queryset

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        return F(f"{field_name}__value")

@@ -3899,6 +3910,7 @@ class SelectOptionBaseFieldType(FieldType):
class SingleSelectFieldType(CollationSortMixin, SelectOptionBaseFieldType):
    type = "single_select"
    model_class = SingleSelectField
    _can_order_by_types = [DEFAULT_SORT_TYPE_KEY, SINGLE_SELECT_SORT_BY_ORDER]

    def get_serializer_field(self, instance, **kwargs):
        required = kwargs.get("required", False)
@@ -4152,8 +4164,19 @@ class SingleSelectFieldType(CollationSortMixin, SelectOptionBaseFieldType):
            connection, from_field, to_field
        )

    def get_sortable_column_expression(
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        if sort_type == SINGLE_SELECT_SORT_BY_ORDER:
            return F(f"{field_name}__order")
        else:
            return super().get_sortable_column_expression(field, field_name, sort_type)

    def get_order(
        self, field, field_name, order_direction, table_model=None
        self, field, field_name, order_direction, sort_type, table_model=None
    ) -> OptionallyAnnotatedOrderBy:
        """
        If the user wants to sort the results they expect them to be ordered

@@ -4162,10 +4185,15 @@ class SingleSelectFieldType(CollationSortMixin, SelectOptionBaseFieldType):
        to the correct position.
        """

        order = collate_expression(
            self.get_sortable_column_expression(field, field_name)
        column_expression = self.get_sortable_column_expression(
            field, field_name, sort_type
        )

        if sort_type == SINGLE_SELECT_SORT_BY_ORDER:
            order = column_expression
        else:
            order = collate_expression(column_expression)

        if order_direction == "ASC":
            order = order.asc(nulls_first=True)
        else:
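In practice the two sort types target different columns on the joined select option table: `default` collates on the option's display value, while `order` uses the option's manual position in the dropdown and deliberately skips collation. A reduced sketch with Django's `F` expressions (the `field_42` column name is hypothetical):

```python
from django.db.models import F

SINGLE_SELECT_SORT_BY_ORDER = "order"


def sortable_expression(field_name: str, sort_type: str) -> F:
    # "order" sorts by the option's position in the dropdown; anything
    # else falls back to the option's display value.
    if sort_type == SINGLE_SELECT_SORT_BY_ORDER:
        return F(f"{field_name}__order")
    return F(f"{field_name}__value")


# e.g. queryset.order_by(
#     sortable_expression("field_42", "order").asc(nulls_first=True)
# )
```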
@@ -4613,7 +4641,9 @@ class MultipleSelectFieldType(
            q={f"select_option_value_{field_name}__iregex": rf"\m{value}\M"},
        )

    def get_order(self, field, field_name, order_direction, table_model=None):
    def get_order(
        self, field, field_name, order_direction, sort_type, table_model=None
    ):
        """
        Order by the concatenated values of the select options, separated by a comma.
        """

@@ -4626,7 +4656,7 @@ class MultipleSelectFieldType(
        sort_column_name = f"{field_name}_agg_sort"
        query = Coalesce(
            StringAgg(
                self.get_sortable_column_expression(field, field_name),
                self.get_sortable_column_expression(field, field_name, sort_type),
                ",",
                output_field=models.TextField(),
            ),

@@ -4726,6 +4756,7 @@ class PhoneNumberFieldType(CollationSortMixin, CharFieldMatchingRegexFieldType):

    type = "phone_number"
    model_class = PhoneNumberField
    can_upsert = True

    MAX_PHONE_NUMBER_LENGTH = 100

@@ -5284,15 +5315,22 @@ class FormulaFieldType(FormulaFieldTypeArrayFilterSupport, ReadOnlyFieldType):
        if apply_updates:
            update_collector.apply_updates_and_get_updated_fields(field_cache)

    def check_can_order_by(self, field):
    def check_can_order_by(self, field, order_type):
        # The formula types are not compatible with the order type. Therefore,
        # if the `order_type` is not the default, it will always return False.
        if order_type != DEFAULT_SORT_TYPE_KEY:
            return False
        return self.to_baserow_formula_type(field.specific).can_order_by

    def check_can_group_by(self, field):
    def check_can_group_by(self, field, sort_type):
        # The formula types are not compatible with the order type. Therefore,
        # if the `order_type` is not the default, it will always return False.
        return self.to_baserow_formula_type(field.specific).can_group_by

    def get_order(
        self, field, field_name, order_direction, table_model=None
        self, field, field_name, order_direction, sort_type, table_model=None
    ) -> OptionallyAnnotatedOrderBy:
        # Ignore the `sort_type` because that is not yet supported in formulas.
        return self.to_baserow_formula_type(field.specific).get_order(
            field, field_name, order_direction, table_model=table_model
        )
@@ -6039,7 +6077,10 @@ class LookupFieldType(FormulaFieldType):


class MultipleCollaboratorsFieldType(
    CollationSortMixin, ManyToManyFieldTypeSerializeToInputValueMixin, FieldType
    CollationSortMixin,
    ManyToManyFieldTypeSerializeToInputValueMixin,
    ManyToManyGroupByMixin,
    FieldType,
):
    type = "multiple_collaborators"
    model_class = MultipleCollaboratorsField

@@ -6055,6 +6096,7 @@ class MultipleCollaboratorsFieldType(
        "notify_user_when_added": serializers.BooleanField(required=False),
    }
    is_many_to_many_field = True
    _can_group_by = True

    def get_serializer_field(self, instance, **kwargs):
        required = kwargs.pop("required", False)
@@ -6368,7 +6410,9 @@ class MultipleCollaboratorsFieldType(
    def random_to_input_value(self, field, value):
        return [{"id": user_id} for user_id in value]

    def get_order(self, field, field_name, order_direction, table_model=None):
    def get_order(
        self, field, field_name, order_direction, sort_type, table_model=None
    ):
        """
        If the user wants to sort the results they expect them to be ordered
        alphabetically based on the user's name and not on the id which is

@@ -6379,7 +6423,7 @@ class MultipleCollaboratorsFieldType(
        sort_column_name = f"{field_name}_agg_sort"
        query = Coalesce(
            StringAgg(
                self.get_sortable_column_expression(field, field_name),
                self.get_sortable_column_expression(field, field_name, sort_type),
                "",
                output_field=models.TextField(),
            ),

@@ -6404,7 +6448,10 @@ class MultipleCollaboratorsFieldType(
        return value

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        return F(f"{field_name}__first_name")

@@ -6422,13 +6469,17 @@ class MultipleCollaboratorsFieldType(
                JSONBAgg(
                    get_collaborator_extractor(db_column, model_field),
                    filter=Q(**{f"{db_column}__isnull": False}),
                    order=f"{db_column}__id",
                ),
                Value([], output_field=JSONField()),
            )
        else:
            return Coalesce(
                wrap_in_subquery(
                    JSONBAgg(get_collaborator_extractor(db_column, model_field)),
                    JSONBAgg(
                        get_collaborator_extractor(db_column, model_field),
                        order=f"{db_column}__id",
                    ),
                    db_column,
                    model_field.model,
                ),
@@ -6757,7 +6808,7 @@ class PasswordFieldType(FieldType):
    model_class = PasswordField
    can_be_in_form_view = True
    keep_data_on_duplication = True
    _can_order_by = False
    _can_order_by_types = []
    _can_be_primary_field = False
    can_get_unique_values = False

@@ -6803,7 +6854,7 @@ class PasswordFieldType(FieldType):
        # `False` as string depending on whether the value is set.
        return bool(value)

    def prepare_row_history_value_from_action_meta_data(self, value):
    def prepare_value_for_row_history(self, value):
        # We don't want to expose the hash of the password, so we just show `True` or
        # `False` as string depending on whether the value is set.
        return bool(value)
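The renamed hook keeps the same masking behaviour, now under a name that describes its single purpose, the row history. Its effect is simply:

```python
def prepare_value_for_row_history(value):
    # Never expose the password hash in row history; only whether it is set.
    return bool(value)


assert prepare_value_for_row_history("pbkdf2_sha256$fakehash") is True  # hash is hypothetical
assert prepare_value_for_row_history(None) is False
```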
@@ -363,7 +363,7 @@ class FieldHandler(metaclass=baserow_trace_methods(tracer)):
        # already exists. If so the field cannot be created and an exception is raised.
        if primary and Field.objects.filter(table=table, primary=True).exists():
            raise PrimaryFieldAlreadyExists(
                f"A primary field already exists for the " f"table {table}."
                f"A primary field already exists for the table {table}."
            )

        # Figure out which model to use and which field types are allowed for the given

@@ -579,6 +579,7 @@ class FieldHandler(metaclass=baserow_trace_methods(tracer)):
            raise IncompatiblePrimaryFieldTypeError(to_field_type_name)

        if baserow_field_type_changed:
            ViewHandler().before_field_type_change(field)
            dependants_broken_due_to_type_change = (
                from_field_type.get_dependants_which_will_break_when_field_type_changes(
                    field, to_field_type, field_cache
@@ -498,6 +498,11 @@ class LinkRowField(Field):

    @property
    def link_row_table_primary_field(self):
        # It's possible to optionally preset the `link_row_table_primary_field` using
        # the setter. If that's the case, then it must be returned.
        if hasattr(self, "_link_row_table_primary_field"):
            return self._link_row_table_primary_field

        # LinkRowFieldType.enhance_field_queryset prefetches the primary field
        # into RELATED_PRIMARY_FIELD_ATTR. Let's check if it's already there first.
        if related_primary_field_set := getattr(

@@ -510,6 +515,10 @@ class LinkRowField(Field):
        except Field.DoesNotExist:
            return None

    @link_row_table_primary_field.setter
    def link_row_table_primary_field(self, value):
        self._link_row_table_primary_field = value

    @property
    def is_self_referencing(self):
        return self.link_row_table_id == self.table_id
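The new setter turns the property into an overridable cache: import/export code that already knows the related primary field can preset it and skip the lookup entirely. A self-contained sketch of the pattern (the fetch helper below is a hypothetical stand-in for the prefetch/query logic above):

```python
class LinkRowField:
    @property
    def link_row_table_primary_field(self):
        # A preset value (via the setter below) wins over any lookup.
        if hasattr(self, "_link_row_table_primary_field"):
            return self._link_row_table_primary_field
        return self._fetch_primary_field()

    @link_row_table_primary_field.setter
    def link_row_table_primary_field(self, value):
        self._link_row_table_primary_field = value

    def _fetch_primary_field(self):
        # Stand-in for the prefetch check and database query.
        return "primary-field-from-db"


field = LinkRowField()
field.link_row_table_primary_field = "preset-primary-field"
assert field.link_row_table_primary_field == "preset-primary-field"  # no query
```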
@@ -66,6 +66,7 @@ from baserow.core.registry import (
    Registry,
)

from ..views.models import DEFAULT_SORT_TYPE_KEY
from .exceptions import (
    FieldTypeAlreadyRegistered,
    FieldTypeDoesNotExist,
@@ -126,8 +127,11 @@ class FieldType(
        field_type_registry.register(ExampleFieldType())
    """

    _can_order_by = True
    """Indicates whether it is possible to order by this field type."""
    _can_order_by_types = [DEFAULT_SORT_TYPE_KEY]
    """
    Indicates by which sort types the field type can be ordered. Leave empty if it's
    not possible to sort by the field type.
    """

    _can_be_primary_field = True
    """Some field types cannot be the primary field."""
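A field type now opts into additional sort types by extending `_can_order_by_types`, and the empty list replaces the old `_can_order_by = False`. A hypothetical field type demonstrating the checks that `FieldType` performs further down in this file:

```python
DEFAULT_SORT_TYPE_KEY = "default"


class ExampleFieldType:
    # Orders by display value ("default") or by manual option position
    # ("order"), mirroring what SingleSelectFieldType does above.
    _can_order_by_types = [DEFAULT_SORT_TYPE_KEY, "order"]
    _can_group_by = True

    def check_can_order_by(self, field, sort_type):
        return sort_type in self._can_order_by_types

    def check_can_group_by(self, field, sort_type):
        return self._can_group_by and self.check_can_order_by(field, sort_type)


ft = ExampleFieldType()
assert ft.check_can_order_by(None, "order")
assert not ft.check_can_order_by(None, "reverse-alphabetical")
```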
@@ -206,6 +210,12 @@ class FieldType(
        some fields can depend on it like the `lookup` field.
    """

    can_upsert = False
    """
    A field of this type may be used to calculate a match value during import, which
    allows updating existing rows with imported data instead of adding them.
    """

    @property
    def db_column_fields(self) -> Set[str]:
        if self._db_column_fields is not None:
@@ -280,9 +290,9 @@ class FieldType(

        return getattr(row, field_name)

    def prepare_row_history_value_from_action_meta_data(self, value):
    def prepare_value_for_row_history(self, value):
        """
        Prepare the row action update action meta data value for the row history.
        Prepare the value for the row history.
        This can be used to change the value to a different format if needed. It's
        for example used by the password field to mask the hash.
        """
@@ -855,15 +865,16 @@ class FieldType(
        field: Type[Field],
        field_name: str,
        order_direction: str,
        sort_type: str,
        table_model: Optional["GeneratedTableModel"] = None,
    ) -> OptionallyAnnotatedOrderBy:
        """
        This hook can be called to generate a different order by expression.
        By default the normal field sorting will be applied.
        By default, the normal field sorting will be applied.
        Optionally a different expression can be generated. This is for example used
        by the single select field, which generates a mapping to achieve the correct
        sorting based on the select option value.
        Additionally an annotation can be returned which will get applied to the
        Additionally, an annotation can be returned which will get applied to the
        queryset.
        If you are implementing this method you should also implement the
        get_value_for_filter method.

@@ -871,13 +882,15 @@ class FieldType(
        :param field: The related field object instance.
        :param field_name: The name of the field.
        :param order_direction: The sort order direction (either "ASC" or "DESC").
        :param sort_type: The sort type that must be used, `default` is set as default
            when the sort is created.
        :param table_model: The table model instance that the field is part of,
            if available.
        :return: Either the expression that is added directly to the
            model.objects.order(), an AnnotatedOrderBy class or None.
        """

        field_expr = self.get_sortable_column_expression(field, field_name)
        field_expr = self.get_sortable_column_expression(field, field_name, sort_type)

        if order_direction == "ASC":
            field_order_by = field_expr.asc(nulls_first=True)
@@ -1602,36 +1615,38 @@ class FieldType(

        return self._can_filter_by

    def check_can_order_by(self, field: Field) -> bool:
    def check_can_order_by(self, field: Field, sort_type: str) -> bool:
        """
        Override this method if this field type can sometimes be ordered or sometimes
        cannot be ordered depending on the individual field state. By default will just
        return the bool property _can_order_by so if your field type doesn't depend
        on the field state and is always just True or False just set _can_order_by
        to the desired value.
        cannot be ordered depending on the individual field state. By default, it will
        check if the provided `sort_type` is in the `_can_order_by_types` property.

        :param field: The field to check to see if it can be ordered by or not.
        :param sort_type: The sort type to check if it's compatible.
        :return: True if a view can be ordered by this field, False otherwise.
        """

        return self._can_order_by
        return sort_type in self._can_order_by_types

    def check_can_group_by(self, field: Field) -> bool:
    def check_can_group_by(self, field: Field, sort_type: str) -> bool:
        """
        Override this method if this field type can sometimes be grouped or sometimes
        cannot be grouped depending on the individual field state. By default will just
        return the bool property _can_group_by so if your field type doesn't depend
        on the field state and is always just True or False just set _can_group_by
        to the desired value.
        return the bool property _can_group_by and checks if the sort_type is in the
        `_can_order_by_types` property.

        :param field: The field to check to see if it can be grouped by or not.
        :param sort_type: The sort type to check if it's compatible.
        :return: True if a view can be grouped by this field, False otherwise.
        """

        return self._can_group_by
        return self._can_group_by and self.check_can_order_by(field, sort_type)

    def get_sortable_column_expression(
        self, field: Field, field_name: str
        self,
        field: Field,
        field_name: str,
        sort_type: str,
    ) -> Expression | F:
        """
        Returns the expression that can be used to sort the field in the database.

@@ -1640,6 +1655,8 @@ class FieldType(

        :param field: The field to get the sortable column expression for.
        :param field_name: The name of the field in the table.
        :param sort_type: The sort type that must be used, `default` is set as default
            when the sort is created.
        :return: The expression that can be used to sort the field in the database.
        """

@@ -95,10 +95,14 @@ def _run_periodic_field_type_update_per_workspace(

    all_updated_fields = []

    fields = qs.filter(
        table__database__workspace_id=workspace.id,
        table__trashed=False,
        table__database__trashed=False,
    fields = (
        qs.filter(
            table__database__workspace_id=workspace.id,
            table__trashed=False,
            table__database__trashed=False,
        )
        .select_related("table")
        .prefetch_related("table__view_set")
    )
    # noinspection PyBroadException
    try:

@@ -114,6 +118,8 @@ def _run_periodic_field_type_update_per_workspace(
            tb=tb,
        )

    from baserow.contrib.database.views.handler import ViewSubscriptionHandler

    # After a successful periodic update of all fields, we would need to update the
    # search index for all of them in one function per table to avoid ending up in a
    # deadlock because rows are updated simultaneously.

@@ -123,6 +129,11 @@ def _run_periodic_field_type_update_per_workspace(
    for _, fields in fields_per_table.items():
        SearchHandler().entire_field_values_changed_or_created(fields[0].table, fields)

        with transaction.atomic():
            ViewSubscriptionHandler().notify_table_views_updates(
                fields[0].table.view_set.all()
            )


@app.task(bind=True)
def delete_mentions_marked_for_deletion(self):
@@ -26,6 +26,7 @@ from baserow.contrib.database.fields.exceptions import (
)
from baserow.contrib.database.rows.actions import ImportRowsActionType
from baserow.contrib.database.rows.exceptions import ReportMaxErrorCountExceeded
from baserow.contrib.database.rows.types import FileImportDict
from baserow.contrib.database.table.actions import CreateTableActionType
from baserow.contrib.database.table.exceptions import (
    InitialTableDataDuplicateName,

@@ -91,6 +92,7 @@ class FileImportJobType(JobType):

        filtered_dict = dict(**values)
        filtered_dict.pop("data")
        filtered_dict.pop("configuration", None)
        return filtered_dict

    def after_job_creation(self, job, values):

@@ -99,7 +101,10 @@ class FileImportJobType(JobType):
        """

        data_file = ContentFile(
            json.dumps(values["data"], ensure_ascii=False).encode("utf8")
            json.dumps(
                {"data": values["data"], "configuration": values.get("configuration")},
                ensure_ascii=False,
            ).encode("utf8")
        )
        job.data_file.save(None, data_file)

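On disk the job payload therefore changes from a bare data list to a wrapper object, leaving room for import options next to the rows. A sketch of the new shape (row values hypothetical; the diff types the loaded structure as `FileImportDict`):

```python
import json

payload = {
    "data": [["Name", "Age"], ["Ada", 36]],
    "configuration": None,  # reserved for import options
}
serialized = json.dumps(payload, ensure_ascii=False).encode("utf8")

# The run() side now reads data["data"] instead of the whole document:
assert json.loads(serialized)["data"][1] == ["Ada", 36]
```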
|
@ -154,8 +159,7 @@ class FileImportJobType(JobType):
|
|||
"""
|
||||
|
||||
with job.data_file.open("r") as fin:
|
||||
data = json.load(fin)
|
||||
|
||||
data: FileImportDict = json.load(fin)
|
||||
try:
|
||||
if job.table is None:
|
||||
new_table, error_report = action_type_registry.get_by_type(
|
||||
|
@ -164,7 +168,7 @@ class FileImportJobType(JobType):
|
|||
job.user,
|
||||
job.database,
|
||||
name=job.name,
|
||||
data=data,
|
||||
data=data["data"],
|
||||
first_row_header=job.first_row_header,
|
||||
progress=progress,
|
||||
)
|
||||
|
|
|
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2025-01-15 11:59+0000\n"
+"POT-Creation-Date: 2025-03-18 19:55+0000\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"

@@ -38,11 +38,11 @@ msgid ""
 "\"%(database_name)s\" (%(database_id)s)."
 msgstr ""

-#: src/baserow/contrib/database/airtable/actions.py:22
+#: src/baserow/contrib/database/airtable/actions.py:23
 msgid "Import database from Airtable"
 msgstr ""

-#: src/baserow/contrib/database/airtable/actions.py:24
+#: src/baserow/contrib/database/airtable/actions.py:25
 #, python-format
 msgid ""
 "Imported database "

@@ -80,7 +80,7 @@ msgstr ""
 msgid "The data sync synchronized"
 msgstr ""

-#: src/baserow/contrib/database/data_sync/handler.py:186
+#: src/baserow/contrib/database/data_sync/handler.py:187
 #: src/baserow/contrib/database/table/handler.py:548
 msgid "Grid"
 msgstr ""

@@ -148,8 +148,8 @@ msgid ""
 "%(new_primary_field_name)s"
 msgstr ""

-#: src/baserow/contrib/database/fields/models.py:415
-#: src/baserow/contrib/database/fields/models.py:594
+#: src/baserow/contrib/database/fields/models.py:453
+#: src/baserow/contrib/database/fields/models.py:641
 msgid "The format of the duration."
 msgstr ""

@@ -458,155 +458,155 @@ msgstr ""
 msgid "View sorted on field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:832
+#: src/baserow/contrib/database/views/actions.py:849
 msgid "Update a view sort"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:833
+#: src/baserow/contrib/database/views/actions.py:850
 #, python-format
 msgid "View sort updated on field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:941
+#: src/baserow/contrib/database/views/actions.py:978
 msgid "Delete a view sort"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:942
+#: src/baserow/contrib/database/views/actions.py:979
 #, python-format
 msgid "View sort deleted from field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1027
+#: src/baserow/contrib/database/views/actions.py:1073
 msgid "Order views"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1027
+#: src/baserow/contrib/database/views/actions.py:1073
 msgid "Views order changed"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1098
+#: src/baserow/contrib/database/views/actions.py:1144
 msgid "Update view field options"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1099
+#: src/baserow/contrib/database/views/actions.py:1145
 msgid "ViewFieldOptions updated"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1200
+#: src/baserow/contrib/database/views/actions.py:1247
 msgid "View slug URL updated"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1201
+#: src/baserow/contrib/database/views/actions.py:1248
 msgid "View changed public slug URL"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1274
+#: src/baserow/contrib/database/views/actions.py:1321
 msgid "Update view"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1275
+#: src/baserow/contrib/database/views/actions.py:1322
 #, python-format
 msgid "View \"%(view_name)s\" (%(view_id)s) updated"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1356
+#: src/baserow/contrib/database/views/actions.py:1403
 msgid "Create view"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1357
+#: src/baserow/contrib/database/views/actions.py:1404
 #, python-format
 msgid "View \"%(view_name)s\" (%(view_id)s) created"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1431
+#: src/baserow/contrib/database/views/actions.py:1478
 msgid "Duplicate view"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1433
+#: src/baserow/contrib/database/views/actions.py:1480
 #, python-format
 msgid ""
 "View \"%(view_name)s\" (%(view_id)s) duplicated from view "
 "\"%(original_view_name)s\" (%(original_view_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1507
+#: src/baserow/contrib/database/views/actions.py:1554
 msgid "Delete view"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1508
+#: src/baserow/contrib/database/views/actions.py:1555
 #, python-format
 msgid "View \"%(view_name)s\" (%(view_id)s) deleted"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1570
+#: src/baserow/contrib/database/views/actions.py:1617
 msgid "Create decoration"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1571
+#: src/baserow/contrib/database/views/actions.py:1618
 #, python-format
 msgid "View decoration %(decorator_id)s created"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1675
+#: src/baserow/contrib/database/views/actions.py:1722
 msgid "Update decoration"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1676
+#: src/baserow/contrib/database/views/actions.py:1723
 #, python-format
 msgid "View decoration %(decorator_id)s updated"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1813
+#: src/baserow/contrib/database/views/actions.py:1860
 msgid "Delete decoration"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1814
+#: src/baserow/contrib/database/views/actions.py:1861
 #, python-format
 msgid "View decoration %(decorator_id)s deleted"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1910
+#: src/baserow/contrib/database/views/actions.py:1957
 msgid "Create a view group"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:1911
+#: src/baserow/contrib/database/views/actions.py:1958
 #, python-format
 msgid "View grouped on field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2011
+#: src/baserow/contrib/database/views/actions.py:2063
 msgid "Update a view group"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2012
+#: src/baserow/contrib/database/views/actions.py:2064
 #, python-format
 msgid "View group by updated on field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2140
+#: src/baserow/contrib/database/views/actions.py:2213
 msgid "Delete a view group"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2141
+#: src/baserow/contrib/database/views/actions.py:2214
 #, python-format
 msgid "View group by deleted from field \"%(field_name)s\" (%(field_id)s)"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2235
+#: src/baserow/contrib/database/views/actions.py:2312
 msgid "Submit form"
 msgstr ""

-#: src/baserow/contrib/database/views/actions.py:2236
+#: src/baserow/contrib/database/views/actions.py:2313
 #, python-format
 msgid "Row (%(row_id)s) created via form submission"
 msgstr ""

-#: src/baserow/contrib/database/views/notification_types.py:84
+#: src/baserow/contrib/database/views/notification_types.py:86
 #, python-format
 msgid "%(form_name)s has been submitted in %(table_name)s"
 msgstr ""

-#: src/baserow/contrib/database/views/notification_types.py:101
+#: src/baserow/contrib/database/views/notification_types.py:103
 #, python-format
 msgid "and 1 more field"
 msgid_plural "and %(count)s more fields"

@@ -645,3 +645,28 @@ msgid ""
 "Webhook \"%(webhook_name)s\" (%(webhook_id)s) as %(webhook_request_method)s "
 "to %(webhook_url)s\" updated"
 msgstr ""
+
+#: src/baserow/contrib/database/webhooks/notification_types.py:92
+#, python-format
+msgid "%(name)s webhook has been deactivated."
+msgstr ""
+
+#: src/baserow/contrib/database/webhooks/notification_types.py:99
+#, python-format
+msgid ""
+"The webhook failed more than %(max_failures)s consecutive times and was "
+"therefore deactivated."
+msgstr ""
+
+#: src/baserow/contrib/database/webhooks/notification_types.py:155
+#, python-format
+msgid "%(name)s webhook payload too large."
+msgstr ""
+
+#: src/baserow/contrib/database/webhooks/notification_types.py:162
+#, python-format
+msgid ""
+"The payload for the %(name)s webhook with event ID %(event_id)s was too "
+"large. The content has been split into multiple batches, but data above the "
+"batch limit of %(batch_limit)s was discarded."
+msgstr ""
@@ -8,7 +8,7 @@ msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
 "POT-Creation-Date: 2024-01-18 13:32+0000\n"
-"PO-Revision-Date: 2024-01-19 09:37+0000\n"
+"PO-Revision-Date: 2025-03-18 13:24+0000\n"
 "Last-Translator: Jérémie Pardou-Piquemal <jrmi@jeremiez.net>\n"
 "Language-Team: French <https://hosted.weblate.org/projects/baserow/"
 "backend-database/fr/>\n"

@@ -17,7 +17,7 @@ msgstr ""
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=2; plural=n > 1;\n"
-"X-Generator: Weblate 5.4-dev\n"
+"X-Generator: Weblate 5.11-dev\n"

 #: src/baserow/contrib/database/action/scopes.py:9
 #, python-format

@@ -180,7 +180,7 @@ msgstr "Machine de Turing"

 #: src/baserow/contrib/database/plugins.py:103
 msgid "Computer architecture"
-msgstr "Architecture des ordinateur"
+msgstr "Architecture de l'ordinateur"

 #: src/baserow/contrib/database/plugins.py:104
 msgid "Cellular Automata"
@@ -0,0 +1,146 @@
+import json
+import re
+import sys
+from tempfile import NamedTemporaryFile
+
+from django.core.management.base import BaseCommand
+from django.db import transaction
+
+import requests
+from tqdm import tqdm
+
+from baserow.contrib.database.airtable.config import AirtableImportConfig
+from baserow.contrib.database.airtable.constants import AIRTABLE_BASE_URL
+from baserow.contrib.database.airtable.exceptions import AirtableBaseNotPublic
+from baserow.contrib.database.airtable.handler import BASE_HEADERS, AirtableHandler
+from baserow.contrib.database.airtable.utils import (
+    parse_json_and_remove_invalid_surrogate_characters,
+)
+from baserow.core.models import Workspace
+from baserow.core.utils import Progress, remove_invalid_surrogate_characters
+
+
+class Command(BaseCommand):
+    help = (
+        "This command fetches all Airtable templates, and attempts to import them into "
+        "the given workspace. It's created for testing purposes of the Airtable import."
+    )
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "workspace_id",
+            type=int,
+            help="The workspace ID that a copy of the imported Airtable base must be "
+            "added to.",
+        )
+        parser.add_argument(
+            "--start",
+            type=int,
+            help="From which index should the import start.",
+            default=0,
+        )
+        parser.add_argument(
+            "--limit",
+            type=int,
+            help="The maximum number of templates to install.",
+            default=-1,
+        )
+
+    def handle(self, *args, **options):
+        workspace_id = options["workspace_id"]
+        start_index = options["start"]
+        limit = options["limit"]
+
+        try:
+            workspace = Workspace.objects.get(pk=workspace_id)
+        except Workspace.DoesNotExist:
+            self.stdout.write(
+                self.style.ERROR(f"The workspace with id {workspace_id} was not found.")
+            )
+            sys.exit(1)
+
+        html_url = f"{AIRTABLE_BASE_URL}/templates"
+        html_response = requests.get(html_url, headers=BASE_HEADERS)  # nosec
+
+        if not html_response.ok:
+            raise Exception("Failed to fetch the Airtable templates page.")
+
+        decoded_content = remove_invalid_surrogate_characters(html_response.content)
+        raw_init_data = re.search(
+            "window.initData = (.*?)<\\/script>", decoded_content
+        ).group(1)
+        init_data = json.loads(raw_init_data)
+        client_code_version = init_data["codeVersion"]
+        page_load_id = init_data["pageLoadId"]
+
+        templates_url = (
+            f"{AIRTABLE_BASE_URL}/v0.3/exploreApplications"
+            f"?templateStatus=listed"
+            f"&shouldDisplayFull=true"
+            f"&descriptionSnippetMaxLength=300"
+            f"&categoryType=templateDesktopV2"
+        )
+
+        response = requests.get(
+            templates_url,
+            headers={
+                "x-airtable-inter-service-client": "webClient",
+                "x-airtable-inter-service-client-code-version": client_code_version,
+                "x-airtable-page-load-id": page_load_id,
+                "X-Requested-With": "XMLHttpRequest",
+                "x-time-zone": "Europe/Amsterdam",
+                "x-user-locale": "en",
+                **BASE_HEADERS,
+            },
+            timeout=3 * 60,
+        )  # nosec
+
+        json_decoded_content = parse_json_and_remove_invalid_surrogate_characters(
+            response
+        )
+
+        applications_by_id = json_decoded_content["exploreApplicationsById"].values()
+        i = 0
+        for index, application in enumerate(applications_by_id):
+            share_id = application["shareId"]
+            title = application["title"]
+
+            if limit != -1 and i >= limit:
+                print("finished!")
+                return
+
+            if index < start_index - 1:
+                print(
+                    f"Skipping {title} {share_id} {index + 1}/{len(applications_by_id)}"
+                )
+                continue
+
+            i += 1
+            print(
+                f"Going to import {title} {share_id} {index + 1}/{len(applications_by_id)}"
+            )
+
+            with tqdm(total=1000) as progress_bar:
+                progress = Progress(1000)
+
+                def progress_updated(percentage, state):
+                    progress_bar.set_description(state)
+                    progress_bar.update(progress.progress - progress_bar.n)
+
+                progress.register_updated_event(progress_updated)
+
+                with NamedTemporaryFile() as download_files_buffer:
+                    config = AirtableImportConfig(skip_files=True)
+                    with transaction.atomic():
+                        try:
+                            AirtableHandler.import_from_airtable_to_workspace(
+                                workspace,
+                                share_id,
+                                progress_builder=progress.create_child_builder(
+                                    represents_progress=progress.total
+                                ),
+                                download_files_buffer=download_files_buffer,
+                                config=config,
+                            )
+                        except AirtableBaseNotPublic:
+                            print(" Skipping because it's not public.")
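Since the file above is a standard Django management command, it can also be exercised programmatically. A minimal sketch, assuming the module is saved as import_airtable_templates.py; the command name is not visible in this diff:

```python
# Hypothetical usage sketch; "import_airtable_templates" is an assumed command
# name based on the file's purpose, not confirmed by this diff.
from django.core.management import call_command

# Import up to 5 public Airtable templates into workspace 42.
call_command("import_airtable_templates", 42, start=0, limit=5)
```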
@@ -0,0 +1,24 @@
+# Generated by Django 5.0.9 on 2025-03-04 18:18
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("database", "0180_view_allow_public_export"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="tablewebhookcall",
+            name="batch_id",
+            field=models.PositiveIntegerField(
+                help_text="The batch ID for this call. Null if not part of a batch. Used for batching multiple calls of the same event_id due to large data.",
+                null=True,
+            ),
+        ),
+        migrations.AlterUniqueTogether(
+            name="tablewebhookcall",
+            unique_together={("event_id", "batch_id", "webhook", "event_type")},
+        ),
+    ]
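Because the unique constraint now includes batch_id, one logical event may be recorded as several calls. A hedged sketch of what the constraint permits; only the four constrained columns are shown, the model's other columns are omitted:

```python
# Hedged sketch: the same event_id can be stored once per batch_id for a given
# webhook and event type. batch_id=None still marks an unbatched call.
TableWebhookCall.objects.create(
    webhook=webhook, event_id=event_id, event_type="rows.created", batch_id=1
)
TableWebhookCall.objects.create(
    webhook=webhook, event_id=event_id, event_type="rows.created", batch_id=2
)
```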
@@ -0,0 +1,93 @@
+# Generated by Django 5.0.9 on 2025-03-07 15:43
+
+import django.contrib.postgres.fields
+import django.db.models.deletion
+from django.db import migrations, models
+
+import baserow.core.fields
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("contenttypes", "0002_remove_content_type_name"),
+        ("database", "0181_tablewebhookcall_batch_id_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="tablewebhookevent",
+            name="views",
+            field=models.ManyToManyField(to="database.view"),
+        ),
+        migrations.CreateModel(
+            name="ViewRows",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("created_on", models.DateTimeField(auto_now_add=True)),
+                ("updated_on", baserow.core.fields.SyncedDateTimeField(auto_now=True)),
+                (
+                    "row_ids",
+                    django.contrib.postgres.fields.ArrayField(
+                        base_field=models.PositiveIntegerField(),
+                        default=list,
+                        help_text="The rows that are shown in the view. This list can be used by webhooks to determine which rows have been changed since the last check.",
+                        size=None,
+                    ),
+                ),
+                (
+                    "view",
+                    models.OneToOneField(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="rows",
+                        to="database.view",
+                    ),
+                ),
+            ],
+            options={
+                "abstract": False,
+            },
+        ),
+        migrations.CreateModel(
+            name="ViewSubscription",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("subscriber_id", models.PositiveIntegerField()),
+                (
+                    "subscriber_content_type",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        to="contenttypes.contenttype",
+                    ),
+                ),
+                (
+                    "view",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="subscribers",
+                        to="database.view",
+                    ),
+                ),
+            ],
+            options={
+                "unique_together": {
+                    ("view", "subscriber_content_type", "subscriber_id")
+                },
+            },
+        ),
+    ]
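The help_text on row_ids describes the intended use: ViewRows keeps the last seen row ids per view so webhook checks can diff against the current state. A minimal sketch, assuming a queryset of currently visible rows is available:

```python
# Hedged sketch; `visible_queryset` is a hypothetical queryset of the rows
# currently matching the view's filters.
previous_ids = set(view.rows.row_ids)  # ViewRows via related_name="rows"
current_ids = set(visible_queryset.values_list("id", flat=True))

entered = current_ids - previous_ids  # rows that entered the view
left = previous_ids - current_ids     # rows that left the view
```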
@@ -0,0 +1,32 @@
+# Generated by Django 5.0.9 on 2025-03-10 12:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("database", "0182_tablewebhookevent_views_viewrows_viewsubscription"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="viewgroupby",
+            name="type",
+            field=models.CharField(
+                db_default="default",
+                default="default",
+                help_text="Indicates the sort type. Will automatically fall back to `default` if incompatible with field type.",
+                max_length=32,
+            ),
+        ),
+        migrations.AddField(
+            model_name="viewsort",
+            name="type",
+            field=models.CharField(
+                db_default="default",
+                default="default",
+                help_text="Indicates the sort type. Will automatically fall back to `default` if incompatible with field type.",
+                max_length=32,
+            ),
+        ),
+    ]
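Because the column is added with both default and db_default set to "default", existing rows are backfilled at the database level and new sorts fall back to "default" unless a type is given. A hedged sketch; view, field and order are pre-existing ViewSort fields, not part of this migration:

```python
# Hedged sketch: the new `type` column defaults to "default" on both new and
# backfilled rows.
sort = ViewSort.objects.create(view=view, field=field, order="ASC")
assert sort.type == "default"
```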
@@ -66,7 +66,7 @@ class DatabasePlugin(Plugin):
             ["John", "Von Neumann", "", True],
             ["Blaise", "Pascal", "", True],
         ]
-        row_handler.import_rows(user, table, data, send_realtime_update=False)
+        row_handler.import_rows(user, table, data=data, send_realtime_update=False)

         # Creating the example projects table.
         table = table_handler.create_table_and_fields(

@@ -86,4 +86,4 @@ class DatabasePlugin(Plugin):
             [_("Computer architecture"), str(date(1945, 1, 1)), False],
             [_("Cellular Automata"), str(date(1952, 6, 1)), False],
         ]
-        row_handler.import_rows(user, table, data, send_realtime_update=False)
+        row_handler.import_rows(user, table, data=data, send_realtime_update=False)
@@ -95,7 +95,9 @@ def load_test_data():
             ("Rabbit", select_by_name["Meat"], fake.sentence(nb_words=10)),
         ]

-        RowHandler().import_rows(user, products_table, data, send_realtime_update=False)
+        RowHandler().import_rows(
+            user, products_table, data=data, send_realtime_update=False
+        )

         try:
             suppliers_table = Table.objects.get(name="Suppliers", database=database)

@@ -195,7 +197,7 @@ def load_test_data():
         ]

         RowHandler().import_rows(
-            user, suppliers_table, data, send_realtime_update=False
+            user, suppliers_table, data=data, send_realtime_update=False
         )

         try:

@@ -253,7 +255,7 @@ def load_test_data():
         ]

         RowHandler().import_rows(
-            user, retailers_table, data, send_realtime_update=False
+            user, retailers_table, data=data, send_realtime_update=False
         )

         try:

@@ -358,5 +360,5 @@ def load_test_data():
         ]

         RowHandler().import_rows(
-            user, user_accounts_table, data, send_realtime_update=False
+            user, user_accounts_table, data=data, send_realtime_update=False
         )
@@ -6,6 +6,8 @@ from typing import Any, Dict, List, Optional, Tuple, Type
 from django.contrib.auth.models import AbstractUser
 from django.utils.translation import gettext_lazy as _

+from loguru import logger
+
 from baserow.contrib.database.action.scopes import (
     TABLE_ACTION_CONTEXT,
     TableActionScopeType,

@@ -18,6 +20,7 @@ from baserow.contrib.database.rows.handler import (
     GeneratedTableModelForUpdate,
     RowHandler,
 )
+from baserow.contrib.database.rows.types import FileImportDict
 from baserow.contrib.database.table.handler import TableHandler
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
 from baserow.core.action.models import Action

@@ -178,13 +181,17 @@ class CreateRowsActionType(UndoableActionType):
                 "Can't create rows because it has a data sync."
             )

-        rows = RowHandler().create_rows(
-            user,
-            table,
-            rows_values,
-            before_row=before_row,
-            model=model,
-            send_webhook_events=send_webhook_events,
+        rows = (
+            RowHandler()
+            .create_rows(
+                user,
+                table,
+                rows_values,
+                before_row=before_row,
+                model=model,
+                send_webhook_events=send_webhook_events,
+            )
+            .created_rows
         )

         workspace = table.database.workspace

@@ -244,7 +251,7 @@ class ImportRowsActionType(UndoableActionType):
         cls,
         user: AbstractUser,
         table: Table,
-        data=List[List[Any]],
+        data: FileImportDict,
         progress: Optional[Progress] = None,
     ) -> Tuple[List[GeneratedTableModel], Dict[str, Any]]:
         """

@@ -270,9 +277,14 @@ class ImportRowsActionType(UndoableActionType):
         )

         created_rows, error_report = RowHandler().import_rows(
-            user, table, data, progress=progress
+            user,
+            table,
+            data=data["data"],
+            configuration=data.get("configuration") or {},
+            progress=progress,
         )

+        if error_report:
+            logger.warning(f"Errors during rows import: {error_report}")
         workspace = table.database.workspace
         params = cls.Params(
             table.id,

@@ -836,7 +848,7 @@ class UpdateRowsActionType(UndoableActionType):

     @classmethod
     def serialized_to_params(cls, serialized_params: Any) -> Any:
         """
         When storing integers as dictionary keys in a database, they are saved
         as strings. This method is designed to convert these string keys back
         into integers. This ensures that we can accurately use the row.id as a
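For reference, a hedged sketch of the payload shape the new `data: FileImportDict` parameter implies, inferred from `data["data"]` and `data.get("configuration")` above together with the upsert keys read in RowHandler.import_rows; the field id and values are illustrative:

```python
# Hypothetical FileImportDict payload; keys inferred from this diff.
file_import = {
    "data": [["alice@example.com", "Alice"], ["bob@example.com", "Bob"]],
    "configuration": {
        "upsert_fields": [123],  # field ids used to match existing rows
        "upsert_values": [["alice@example.com"], ["bob@example.com"]],
    },
}
```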
@@ -36,3 +36,12 @@ class CannotDeleteRowsInTable(Exception):
     """
     Raised when it's not possible to delete rows in the table.
     """
+
+
+class InvalidRowLength(Exception):
+    """
+    Row's length doesn't match expected length based on schema.
+    """
+
+    def __init__(self, row_idx: int):
+        self.row_idx = row_idx
@@ -1,14 +1,13 @@
 from collections import defaultdict
 from copy import deepcopy
 from decimal import Decimal
+from functools import cached_property
 from typing import (
     TYPE_CHECKING,
     Any,
     Dict,
     Iterable,
     List,
     NamedTuple,
     NewType,
     Optional,
     Set,
     Tuple,

@@ -17,24 +16,37 @@ from typing import (
     cast,
 )

+from django import db
 from django.contrib.auth.models import AbstractUser
 from django.core.exceptions import ValidationError
 from django.db import connection, transaction
+from django.db.models import Field as DjangoField
 from django.db.models import Model, QuerySet, Window
 from django.db.models.expressions import RawSQL
 from django.db.models.fields.related import ForeignKey, ManyToManyField
 from django.db.models.functions import RowNumber
 from django.utils.encoding import force_str

+from celery.utils import chunks
 from opentelemetry import metrics, trace

+from baserow.contrib.database.fields.dependencies.handler import FieldDependencyHandler
+from baserow.contrib.database.fields.dependencies.update_collector import (
+    FieldUpdateCollector,
+)
+from baserow.contrib.database.fields.exceptions import (
+    FieldNotInTable,
+    IncompatibleField,
+)
 from baserow.contrib.database.fields.field_cache import FieldCache
-from baserow.contrib.database.fields.registries import field_type_registry
+from baserow.contrib.database.fields.registries import FieldType, field_type_registry
 from baserow.contrib.database.fields.utils import get_field_id_from_field_key
+from baserow.contrib.database.search.handler import SearchHandler
+from baserow.contrib.database.table.constants import (
+    CREATED_BY_COLUMN_NAME,
+    LAST_MODIFIED_BY_COLUMN_NAME,
+    ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
+)
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
 from baserow.contrib.database.table.operations import (
     CreateRowDatabaseTableOperationType,

@@ -49,20 +61,15 @@ from baserow.core.db import (
 )
 from baserow.core.exceptions import CannotCalculateIntermediateOrder
 from baserow.core.handler import CoreHandler
+from baserow.core.psycopg import sql
 from baserow.core.telemetry.utils import baserow_trace_methods
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.trash.registries import trash_item_type_registry
 from baserow.core.utils import Progress, get_non_unique_values, grouper

-from ..search.handler import SearchHandler
-from ..table.constants import (
-    CREATED_BY_COLUMN_NAME,
-    LAST_MODIFIED_BY_COLUMN_NAME,
-    ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
-)
 from .constants import ROW_IMPORT_CREATION, ROW_IMPORT_VALIDATION
 from .error_report import RowErrorReport
-from .exceptions import RowDoesNotExist, RowIdsNotUnique
+from .exceptions import InvalidRowLength, RowDoesNotExist, RowIdsNotUnique
 from .operations import (
     DeleteDatabaseRowOperationType,
     MoveRowDatabaseRowOperationType,

@@ -77,19 +84,23 @@ from .signals import (
     rows_deleted,
     rows_updated,
 )
+from .types import (
+    CreatedRowsData,
+    FieldsMetadata,
+    FileImportConfiguration,
+    GeneratedTableModelForUpdate,
+    RowId,
+    RowsForUpdate,
+    UpdatedRowsData,
+)

 if TYPE_CHECKING:
+    from django.db.backends.utils import CursorWrapper
+
     from baserow.contrib.database.fields.models import Field

 tracer = trace.get_tracer(__name__)

-GeneratedTableModelForUpdate = NewType(
-    "GeneratedTableModelForUpdate", GeneratedTableModel
-)
-
-RowsForUpdate = NewType("RowsForUpdate", QuerySet)
-
-
 BATCH_SIZE = 1024

 meter = metrics.get_meter(__name__)

@@ -139,29 +150,18 @@ def prepare_field_errors(field_errors):
     }


-FieldsMetadata = NewType("FieldsMetadata", Dict[str, Any])
-RowValues = NewType("RowValues", Dict[str, Any])
-RowId = NewType("RowId", int)
-
-
-class UpdatedRowsWithOldValuesAndMetadata(NamedTuple):
-    updated_rows: List[GeneratedTableModelForUpdate]
-    original_rows_values_by_id: Dict[RowId, RowValues]
-    updated_fields_metadata_by_row_id: Dict[RowId, FieldsMetadata]
-
-
 class RowM2MChangeTracker:
     def __init__(self):
         self._deleted_m2m_rels: Dict[
-            str, Dict["Field", Dict[GeneratedTableModel, Set[int]]]
+            str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
         ] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
         self._created_m2m_rels: Dict[
-            str, Dict["Field", Dict[GeneratedTableModel, Set[int]]]
+            str, Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]
         ] = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))

     def track_m2m_update_for_field_and_row(
         self,
-        field: "Field",
+        field: "DjangoField",
         field_name: str,
         row: GeneratedTableModel,
         new_values: Iterable[int],

@@ -181,7 +181,7 @@ class RowM2MChangeTracker:
     def track_m2m_created_for_new_row(
         self,
         row: GeneratedTableModel,
-        field: "Field",
+        field: "DjangoField",
         new_values: Iterable[Union[int, Model]],
     ):
         field_type = field_type_registry.get_by_model(field)

@@ -197,7 +197,7 @@ class RowM2MChangeTracker:
     def get_created_m2m_rels_per_field_for_type(
         self, field_type
-    ) -> Dict["Field", Dict[GeneratedTableModel, Set[int]]]:
+    ) -> Dict["DjangoField", Dict[GeneratedTableModel, Set[int]]]:
         return self._created_m2m_rels[field_type]

     def get_deleted_link_row_rels_for_update_collector(

@@ -816,6 +816,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             send_webhook_events=send_webhook_events,
             rows_values_refreshed_from_db=False,
             m2m_change_tracker=m2m_change_tracker,
+            fields=fields,
+            dependant_fields=dependant_fields,
         )

         return instance

@@ -1005,6 +1007,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             before_return=before_return,
             updated_field_ids=updated_field_ids,
             m2m_change_tracker=m2m_change_tracker,
+            fields=[f for f in updated_fields if f.id in updated_field_ids],
+            dependant_fields=dependant_fields,
         )

         return row

@@ -1017,7 +1021,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         updated_field_ids: Set[int],
         m2m_change_tracker: Optional[RowM2MChangeTracker] = None,
         skip_search_updates: bool = False,
-    ) -> List["Field"]:
+    ) -> List["DjangoField"]:
         """
         Prepares a list of fields that are dependent on the updated fields and updates
         them.

@@ -1084,7 +1088,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_webhook_events: bool = True,
         generate_error_report: bool = False,
         skip_search_update: bool = False,
-    ) -> List[GeneratedTableModel]:
+    ) -> CreatedRowsData:
         """
         Creates new rows for a given table without checking permissions. It also calls
         the rows_created signal.

@@ -1215,11 +1219,11 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             send_webhook_events=send_webhook_events,
             prepared_rows_values=prepared_rows_values,
             m2m_change_tracker=m2m_change_tracker,
+            fields=updated_fields,
+            dependant_fields=dependant_fields,
         )

-        if generate_error_report:
-            return inserted_rows, report
-        return rows_to_return
+        return CreatedRowsData(rows_to_return, report)

     def create_rows(
         self,

@@ -1232,7 +1236,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_webhook_events: bool = True,
         generate_error_report: bool = False,
         skip_search_update: bool = False,
-    ) -> List[GeneratedTableModel]:
+    ) -> CreatedRowsData:
         """
         Creates new rows for a given table if the user
         belongs to the related workspace. It also calls the rows_created signal.

@@ -1283,7 +1287,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         self,
         model: Type[GeneratedTableModel],
         created_rows: List[GeneratedTableModel],
-    ) -> List["Field"]:
+    ) -> List["DjangoField"]:
         """
         Generates a list of dependant fields that need to be updated after the rows have
         been created and updates them.

@@ -1437,11 +1441,11 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):

         return report

-    def create_rows_by_batch(
+    def force_create_rows_by_batch(
         self,
         user: AbstractUser,
         table: Table,
-        rows: List[Dict[str, Any]],
+        rows_values: List[Dict[str, Any]],
         progress: Optional[Progress] = None,
         model: Optional[Type[GeneratedTableModel]] = None,
     ) -> Tuple[List[GeneratedTableModel], Dict[str, Dict[str, Any]]]:

@@ -1451,13 +1455,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):

         :param user: The user on whose behalf the rows are created.
         :param table: The table for which the rows should be created.
-        :param rows: List of rows values for rows that need to be created.
+        :param rows_values: List of rows values for rows that need to be created.
         :param progress: Give a progress instance to track the progress of the import.
         :param model: Optional model to prevent recomputing table model.
         :return: The created rows and the error report.
         """

-        if not rows:
+        if not rows_values:
             return [], {}

         if progress:

@@ -1468,7 +1472,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):

         report = {}
         all_created_rows = []
-        for count, chunk in enumerate(grouper(BATCH_SIZE, rows)):
+        for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
             row_start_index = count * BATCH_SIZE
             created_rows, creation_report = self.create_rows(
                 user=user,

@@ -1497,11 +1501,64 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):

         return all_created_rows, report

+    def force_update_rows_by_batch(
+        self,
+        user: AbstractUser,
+        table: Table,
+        rows_values: List[Dict[str, Any]],
+        progress: Progress,
+        model: Optional[Type[GeneratedTableModel]] = None,
+    ) -> Tuple[List[Dict[str, Any] | None], Dict[str, Dict[str, Any]]]:
+        """
+        Updates rows by batch and generates an error report instead of failing on
+        the first error.
+
+        :param user: The user on whose behalf the rows are updated.
+        :param table: The table for which the rows should be updated.
+        :param rows_values: List of rows values for rows that need to be updated.
+        :param progress: Give a progress instance to track the progress of the import.
+        :param model: Optional model to prevent recomputing table model.
+        :return: The updated rows and the error report.
+        """
+
+        if not rows_values:
+            return [], {}
+
+        progress.increment(state=ROW_IMPORT_CREATION)
+
+        if model is None:
+            model = table.get_model()
+
+        report = {}
+        all_updated_rows = []
+        for count, chunk in enumerate(grouper(BATCH_SIZE, rows_values)):
+            updated_rows = self.force_update_rows(
+                user=user,
+                table=table,
+                model=model,
+                rows_values=chunk,
+                send_realtime_update=False,
+                send_webhook_events=False,
+                # Don't trigger loads of search updates for every batch of rows we
+                # create but instead a single one for this entire table at the end.
+                skip_search_update=True,
+                generate_error_report=True,
+            )
+
+            if progress:
+                progress.increment(len(chunk))
+            report.update(updated_rows.errors)
+            all_updated_rows.extend(updated_rows.updated_rows)
+
+        SearchHandler.field_value_updated_or_created(table)
+        return all_updated_rows, report
+
     def import_rows(
         self,
         user: AbstractUser,
         table: Table,
-        data: List[List[Any]],
+        data: list[list[Any]],
+        configuration: FileImportConfiguration | None = None,
         validate: bool = True,
         progress: Optional[Progress] = None,
         send_realtime_update: bool = True,
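Both *_by_batch methods above rely on grouper(BATCH_SIZE, ...) to walk the input in fixed-size chunks while reporting errors relative to a per-batch offset. A self-contained illustration with a minimal stand-in for baserow.core.utils.grouper:

```python
from itertools import islice

def grouper(n, iterable):
    # Minimal stand-in for baserow.core.utils.grouper: yields n-sized chunks.
    it = iter(iterable)
    while chunk := list(islice(it, n)):
        yield chunk

rows = [{"field_1": i} for i in range(2500)]
row_start_index = 0
for count, chunk in enumerate(grouper(1024, rows)):
    row_start_index = count * 1024  # errors are reported relative to this offset
assert row_start_index == 2048  # three chunks: 1024, 1024 and 452 rows
```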
@@ -1517,12 +1574,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         :param user: The user on whose behalf the rows are created.
         :param table: The table for which the rows should be created.
         :param data: List of rows values for rows that need to be created.
+        :param configuration: Optional import configuration dict.
         :param validate: If True, the data is validated before the import.
         :param progress: Give a progress instance to track the progress of the
             import.
         :param send_realtime_update: The parameter passed to the rows_created
             signal indicating if a realtime update should be sent.
+        :raises InvalidRowLength: When an upsert values row doesn't contain one
+            value per configured upsert field.
         :return: The created row instances and the error report.
         """

@@ -1535,6 +1595,15 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         )

         error_report = RowErrorReport(data)
+        configuration = configuration or {}
+        update_handler = UpsertRowsMappingHandler(
+            table=table,
+            upsert_fields=configuration.get("upsert_fields") or [],
+            upsert_values=configuration.get("upsert_values") or [],
+        )
+        # Pre-run upsert configuration validation.
+        # Can raise InvalidRowLength.
+        update_handler.validate()

         model = table.get_model()
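A hedged sketch of calling import_rows with the new upsert configuration; the field id is illustrative. Rows whose upsert values match an existing row are updated, the rest are created:

```python
# Illustrative call; validate() above raises InvalidRowLength if an entry in
# upsert_values does not have one value per upsert field.
created_rows, error_report = RowHandler().import_rows(
    user,
    table,
    data=[["alice@example.com", "Alice"], ["bob@example.com", "Bob"]],
    configuration={
        "upsert_fields": [123],
        "upsert_values": [["alice@example.com"], ["bob@example.com"]],
    },
)
```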
@@ -1599,10 +1668,40 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             else None
         )

-        created_rows, creation_report = self.create_rows_by_batch(
-            user, table, valid_rows, progress=creation_sub_progress, model=model
+        # split rows to insert and update lists. If there's no upsert field selected,
+        # this will not populate rows_values_to_update.
+        update_map = update_handler.process_map
+
+        rows_values_to_create = []
+        rows_values_to_update = []
+        if update_map:
+            for current_idx, import_idx in original_row_index_mapping.items():
+                row = valid_rows[current_idx]
+                if update_idx := update_map.get(import_idx):
+                    row["id"] = update_idx
+                    rows_values_to_update.append(row)
+                else:
+                    rows_values_to_create.append(row)
+        else:
+            rows_values_to_create = valid_rows
+
+        created_rows, creation_report = self.force_create_rows_by_batch(
+            user,
+            table,
+            rows_values_to_create,
+            progress=creation_sub_progress,
+            model=model,
         )

+        if rows_values_to_update:
+            updated_rows, updated_report = self.force_update_rows_by_batch(
+                user,
+                table,
+                rows_values_to_update,
+                progress=creation_sub_progress,
+                model=model,
+            )
+
         # Add errors to global report
         for index, error in creation_report.items():
             error_report.add_error(
@@ -1610,6 +1709,13 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
                 error,
             )

+        if rows_values_to_update:
+            for index, error in updated_report.items():
+                error_report.add_error(
+                    original_row_index_mapping[int(index)],
+                    error,
+                )
+
         if send_realtime_update:
             # Just send a single table_updated here as realtime update instead
             # of rows_created because we might import a lot of rows.
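A worked illustration of the create/update split performed in the previous hunk, runnable on its own: assume process_map matched import row 0 to existing row id 7 and found no match for row 1.

```python
valid_rows = [{"field_1": "a"}, {"field_1": "b"}]
original_row_index_mapping = {0: 0, 1: 1}
update_map = {0: 7}  # import index -> existing row id

rows_values_to_create, rows_values_to_update = [], []
for current_idx, import_idx in original_row_index_mapping.items():
    row = valid_rows[current_idx]
    if update_idx := update_map.get(import_idx):
        row["id"] = update_idx  # route to the update path with its id set
        rows_values_to_update.append(row)
    else:
        rows_values_to_create.append(row)

assert rows_values_to_update == [{"field_1": "a", "id": 7}]
assert rows_values_to_create == [{"field_1": "b"}]
```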
@@ -1620,7 +1726,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
     def get_fields_metadata_for_row_history(
         self,
         row: GeneratedTableModelForUpdate,
-        updated_fields: List["Field"],
+        updated_fields: List["DjangoField"],
         metadata,
     ) -> FieldsMetadata:
         """

@@ -1642,7 +1748,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
     def get_fields_metadata_for_rows(
         self,
         rows: List[GeneratedTableModelForUpdate],
-        updated_fields: List["Field"],
+        updated_fields: List["DjangoField"],
         fields_metadata_by_row_id=None,
     ) -> Dict[RowId, FieldsMetadata]:
         """

@@ -1678,7 +1784,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_realtime_update: bool = True,
         send_webhook_events: bool = True,
         skip_search_update: bool = False,
-    ) -> UpdatedRowsWithOldValuesAndMetadata:
+        generate_error_report: bool = False,
+    ) -> UpdatedRowsData:
         """
         Updates field values in batch based on provided rows with the new
         values.

@@ -1698,6 +1805,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         :param skip_search_update: If you want to instead trigger the search handler
             cells update later on after many create_rows calls then set this to True
             but make sure you trigger it eventually.
+        :param generate_error_report: Generate an error report if set to True.
         :raises RowIdsNotUnique: When trying to update the same row multiple
             times.
         :raises RowDoesNotExist: When any of the rows don't exist.

@@ -1710,9 +1818,12 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         user_id = user and user.id

-        prepared_rows_values, _ = self.prepare_rows_in_bulk(
-            model._field_objects, rows_values
+        prepared_rows_values, errors = self.prepare_rows_in_bulk(
+            model._field_objects,
+            rows_values,
+            generate_error_report=generate_error_report,
         )
+        report = {index: err for index, err in errors.items()}
         row_ids = [r["id"] for r in prepared_rows_values]

         non_unique_ids = get_non_unique_values(row_ids)

@@ -1875,7 +1986,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             if (
                 not isinstance(model_field, ManyToManyField)
                 and field_id in updated_field_ids
-                and field_type.valid_for_bulk_update(model_field)
+                and field_type.valid_for_bulk_update(field_obj["field"])
             ):
                 bulk_update_fields.append(field_name)

@@ -1895,6 +2006,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         if not skip_search_update:
             SearchHandler.field_value_updated_or_created(table)

+        # Reload rows from the database to get the updated values for formulas
         updated_rows_to_return = list(
             model.objects.all().enhance_by_fields().filter(id__in=row_ids)
         )

@@ -1910,18 +2022,22 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             m2m_change_tracker=m2m_change_tracker,
             send_realtime_update=send_realtime_update,
             send_webhook_events=send_webhook_events,
+            fields=[f for f in updated_fields if f.id in updated_field_ids],
+            dependant_fields=dependant_fields,
         )

         fields_metadata_by_row_id = self.get_fields_metadata_for_rows(
             updated_rows_to_return, updated_fields, fields_metadata_by_row_id
         )

-        return UpdatedRowsWithOldValuesAndMetadata(
+        updated_rows = UpdatedRowsData(
             updated_rows_to_return,
             original_row_values_by_id,
             fields_metadata_by_row_id,
+            report,
         )

+        return updated_rows
+
     def update_rows(
         self,
         user: AbstractUser,
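A hedged sketch of consuming the new UpdatedRowsData result; attribute names as used in this diff, updated_rows and errors:

```python
# Illustrative only; user, table and rows_values come from the caller.
result = RowHandler().update_rows(
    user, table, rows_values, generate_error_report=True
)
for row in result.updated_rows:
    ...  # refreshed row instances, including recalculated formula values
if result.errors:
    ...  # per-row validation errors instead of a hard failure
```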
@@ -1932,7 +2048,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
         send_realtime_update: bool = True,
         send_webhook_events: bool = True,
         skip_search_update: bool = False,
-    ) -> UpdatedRowsWithOldValuesAndMetadata:
+        generate_error_report: bool = False,
+    ) -> UpdatedRowsData:
         """
         Updates field values in batch based on provided rows with the new
         values.

@@ -1975,6 +2092,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             send_realtime_update,
             send_webhook_events,
             skip_search_update,
+            generate_error_report=generate_error_report,
         )

     def get_rows(

@@ -2103,6 +2221,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             updated_field_ids=[],
             prepared_rows_values=None,
             send_webhook_events=send_webhook_events,
+            fields=[],
+            dependant_fields=dependant_fields,
         )

         return row

@@ -2204,6 +2324,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             before_return=before_return,
             send_realtime_update=send_realtime_update,
             send_webhook_events=send_webhook_events,
+            fields=updated_fields,
+            dependant_fields=dependant_fields,
         )

     def update_dependencies_of_rows_deleted(self, table, row, model):

@@ -2265,7 +2387,6 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             triggered. Defaults to true.
         :param permanently_delete: If `true` the rows will be permanently deleted
             instead of trashed.
         :raises RowDoesNotExist: When the row with the provided id does not exist.
         """
-
         workspace = table.database.workspace

@@ -2275,8 +2396,46 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             workspace=workspace,
             context=table,
         )
-
-        if not model:
+        return self.force_delete_rows(
+            user,
+            table,
+            row_ids,
+            model=model,
+            send_realtime_update=send_realtime_update,
+            send_webhook_events=send_webhook_events,
+            permanently_delete=permanently_delete,
+        )
+
+    def force_delete_rows(
+        self,
+        user: AbstractUser,
+        table: Table,
+        row_ids: List[int],
+        model: Optional[Type[GeneratedTableModel]] = None,
+        send_realtime_update: bool = True,
+        send_webhook_events: bool = True,
+        permanently_delete: bool = False,
+    ) -> TrashedRows:
+        """
+        Trashes existing rows of the given table based on row_ids, without checking
+        user permissions.
+
+        :param user: The user on whose behalf the change is made.
+        :param table: The table for which the row must be deleted.
+        :param row_ids: The ids of the rows that must be deleted.
+        :param model: If the correct model has already been generated, it can be
+            provided so that it does not have to be generated for a second time.
+        :param send_realtime_update: If set to false then it is up to the caller to
+            send the rows_created or similar signal. Defaults to True.
+        :param send_webhook_events: If set to false then the webhooks will not be
+            triggered. Defaults to true.
+        :param permanently_delete: If `true` the rows will be permanently deleted
+            instead of trashed.
+        :raises RowDoesNotExist: When the row with the provided id does not exist.
+        """
+
+        workspace = table.database.workspace
+        if model is None:
             model = table.get_model()

         non_unique_ids = get_non_unique_values(row_ids)

@@ -2310,9 +2469,7 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):

         TrashHandler.trash(user, workspace, table.database, trashed_rows)

-        rows_deleted_counter.add(
-            len(row_ids),
-        )
+        rows_deleted_counter.add(len(row_ids))

         updated_field_ids = []
         updated_fields = []

@@ -2359,6 +2516,8 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             before_return=before_return,
             send_realtime_update=send_realtime_update,
             send_webhook_events=send_webhook_events,
+            fields=updated_fields,
+            dependant_fields=dependant_fields,
        )

         return trashed_rows

@@ -2386,3 +2545,233 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
             self,
             table=table,
         )
+
+
+def merge_values_expression(
+    row: list[str | int | float | None],
+    field_handlers: "list[UpsertFieldHandler]",
+    query_params: list,
+) -> sql.Composable:
+    """
+    Creates a SQL expression that produces a text value from a list of row values.
+    Any value that should be interpolated is appended to the provided
+    `query_params` list.
+
+    :param row: a list of values in a row
+    :param field_handlers: a list of field handlers for a row. The number of
+        handlers should equal the number of values in a row.
+    :param query_params: param values container
+    :return: the composed SQL expression
+    """
+
+    fields = []
+
+    for val, field_handler in zip(row, field_handlers):
+        fields.append(field_handler.get_field_concat_expression())
+        query_params.append(field_handler.prepare_value(val))
+
+    return UpsertRowsMappingHandler.SEPARATOR.join(fields)
+
+
+class UpsertFieldHandler:
+    """
+    Helper class to handle a field's upsert handling.
+    """
+
+    def __init__(self, table: Table, field_id: int):
+        self.table = table
+        # TODO: here we are using field id, but it may be so the field_id
+        # is `'id'` string.
+        try:
+            self._field_def = field_def = next(
+                (
+                    f
+                    for f in table.get_model().get_field_objects()
+                    if f["field"].id == field_id
+                )
+            )
+        except StopIteration:
+            raise FieldNotInTable(field_id)
+
+        self.field: Field = field_def["field"]
+        self.field_type: FieldType = field_def["type"]
+        if not self.field_type.can_upsert:
+            raise IncompatibleField(self.field.id)
+        self.field_name = self.field.db_column
+
+    def prepare_value(self, value: str) -> Any:
+        return self.field_type.prepare_value_for_db(self.field, value)
+
+    def get_field_concat_expression(self) -> sql.Composable:
+        column_type = sql.SQL(self.get_column_type() or "text")
+        return sql.SQL(" COALESCE(CAST({}::{} AS TEXT), '<NULL>')::TEXT ").format(
+            sql.Placeholder(), column_type
+        )
+
+    def get_column_type(self) -> str | None:
+        table_field: DjangoField = self.field_type.get_model_field(self.field)
+        return table_field.db_type(db.connection)
+
+
+class UpsertRowsMappingHandler:
+    """
+    Helper class for mapping new rows values to existing table rows during an upsert
+    operation.
+
+    This class processes upsert values from the provided data and matches them with
+    existing row IDs in the database. The resulting mapping helps determine which
+    imported rows should update existing ones.
+
+    ### Usage:
+
+    >>> importrows = UpsertRowsMappingHandler(table, [1234], [['a'], ['b']])
+
+    # Returns a dictionary where:
+    # - Keys represent the index of the upsert values in the imported dataset.
+    # - Values represent the corresponding row ID in the database.
+    >>> importrows.process_map
+    {0: 1, 1: 2}
+
+    # In this example:
+    # - The first imported value ['a'] (index 0) corresponds to the row with ID 1.
+    # - The second imported value ['b'] (index 1) corresponds to the row with ID 2.
+    """
+
+    SEPARATOR = sql.SQL(" || '__-__' || ")
+    PER_CHUNK = 100
+
+    def __init__(
+        self, table: Table, upsert_fields: list[int], upsert_values: list[list[Any]]
+    ):
+        self.table = table
+        self.table_name = table.get_database_table_name()
+        self.import_fields = [UpsertFieldHandler(table, fidx) for fidx in upsert_fields]
+        self.upsert_values = upsert_values
+
+    def validate(self):
+        """
+        Validates that the upsert configuration conforms to formal requirements.
+
+        :raises InvalidRowLength:
+        """
+
+        expected_length = len(self.import_fields)
+        for ridx, uval in enumerate(self.upsert_values):
+            if len(uval) != expected_length:
+                raise InvalidRowLength(ridx)
+
+    @cached_property
+    def process_map(self) -> dict[int, int]:
+        """
+        Calculates a map between import row indexes and table row ids.
+        """
+
+        # no upsert value fields, no need for mapping
+        if not self.import_fields:
+            return {}
+
+        script_template = sql.SQL(
+            """
+            CREATE TEMP TABLE table_upsert_indexes (id INT, upsert_value TEXT, group_index INT);
+
+            CREATE TEMP TABLE table_import (id INT, upsert_value TEXT);
+
+            CREATE TEMP VIEW table_import_indexes AS
+                SELECT id, upsert_value, RANK()
+                    OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
+                    AS group_index
+                FROM table_import ORDER BY id ;
+            """
+        )
+
+        self.execute(script_template)
+        self.insert_table_values()
+        self.insert_imported_values()
+        # this is just a list of pairs, not very usable.
+        calculated = self.calculate_map()
+
+        # map import row idx -> update row_id in table
+        return {r[1]: r[0] for r in calculated}
+
+    @cached_property
+    def connection(self):
+        return db.connection
+
+    @cached_property
+    def cursor(self):
+        return self.connection.cursor()
+
+    def execute(self, query, *args, **kwargs) -> "CursorWrapper":
+        self.cursor.execute(query, *args, **kwargs)
+        return self.cursor
+
+    def insert_table_values(self):
+        """
+        Populates the temp upsert comparison table with values from the existing
+        table. Values from multiple source columns will be normalized to one text
+        value.
+        """
+
+        columns = self.SEPARATOR.join(
+            [
+                sql.SQL("COALESCE(CAST({} AS TEXT), '<NULL>')::TEXT").format(
+                    sql.Identifier(field.field_name)
+                )
+                for field in self.import_fields
+            ]
+        )
+
+        query = sql.SQL(
+            """WITH subq AS (SELECT r.id, {} AS upsert_value FROM {} r WHERE NOT trashed)
+            INSERT INTO table_upsert_indexes (id, upsert_value, group_index)
+                SELECT id, upsert_value, RANK()
+                    OVER (PARTITION BY upsert_value ORDER BY id, upsert_value )
+                    AS group_index
+                FROM subq ORDER BY id """
+        ).format(
+            columns, sql.Identifier(self.table_name)
+        )  # nosec B608
+
+        self.execute(query)
+
+    def insert_imported_values(self):
+        """
+        Builds and executes bulk insert queries for upsert comparison values
+        from import data.
+        """
+
+        for _chunk in chunks(enumerate(self.upsert_values), self.PER_CHUNK):
+            # put all params (processed values) for the query into a container
+            query_params = []
+            rows_query = []
+            for rowidx, row in _chunk:
+                # per-row insert query
+                query_params.append(rowidx)
+                row_to_add = sql.SQL("({}, {})").format(
+                    sql.Placeholder(),
+                    merge_values_expression(row, self.import_fields, query_params),
+                )
+                rows_query.append(row_to_add)
+
+            rows_placeholder = sql.SQL(",\n").join(rows_query)
+            script_template = sql.SQL(
+                "INSERT INTO table_import (id, upsert_value) VALUES {};"
+            ).format(
+                rows_placeholder
+            )  # nosec B608
+            self.execute(script_template, query_params)
+
+    def calculate_map(self) -> list[tuple[int, int]]:
+        """
+        Calculates a map between imported row index -> table row id
+        that can be used to detect if a row that is imported should be updated
+        (mapping exists) or inserted as a new one.
+        """
+
+        q = sql.SQL(
+            """
+            SELECT t.id, i.id
+            FROM table_upsert_indexes t
+            JOIN table_import_indexes i
+                ON (i.upsert_value = t.upsert_value
+                    AND i.group_index = t.group_index);
+            """
+        )
+        return self.execute(q).fetchall()
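The RANK() ... PARTITION BY upsert_value trick above pairs duplicate values positionally: the first duplicate in the table matches the first duplicate in the import, and so on. A self-contained Python illustration of the same pairing:

```python
table_rows = [(1, "x"), (2, "x"), (3, "y")]    # (row id, upsert_value)
import_rows = [(0, "x"), (1, "x"), (2, "z")]   # (import index, upsert_value)

def with_group_index(pairs):
    # Mirrors RANK() OVER (PARTITION BY upsert_value ORDER BY id).
    seen = {}
    out = []
    for key, value in pairs:
        seen[value] = seen.get(value, 0) + 1
        out.append((key, value, seen[value]))
    return out

lookup = {(value, grp): rid for rid, value, grp in with_group_index(table_rows)}
mapping = {
    idx: lookup[(value, grp)]
    for idx, value, grp in with_group_index(import_rows)
    if (value, grp) in lookup
}
assert mapping == {0: 1, 1: 2}  # same shape as the process_map docstring example
```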
@@ -1,4 +1,7 @@
+from collections import defaultdict
+from dataclasses import dataclass
 from datetime import datetime
+from itertools import groupby
 from typing import Any, Dict, List, NamedTuple, NewType, Optional
 
 from django.conf import settings
@@ -21,6 +24,19 @@ tracer = trace.get_tracer(__name__)
 
 FieldName = NewType("FieldName", str)
 
+# Dict of table_id -> row_id -> field_name ->
+# {added: List[row_id], removed: List[row_id], metadata: Dict}
+RelatedRowsDiff = Dict[int, Dict[int, Dict[str, Dict[str, Any]]]]
+
+
+@dataclass
+class ActionData:
+    uuid: str
+    type: str
+    timestamp: datetime
+    command_type: ActionCommandType
+    params: Dict[str, Any]
+
+
 class RowChangeDiff(NamedTuple):
     """
@@ -29,6 +45,8 @@ class RowChangeDiff(NamedTuple):
     and after values of those fields.
     """
 
+    row_id: int
+    table_id: int
     changed_field_names: List[FieldName]
     before_values: Dict[FieldName, Any]
     after_values: Dict[FieldName, Any]
@@ -38,38 +56,34 @@ class RowHistoryHandler:
     @classmethod
     def _construct_entry_from_action_and_diff(
         cls,
-        user,
-        table_id,
-        row_id,
-        field_names,
-        row_fields_metadata,
-        action_type,
-        action_uuid,
-        action_timestamp,
-        action_command_type,
-        diff,
+        user: AbstractBaseUser,
+        action: ActionData,
+        fields_metadata: Dict[str, Any],
+        row_diff: RowChangeDiff,
     ):
         return RowHistory(
             user_id=user.id,
             user_name=user.first_name,
-            table_id=table_id,
-            row_id=row_id,
-            field_names=field_names,
-            fields_metadata=row_fields_metadata,
-            action_uuid=action_uuid,
-            action_command_type=action_command_type.value,
-            action_timestamp=action_timestamp,
-            action_type=action_type,
-            before_values=diff.before_values,
-            after_values=diff.after_values,
+            table_id=row_diff.table_id,
+            row_id=row_diff.row_id,
+            field_names=row_diff.changed_field_names,
+            fields_metadata=fields_metadata,
+            action_uuid=action.uuid,
+            action_command_type=action.command_type.value,
+            action_timestamp=action.timestamp,
+            action_type=action.type,
+            before_values=row_diff.before_values,
+            after_values=row_diff.after_values,
         )
 
     @classmethod
     def _extract_row_diff(
         cls,
+        table_id: int,
+        row_id: int,
+        fields_metadata: Dict[str, Any],
         before_values: Dict[str, Any],
         after_values: Dict[str, Any],
-        fields_metadata,
     ) -> Optional[RowChangeDiff]:
         """
         Extracts the fields that have changed between the before and after values of a
@@ -94,18 +108,20 @@ class RowHistoryHandler:
         before_fields = {
             k: field_type_registry.get(
                 fields_metadata[k]["type"]
-            ).prepare_row_history_value_from_action_meta_data(v)
+            ).prepare_value_for_row_history(v)
             for k, v in before_values.items()
             if k in changed_fields
         }
         after_fields = {
             k: field_type_registry.get(
                 fields_metadata[k]["type"]
-            ).prepare_row_history_value_from_action_meta_data(v)
+            ).prepare_value_for_row_history(v)
             for k, v in after_values.items()
            if k in changed_fields
         }
-        return RowChangeDiff(list(changed_fields), before_fields, after_fields)
+        return RowChangeDiff(
+            row_id, table_id, list(changed_fields), before_fields, after_fields
+        )
 
     @classmethod
     def _raise_if_ids_mismatch(cls, before_values, after_values, fields_metadata):
@@ -120,62 +136,205 @@ class RowHistoryHandler:
                 "are the same. This should never happen."
             )
 
+    @classmethod
+    def _update_related_tables_entries(
+        cls,
+        related_rows_diff: RelatedRowsDiff,
+        fields_metadata: Dict[str, Any],
+        row_diff: RowChangeDiff,
+    ) -> RelatedRowsDiff:
+        """
+        Updates the record of changes in related tables when link_row fields are
+        modified.
+
+        When a row's link_row field is updated (adding or removing connections to rows
+        in another table), this method tracks those changes from the perspective of the
+        rows in the related table, so that history can be properly displayed for both
+        sides of the relationship.
+
+        The method updates related_rows_diff in-place, maintaining a record of which
+        rows were added or removed from each link relationship.
+
+        :param related_rows_diff: Nested dictionary tracking changes for each affected
+            related row
+        :param fields_metadata: Metadata about the fields that were changed in
+            this update
+        :param row_diff: The changes made to the current row, including before/after
+            values
+        :return: The updated related_rows_diff dictionary
+        """
+
+        def _init_linked_row_diff(linked_field_id):
+            return {
+                "added": [],
+                "removed": [],
+                "metadata": {
+                    "id": linked_field_id,
+                    "type": "link_row",
+                    "linked_rows": {},
+                },
+            }
+
+        def _update_linked_row_diff(
+            field_metadata: Dict[str, Any], row_ids_set: set[int], key: str
+        ):
+            linked_table_id = field_metadata["linked_table_id"]
+            linked_field_id = field_metadata["linked_field_id"]
+            linked_field_name = f"field_{linked_field_id}"
+
+            for linked_row_id in row_ids_set:
+                linked_diff = related_rows_diff[linked_table_id][linked_row_id][
+                    linked_field_name
+                ]
+                if not linked_diff:
+                    linked_diff = _init_linked_row_diff(linked_field_id)
+                    related_rows_diff[linked_table_id][linked_row_id][
+                        linked_field_name
+                    ] = linked_diff
+                linked_diff[key].append(row_id)
+                linked_diff["metadata"]["linked_rows"][row_id] = {
+                    "value": field_metadata["primary_value"]
+                }
+
+        row_id = row_diff.row_id
+        for field_name in row_diff.changed_field_names:
+            field_metadata = fields_metadata[field_name]
+
+            # Ignore fields that are not link_row fields or that don't have a related
+            # field in the linked table.
+            if (
+                field_metadata["type"] != "link_row"
+                or not field_metadata["linked_field_id"]
+            ):
+                continue
+
+            after_set = set(row_diff.after_values[field_name])
+            before_set = set(row_diff.before_values[field_name])
+
+            row_ids_added = after_set - before_set
+            _update_linked_row_diff(field_metadata, row_ids_added, "added")
+
+            row_ids_removed = before_set - after_set
+            _update_linked_row_diff(field_metadata, row_ids_removed, "removed")
+
+        return related_rows_diff
+
+    @classmethod
+    def _construct_related_rows_entries(
+        cls,
+        related_rows_diff: RelatedRowsDiff,
+        user: AbstractBaseUser,
+        action: ActionData,
+    ) -> List[RowHistory]:
+        """
+        Creates RowHistory entries for rows in related tables that were affected by
+        changes to the current row. Specifically, when a link_row field is updated,
+        this method ensures that the changes are also tracked from the perspective of
+        the related rows.
+
+        :param related_rows_diff: A nested dictionary that tracks changes for each
+            affected related row. It includes details about rows added or removed
+            from link_row relationships.
+        :param user: The user who performed the action that triggered the changes.
+        :param action: The action metadata that describes the operation performed.
+        :return: A list of RowHistory entries representing the changes for the
+            related rows.
+        """
+
+        entries = []
+        for linked_table_id, table_changes in related_rows_diff.items():
+            for linked_row_id, row_changes in table_changes.items():
+                field_names = list(row_changes.keys())
+                fields_metadata, before_values, after_values = {}, {}, {}
+
+                for field_name in field_names:
+                    row_field_changes = row_changes[field_name]
+                    fields_metadata[field_name] = row_field_changes["metadata"]
+                    before_values[field_name] = row_field_changes["removed"]
+                    after_values[field_name] = row_field_changes["added"]
+
+                linked_entry = RowHistory(
+                    user_id=user.id,
+                    user_name=user.first_name,
+                    table_id=linked_table_id,
+                    row_id=linked_row_id,
+                    field_names=field_names,
+                    fields_metadata=fields_metadata,
+                    action_uuid=action.uuid,
+                    action_command_type=action.command_type.value,
+                    action_timestamp=action.timestamp,
+                    action_type=action.type,
+                    before_values=before_values,
+                    after_values=after_values,
+                )
+                entries.append(linked_entry)
+        return entries
+
     @classmethod
     @baserow_trace(tracer)
     def record_history_from_update_rows_action(
         cls,
         user: AbstractBaseUser,
-        action_uuid: str,
-        action_params: Dict[str, Any],
-        action_timestamp: datetime,
-        action_command_type: ActionCommandType,
+        action: ActionData,
     ):
-        params = UpdateRowsActionType.serialized_to_params(action_params)
+        params = UpdateRowsActionType.serialized_to_params(action.params)
+        table_id = params.table_id
         after_values = params.row_values
         before_values = [
             params.original_rows_values_by_id[r["id"]] for r in after_values
         ]
 
-        if action_command_type == ActionCommandType.UNDO:
+        if action.command_type == ActionCommandType.UNDO:
             before_values, after_values = after_values, before_values
 
         row_history_entries = []
+        related_rows_diff: RelatedRowsDiff = defaultdict(
+            lambda: defaultdict(lambda: defaultdict(dict))
+        )
         for i, after in enumerate(after_values):
             before = before_values[i]
             fields_metadata = params.updated_fields_metadata_by_row_id[after["id"]]
             cls._raise_if_ids_mismatch(before, after, fields_metadata)
 
-            diff = cls._extract_row_diff(before, after, fields_metadata)
-            if diff is None:
+            row_id = after["id"]
+            row_diff = cls._extract_row_diff(
+                table_id, row_id, fields_metadata, before, after
+            )
+            if row_diff is None:
                 continue
 
             changed_fields_metadata = {
                 k: v
                 for k, v in fields_metadata.items()
-                if k in diff.changed_field_names
+                if k in row_diff.changed_field_names
             }
-            row_id = after["id"]
 
             entry = cls._construct_entry_from_action_and_diff(
                 user,
-                params.table_id,
-                row_id,
-                diff.changed_field_names,
+                action,
                 changed_fields_metadata,
-                UpdateRowsActionType.type,
-                action_uuid,
-                action_timestamp,
-                action_command_type,
-                diff,
+                row_diff,
             )
             row_history_entries.append(entry)
+            cls._update_related_tables_entries(
+                related_rows_diff, changed_fields_metadata, row_diff
+            )
+
+        related_entries = cls._construct_related_rows_entries(
+            related_rows_diff, user, action
+        )
+        row_history_entries.extend(related_entries)
 
         if row_history_entries:
             row_history_entries = RowHistory.objects.bulk_create(row_history_entries)
-            rows_history_updated.send(
-                RowHistoryHandler,
-                table_id=params.table_id,
-                row_history_entries=row_history_entries,
-            )
+            for table_id, per_table_row_history_entries in groupby(
+                row_history_entries, lambda e: e.table_id
+            ):
+                rows_history_updated.send(
                    RowHistoryHandler,
                    table_id=table_id,
                    row_history_entries=list(per_table_row_history_entries),
                )
 
     @classmethod
     @baserow_trace(tracer)
@@ -233,5 +392,12 @@ def on_action_done_update_row_history(
     if action_type and action_type.type in ROW_HISTORY_ACTIONS:
         add_entry_handler = ROW_HISTORY_ACTIONS[action_type.type]
         add_entry_handler(
-            user, action_uuid, action_params, action_timestamp, action_command_type
+            user,
+            ActionData(
+                action_uuid,
+                action_type.type,
+                action_timestamp,
+                action_command_type,
+                action_params,
+            ),
         )
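To make the shape of `RelatedRowsDiff` concrete, here is a standalone sketch of what `_update_related_tables_entries` records for a single link_row update (the table id, field id, and primary value are invented for the example):

```python
from collections import defaultdict

# table_id -> row_id -> field_name -> {added, removed, metadata}
related_rows_diff = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))

# Hypothetical metadata for a link_row field pointing at table 42,
# whose related field in that table is field 7.
field_metadata = {
    "type": "link_row",
    "linked_table_id": 42,
    "linked_field_id": 7,
    "primary_value": "Row A",
}

row_id = 10  # the row that was updated
before, after = {11, 12}, {12, 13}

for linked_row_id in after - before:  # rows newly linked
    diff = related_rows_diff[42][linked_row_id]["field_7"]
    diff.setdefault("added", []).append(row_id)
    diff.setdefault("metadata", {"id": 7, "type": "link_row", "linked_rows": {}})[
        "linked_rows"
    ][row_id] = {"value": field_metadata["primary_value"]}

for linked_row_id in before - after:  # rows unlinked
    related_rows_diff[42][linked_row_id]["field_7"].setdefault(
        "removed", []
    ).append(row_id)

# related_rows_diff[42][13]["field_7"]["added"] == [10]
# related_rows_diff[42][11]["field_7"]["removed"] == [10]
```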
backend/src/baserow/contrib/database/rows/types.py (normal file, 39 lines)
@@ -0,0 +1,39 @@
+import typing
+from typing import Any, NamedTuple, NewType
+
+from django.db.models import QuerySet
+
+from baserow.contrib.database.table.models import GeneratedTableModel
+
+GeneratedTableModelForUpdate = NewType(
+    "GeneratedTableModelForUpdate", GeneratedTableModel
+)
+
+RowsForUpdate = NewType("RowsForUpdate", QuerySet)
+
+
+class FileImportConfiguration(typing.TypedDict):
+    upsert_fields: list[int]
+    upsert_values: list[list[typing.Any]]
+
+
+class FileImportDict(typing.TypedDict):
+    data: list[list[typing.Any]]
+    configuration: FileImportConfiguration | None
+
+
+FieldsMetadata = NewType("FieldsMetadata", dict[str, Any])
+RowValues = NewType("RowValues", dict[str, Any])
+RowId = NewType("RowId", int)
+
+
+class UpdatedRowsData(NamedTuple):
+    updated_rows: list[GeneratedTableModelForUpdate]
+    original_rows_values_by_id: dict[RowId, RowValues]
+    updated_fields_metadata_by_row_id: dict[RowId, FieldsMetadata]
+    errors: dict[int, dict[str, Any]] | None = None
+
+
+class CreatedRowsData(NamedTuple):
+    created_rows: list[GeneratedTableModel]
+    errors: dict[int, dict[str, Any]] | None = None
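A quick illustration of how a caller might assemble the new typed import payload; the field ids and values below are invented for the example:

```python
from baserow.contrib.database.rows.types import FileImportConfiguration, FileImportDict

configuration: FileImportConfiguration = {
    "upsert_fields": [101],                 # compare rows on field 101
    "upsert_values": [["alice"], ["bob"]],  # one value list per imported row
}

payload: FileImportDict = {
    "data": [["alice", 30], ["bob", 25]],
    "configuration": configuration,
}
```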
@@ -486,7 +486,11 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
         table = self.create_table_and_fields(user, database, name, fields)
 
         _, error_report = RowHandler().import_rows(
-            user, table, data, progress=progress, send_realtime_update=False
+            user,
+            table,
+            data=data,
+            progress=progress,
+            send_realtime_update=False,
         )
 
         table_created.send(self, table=table, user=user)
@@ -50,6 +50,7 @@ from baserow.contrib.database.table.constants import (
     USER_TABLE_DATABASE_NAME_PREFIX,
 )
 from baserow.contrib.database.views.exceptions import ViewFilterTypeNotAllowedForField
+from baserow.contrib.database.views.models import DEFAULT_SORT_TYPE_KEY
 from baserow.contrib.database.views.registries import view_filter_type_registry
 from baserow.core.db import MultiFieldPrefetchQuerysetMixin, specific_iterator
 from baserow.core.fields import AutoTrueBooleanField
@@ -332,6 +333,8 @@ class TableModelQuerySet(MultiFieldPrefetchQuerysetMixin, models.QuerySet):
             raise OrderByFieldNotFound(order)
 
         order_direction = "DESC" if order[:1] == "-" else "ASC"
+        type_match = re.search(r"\[(.*?)\]", order)
+        sort_type = type_match.group(1) if type_match else DEFAULT_SORT_TYPE_KEY
         field_object = field_object_dict[field_name_or_id]
         field_type = field_object["type"]
         field_name = field_object["name"]
@@ -339,15 +342,18 @@ class TableModelQuerySet(MultiFieldPrefetchQuerysetMixin, models.QuerySet):
             user_field_name = field_object["field"].name
             error_display_name = user_field_name if user_field_names else field_name
 
-        if not field_object["type"].check_can_order_by(field_object["field"]):
+        if not field_object["type"].check_can_order_by(
+            field_object["field"], sort_type
+        ):
             raise OrderByFieldNotPossible(
                 error_display_name,
                 field_type.type,
-                f"It is not possible to order by field type {field_type.type}.",
+                sort_type,
+                f"It is not possible to order by field type {field_type.type} using sort type {sort_type}.",
             )
 
         field_annotated_order_by = field_type.get_order(
-            field, field_name, order_direction, table_model=self.model
+            field, field_name, order_direction, sort_type, table_model=self.model
         )
 
         if field_annotated_order_by.annotation is not None:
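With this change an order string can carry an optional sort type in square brackets; a hypothetical `-field_1[lower]` resolves to direction `DESC` and sort type `lower`, while a plain `field_1` falls back to the default. A minimal reproduction of the parsing introduced above:

```python
import re

DEFAULT_SORT_TYPE_KEY = "default"

def parse_order(order: str) -> tuple[str, str]:
    """Split an order string like "-field_1[lower]" into (direction, sort_type)."""
    direction = "DESC" if order[:1] == "-" else "ASC"
    type_match = re.search(r"\[(.*?)\]", order)
    sort_type = type_match.group(1) if type_match else DEFAULT_SORT_TYPE_KEY
    return direction, sort_type

assert parse_order("-field_1[lower]") == ("DESC", "lower")
assert parse_order("field_1") == ("ASC", "default")
```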
@@ -309,6 +309,8 @@ class RowTrashableItemType(TrashableItemType):
             model=model,
             before=None,
             user=None,
+            fields=updated_fields,
+            dependant_fields=dependant_fields,
         )
 
     def permanently_delete_item(self, row, trash_item_lookup_cache=None):
@@ -432,6 +434,8 @@ class RowsTrashableItemType(TrashableItemType):
                 model=model,
                 before=None,
                 user=None,
+                fields=updated_fields,
+                dependant_fields=dependant_fields,
             )
         else:
             # Use table signal here instead of row signal because we don't want
@@ -755,6 +755,7 @@ class CreateViewSortActionType(UndoableActionType):
         "database_id",
         "view_sort_id",
         "sort_order",
+        "sort_type",
     ]
 
     @dataclasses.dataclass
@@ -769,10 +770,16 @@ class CreateViewSortActionType(UndoableActionType):
         database_name: str
         view_sort_id: int
         sort_order: str
+        sort_type: str
 
     @classmethod
     def do(
-        cls, user: AbstractUser, view: View, field: Field, sort_order: str
+        cls,
+        user: AbstractUser,
+        view: View,
+        field: Field,
+        sort_order: str,
+        sort_type: Optional[str] = None,
     ) -> ViewSort:
         """
         Creates a new view sort.
@@ -785,9 +792,13 @@ class CreateViewSortActionType(UndoableActionType):
         :param field: The field that needs to be sorted.
         :param sort_order: The desired order, can either be ascending (A to Z) or
             descending (Z to A).
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         """
 
-        view_sort = ViewHandler().create_sort(user, view, field, sort_order)
+        view_sort = ViewHandler().create_sort(
+            user, view, field, sort_order, sort_type=sort_type
+        )
 
         params = cls.Params(
             field.id,
@@ -800,6 +811,7 @@ class CreateViewSortActionType(UndoableActionType):
             view.table.database.name,
             view_sort.id,
             sort_order,
+            sort_type,
         )
         workspace = view.table.database.workspace
         cls.register_action(user, params, cls.scope(view.id), workspace)
@@ -822,7 +834,12 @@ class CreateViewSortActionType(UndoableActionType):
         view = view_handler.get_view(params.view_id)
 
         view_handler.create_sort(
-            user, view, field, params.sort_order, params.view_sort_id
+            user,
+            view,
+            field,
+            params.sort_order,
+            params.view_sort_id,
+            params.sort_type,
         )
 
@@ -840,8 +857,10 @@ class UpdateViewSortActionType(UndoableActionType):
         "database_id",
         "view_sort_id",
         "sort_order",
+        "sort_type",
         "original_field_id",
         "original_sort_order",
+        "original_sort_type",
     ]
 
     @dataclasses.dataclass
@@ -856,9 +875,11 @@ class UpdateViewSortActionType(UndoableActionType):
         database_name: str
         view_sort_id: int
         sort_order: str
+        sort_type: str
         original_field_id: int
         original_field_name: str
         original_sort_order: str
+        original_sort_type: str
 
     @classmethod
     def do(
@@ -867,6 +888,7 @@ class UpdateViewSortActionType(UndoableActionType):
         view_sort: ViewSort,
         field: Optional[Field] = None,
         order: Optional[str] = None,
+        sort_type: Optional[str] = None,
     ) -> ViewSort:
         """
         Updates the values of an existing view sort.
@@ -878,6 +900,8 @@ class UpdateViewSortActionType(UndoableActionType):
         :param view_sort: The view sort that needs to be updated.
         :param field: The field that must be sorted on.
         :param order: Indicates the sort order direction.
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         """
 
         original_field_id = view_sort.field.id
@@ -885,9 +909,12 @@ class UpdateViewSortActionType(UndoableActionType):
         view_id = view_sort.view.id
         view_name = view_sort.view.name
         original_sort_order = view_sort.order
+        original_sort_type = view_sort.type
 
         handler = ViewHandler()
-        updated_view_sort = handler.update_sort(user, view_sort, field, order)
+        updated_view_sort = handler.update_sort(
+            user, view_sort, field, order, sort_type
+        )
 
         cls.register_action(
             user=user,
@@ -902,9 +929,11 @@ class UpdateViewSortActionType(UndoableActionType):
                 updated_view_sort.view.table.database.name,
                 updated_view_sort.id,
                 updated_view_sort.order,
+                updated_view_sort.type,
                 original_field_id,
                 original_field_name,
                 original_sort_order,
+                original_sort_type,
             ),
             scope=cls.scope(view_sort.view.id),
             workspace=view_sort.view.table.database.workspace,
@@ -923,7 +952,13 @@ class UpdateViewSortActionType(UndoableActionType):
         view_handler = ViewHandler()
         view_sort = view_handler.get_sort(user, params.view_sort_id)
 
-        view_handler.update_sort(user, view_sort, field, params.original_sort_order)
+        view_handler.update_sort(
+            user,
+            view_sort,
+            field,
+            params.original_sort_order,
+            params.original_sort_type,
+        )
 
     @classmethod
     def redo(cls, user: AbstractUser, params: Params, action_to_redo: Action):
@@ -932,7 +967,9 @@ class UpdateViewSortActionType(UndoableActionType):
         view_handler = ViewHandler()
         view_sort = view_handler.get_sort(user, params.view_sort_id)
 
-        view_handler.update_sort(user, view_sort, field, params.sort_order)
+        view_handler.update_sort(
+            user, view_sort, field, params.sort_order, params.sort_type
+        )
 
 
 class DeleteViewSortActionType(UndoableActionType):
@@ -949,6 +986,7 @@ class DeleteViewSortActionType(UndoableActionType):
         "database_id",
         "view_sort_id",
         "sort_order",
+        "sort_type",
     ]
 
     @dataclasses.dataclass
@@ -963,6 +1001,7 @@ class DeleteViewSortActionType(UndoableActionType):
         database_name: str
         view_sort_id: int
         sort_order: str
+        sort_type: str
 
     @classmethod
     def do(cls, user: AbstractUser, view_sort: ViewSort):
@@ -983,6 +1022,7 @@ class DeleteViewSortActionType(UndoableActionType):
         field_id = view_sort.field.id
         field_name = view_sort.field.name
         sort_order = view_sort.order
+        sort_type = view_sort.type
 
         ViewHandler().delete_sort(user, view_sort)
 
@@ -997,6 +1037,7 @@ class DeleteViewSortActionType(UndoableActionType):
             view_sort.view.table.database.name,
             view_sort_id,
             sort_order,
+            sort_type,
         )
         workspace = view_sort.view.table.database.workspace
         cls.register_action(user, params, cls.scope(view_sort.view.id), workspace)
@@ -1012,7 +1053,12 @@ class DeleteViewSortActionType(UndoableActionType):
         field = FieldHandler().get_field(params.field_id)
 
         view_handler.create_sort(
-            user, view, field, params.sort_order, params.view_sort_id
+            user,
+            view,
+            field,
+            params.sort_order,
+            params.view_sort_id,
+            params.sort_type,
         )
 
     @classmethod
@@ -1100,7 +1146,8 @@ class UpdateViewFieldOptionsActionType(UndoableActionType):
         VIEW_ACTION_CONTEXT,
     )
     analytics_params = [
-        "view_id" "table_id",
+        "view_id",
+        "table_id",
         "database_id",
         "field_options",
         "original_field_options",
@@ -1934,6 +1981,7 @@ class CreateViewGroupByActionType(UndoableActionType):
         view_group_by_id: int
         group_by_order: str
         group_by_width: int
+        group_by_type: str
 
     @classmethod
     def do(
@@ -1943,6 +1991,7 @@ class CreateViewGroupByActionType(UndoableActionType):
         field: Field,
         group_by_order: str,
         group_by_width: int,
+        group_by_type: str,
     ) -> ViewGroupBy:
         """
         Creates a new view group_by.
@@ -1956,10 +2005,11 @@ class CreateViewGroupByActionType(UndoableActionType):
         :param group_by_order: The desired order, can either be ascending (A to Z) or
             descending (Z to A).
         :param group_by_width: The pixel width of the group by.
+        :param group_by_type: @TODO docs
         """
 
         view_group_by = ViewHandler().create_group_by(
-            user, view, field, group_by_order, group_by_width
+            user, view, field, group_by_order, group_by_width, group_by_type
         )
 
         params = cls.Params(
@@ -1974,6 +2024,7 @@ class CreateViewGroupByActionType(UndoableActionType):
             view_group_by.id,
             group_by_order,
             group_by_width,
+            group_by_type,
         )
         workspace = view.table.database.workspace
         cls.register_action(user, params, cls.scope(view.id), workspace)
@@ -2001,6 +2052,7 @@ class CreateViewGroupByActionType(UndoableActionType):
             field,
             params.group_by_order,
             params.group_by_width,
+            params.group_by_type,
             params.view_group_by_id,
         )
 
@@ -2020,10 +2072,12 @@ class UpdateViewGroupByActionType(UndoableActionType):
         "view_group_by_id",
         "group_by_order",
         "group_by_width",
+        "group_by_type",
         "original_field_id",
         "original_field_name",
         "original_group_by_order",
         "original_group_by_width",
+        "original_group_by_type",
     ]
 
     @dataclasses.dataclass
@@ -2039,10 +2093,12 @@ class UpdateViewGroupByActionType(UndoableActionType):
         view_group_by_id: int
         group_by_order: str
         group_by_width: int
+        group_by_type: str
         original_field_id: int
         original_field_name: str
         original_group_by_order: str
         original_group_by_width: int
+        original_group_by_type: str
 
     @classmethod
     def do(
@@ -2052,6 +2108,7 @@ class UpdateViewGroupByActionType(UndoableActionType):
         field: Optional[Field] = None,
         order: Optional[str] = None,
         width: Optional[int] = None,
+        sort_type: Optional[str] = None,
     ) -> ViewGroupBy:
         """
         Updates the values of an existing view group_by.
@@ -2064,6 +2121,8 @@ class UpdateViewGroupByActionType(UndoableActionType):
         :param field: The field that must be grouped on.
         :param order: Indicates the group by order direction.
         :param width: The visual pixel width of the group by.
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         """
 
         original_field_id = view_group_by.field.id
@@ -2072,10 +2131,16 @@ class UpdateViewGroupByActionType(UndoableActionType):
         view_name = view_group_by.view.name
         original_group_by_order = view_group_by.order
         original_group_by_width = view_group_by.width
+        original_group_by_type = view_group_by.type
 
         handler = ViewHandler()
         updated_view_group_by = handler.update_group_by(
-            user, view_group_by, field, order, width
+            user,
+            view_group_by,
+            field,
+            order,
+            width,
+            sort_type,
         )
 
         cls.register_action(
@@ -2092,10 +2157,12 @@ class UpdateViewGroupByActionType(UndoableActionType):
                 updated_view_group_by.id,
                 updated_view_group_by.order,
                 updated_view_group_by.width,
+                updated_view_group_by.type,
                 original_field_id,
                 original_field_name,
                 original_group_by_order,
                 original_group_by_width,
+                original_group_by_type,
             ),
             scope=cls.scope(view_group_by.view.id),
             workspace=view_group_by.view.table.database.workspace,
@@ -2120,6 +2187,7 @@ class UpdateViewGroupByActionType(UndoableActionType):
             field,
             params.original_group_by_order,
             params.original_group_by_width,
+            params.original_group_by_type,
         )
 
     @classmethod
@@ -2130,7 +2198,12 @@ class UpdateViewGroupByActionType(UndoableActionType):
         view_group_by = view_handler.get_group_by(user, params.view_group_by_id)
 
         view_handler.update_group_by(
-            user, view_group_by, field, params.group_by_order, params.group_by_width
+            user,
+            view_group_by,
+            field,
+            params.group_by_order,
+            params.group_by_width,
+            params.group_by_type,
        )
 
 
@@ -2164,6 +2237,7 @@ class DeleteViewGroupByActionType(UndoableActionType):
         view_group_by_id: int
         group_by_order: str
         group_by_width: int
+        group_by_type: str
 
     @classmethod
     def do(cls, user: AbstractUser, view_group_by: ViewGroupBy):
@@ -2185,6 +2259,7 @@ class DeleteViewGroupByActionType(UndoableActionType):
         field_name = view_group_by.field.name
         group_by_order = view_group_by.order
         group_by_width = view_group_by.width
+        group_by_type = view_group_by.type
 
         ViewHandler().delete_group_by(user, view_group_by)
 
@@ -2200,6 +2275,7 @@ class DeleteViewGroupByActionType(UndoableActionType):
             view_group_by_id,
             group_by_order,
             group_by_width,
+            group_by_type,
         )
         workspace = view_group_by.view.table.database.workspace
         cls.register_action(user, params, cls.scope(view_group_by.view.id), workspace)
@@ -2220,6 +2296,7 @@ class DeleteViewGroupByActionType(UndoableActionType):
             field,
             params.group_by_order,
             params.group_by_width,
+            params.group_by_type,
             params.view_group_by_id,
         )
 
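All of the sort and group-by action types above follow the same undo/redo pattern: both the newly applied type and the original type are captured in `Params` when the action runs, undo restores the original, redo re-applies the new one. A simplified sketch of that round-trip (not the actual action classes; the handler signature is reduced for illustration):

```python
from dataclasses import dataclass

@dataclass
class Params:
    sort_order: str
    sort_type: str
    original_sort_order: str
    original_sort_type: str

def undo(handler, view_sort, params: Params):
    # Restore the pre-action values captured in Params.
    handler.update_sort(view_sort, params.original_sort_order, params.original_sort_type)

def redo(handler, view_sort, params: Params):
    # Re-apply the values the action originally set.
    handler.update_sort(view_sort, params.sort_order, params.sort_type)
```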
@@ -1,4 +1,5 @@
 import dataclasses
+import itertools
 import re
 import traceback
 from collections import defaultdict, namedtuple
@@ -125,13 +126,16 @@ from .exceptions import (
     ViewSortNotSupported,
 )
 from .models import (
+    DEFAULT_SORT_TYPE_KEY,
     OWNERSHIP_TYPE_COLLABORATIVE,
     View,
     ViewDecoration,
     ViewFilter,
     ViewFilterGroup,
     ViewGroupBy,
+    ViewRows,
     ViewSort,
+    ViewSubscription,
 )
 from .registries import (
     decorator_type_registry,
@@ -142,6 +146,8 @@ from .registries import (
 )
 from .signals import (
     form_submitted,
+    rows_entered_view,
+    rows_exited_view,
     view_created,
     view_decoration_created,
     view_decoration_deleted,
@@ -216,6 +222,41 @@ class ViewIndexingHandler(metaclass=baserow_trace_methods(tracer)):
 
         return f"i{table_id}:"
 
+    @classmethod
+    def before_field_type_change(cls, field: Field, model=None):
+        """
+        Removes all the indexes for the views that have a sort on the field
+        that is being changed.
+
+        :param field: The field that is being changed.
+        :param model: The model to use for the table. If not provided it will be
+            taken from the field.
+        """
+
+        views = View.objects.filter(
+            id__in=ViewSort.objects.filter(field=field).values("view_id"),
+            db_index_name__isnull=False,
+        )
+        if not views:
+            return
+
+        if model is None:
+            model = field.table.get_model()
+
+        dropped_indexes = set()
+        for view in views:
+            if view.db_index_name in dropped_indexes:
+                continue
+
+            cls.drop_index(
+                view=view,
+                db_index=django_models.Index("id", name=view.db_index_name),
+                model=model,
+            )
+            dropped_indexes.add(view.db_index_name)
+
+        View.objects.filter(id__in=[v.id for v in views]).update(db_index_name=None)
+
     @classmethod
     def _get_index_hash(
         cls, field_order_bys: List[OptionallyAnnotatedOrderBy]
@@ -312,6 +353,7 @@ class ViewIndexingHandler(metaclass=baserow_trace_methods(tracer)):
                 field_object["field"],
                 field_object["name"],
                 view_sort_or_group_by.order,
+                view_sort_or_group_by.type,
                 table_model=model,
             )
 
@@ -440,6 +482,12 @@ class ViewIndexingHandler(metaclass=baserow_trace_methods(tracer)):
         ):
             return current_index_name
 
+        cls.drop_index(view, db_index, model)
+
+        return current_index_name
+
+    @classmethod
+    def drop_index(cls, view, db_index, model=None):
         if model is None:
             model = view.table.get_model()
 
@@ -452,8 +500,6 @@ class ViewIndexingHandler(metaclass=baserow_trace_methods(tracer)):
             view_table_id=view.table_id,
         )
 
-        return current_index_name
-
     @classmethod
     def update_index_by_view_id(cls, view_id: int, nowait=True):
         """
@@ -591,6 +637,16 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         )
         return views
 
+    def before_field_type_change(self, field: Field):
+        """
+        Allows triggering custom logic before a field is changed.
+        By default it calls ViewIndexingHandler.before_field_type_change.
+
+        :param field: The field that is being changed.
+        """
+
+        ViewIndexingHandler.before_field_type_change(field)
+
     def list_workspace_views(
         self,
         user: AbstractUser,
@@ -874,7 +930,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
 
         view_type = view_type_registry.get_by_model(original_view)
 
-        cache = {}
+        cache = {
+            "workspace_id": workspace.id,
+        }
 
         # Use export/import to duplicate the view easily
         serialized = view_type.export_serialized(original_view, cache)
@@ -896,6 +954,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         # the mapping remain the same. They haven't changed because we're only
         # reimporting the view and not the table, fields, etc.
         id_mapping = {
+            "workspace_id": workspace.id,
             "database_fields": MirrorDict(),
             "database_field_select_options": MirrorDict(),
         }
@@ -1317,13 +1376,17 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         # `after_fields_changed_or_deleted` can be called in bulk and make it query
         # efficient.
         changed_fields = set()
+        all_fields_mapping = {field.id: field for field in fields}
+
+        # Fetch the sorts of all updated fields to check if the sort, including the
+        # type, is still compatible.
+        sorts_to_check = ViewSort.objects.filter(field_id__in=all_fields_mapping.keys())
         fields_to_delete_sortings = [
-            f
-            for f in fields
+            all_fields_mapping[sort.field_id]
+            for sort in sorts_to_check
             if not field_type_registry.get_by_model(
-                f.specific_class
-            ).check_can_order_by(f)
+                all_fields_mapping[sort.field_id].specific_class
+            ).check_can_order_by(all_fields_mapping[sort.field_id], sort.type)
         ]
 
         # If it's a primary field, we also need to remove any sortings on the
@@ -1344,12 +1407,17 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         if deleted_count > 0:
             changed_fields.update(fields_to_delete_sortings)
 
+        # Fetch the group bys of all updated fields to check if the group by, including
+        # the type, is still compatible.
+        groups_to_check = ViewGroupBy.objects.filter(
+            field_id__in=all_fields_mapping.keys()
+        )
         fields_to_delete_groupings = [
-            f
-            for f in fields
+            all_fields_mapping[sort.field_id]
+            for sort in groups_to_check
             if not field_type_registry.get_by_model(
-                f.specific_class
-            ).check_can_group_by(f)
+                all_fields_mapping[sort.field_id].specific_class
+            ).check_can_group_by(all_fields_mapping[sort.field_id], sort.type)
         ]
         if fields_to_delete_groupings:
             deleted_count, _ = ViewGroupBy.objects.filter(
@@ -1422,7 +1490,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         field_type = field_type_registry.get_by_model(field.specific_class)
         # Check whether the updated field is still compatible with the group by.
         # If not, it must be deleted.
-        if not field_type.check_can_group_by(field):
+        if not field_type.check_can_group_by(field, DEFAULT_SORT_TYPE_KEY):
             ViewGroupBy.objects.filter(field=field).delete()
 
     def get_filter_builder(
@@ -1877,6 +1945,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
                 field,
                 field_name,
                 view_sort_or_group_by.order,
+                view_sort_or_group_by.type,
                 table_model=queryset.model,
             )
             field_annotation = field_annotated_order_by.annotation
@@ -2008,6 +2077,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         field: Field,
         order: str,
         primary_key: Optional[int] = None,
+        sort_type: Optional[str] = None,
     ) -> ViewSort:
         """
         Creates a new view sort.
@@ -2018,6 +2088,8 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         :param order: The desired order, can either be ascending (A to Z) or
             descending (Z to A).
         :param primary_key: An optional primary key to give to the new view sort.
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         :raises ViewSortNotSupported: When the provided view does not support sorting.
         :raises FieldNotInTable: When the provided field does not belong to the
             provided view's table.
@@ -2034,6 +2106,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
             user, CreateViewSortOperationType.type, workspace=workspace, context=view
         )
 
+        if not sort_type:
+            sort_type = DEFAULT_SORT_TYPE_KEY
+
         # Check if view supports sorting.
         view_type = view_type_registry.get_by_model(view.specific_class)
         if not view_type.can_sort:
@@ -2043,9 +2118,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
 
         # Check if the field supports sorting.
         field_type = field_type_registry.get_by_model(field.specific_class)
-        if not field_type.check_can_order_by(field):
+        if not field_type.check_can_order_by(field, sort_type):
             raise ViewSortFieldNotSupported(
-                f"The field {field.pk} does not support sorting."
+                f"The field {field.pk} does not support sorting with type {sort_type}."
             )
 
         # Check if field belongs to the grid views table
@@ -2061,7 +2136,11 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         )
 
         view_sort = ViewSort.objects.create(
-            pk=primary_key, view=view, field=field, order=order
+            pk=primary_key,
+            view=view,
+            field=field,
+            order=order,
+            type=sort_type,
         )
 
         view_sort_created.send(self, view_sort=view_sort, user=user)
@@ -2074,6 +2153,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         view_sort: ViewSort,
         field: Optional[Field] = None,
         order: Optional[str] = None,
+        sort_type: Optional[str] = None,
     ) -> ViewSort:
         """
         Updates the values of an existing view sort.
@@ -2095,6 +2175,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         workspace = view_sort.view.table.database.workspace
         field = field if field is not None else view_sort.field
         order = order if order is not None else view_sort.order
+        sort_type = sort_type if sort_type is not None else view_sort.type
 
         CoreHandler().check_permissions(
             user, ReadFieldOperationType.type, workspace=workspace, context=field
@@ -2119,7 +2200,12 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         # If the field has changed we need to check if the new field type supports
         # sorting.
         field_type = field_type_registry.get_by_model(field.specific_class)
-        if field.id != view_sort.field_id and not field_type.check_can_order_by(field):
+        if (
+            field.id != view_sort.field_id or sort_type != view_sort.type
+        ) and not field_type.check_can_order_by(
+            field,
+            sort_type,
+        ):
             raise ViewSortFieldNotSupported(
                 f"The field {field.pk} does not support sorting."
             )
@@ -2136,6 +2222,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
 
         view_sort.field = field
         view_sort.order = order
+        view_sort.type = sort_type
         view_sort.save()
 
         view_sort_updated.send(self, view_sort=view_sort, user=user)
@@ -2238,6 +2325,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         field: Field,
         order: str,
         width: int,
+        sort_type: str = None,
         primary_key: Optional[int] = None,
     ) -> ViewGroupBy:
         """
@@ -2248,6 +2336,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         :param field: The field that needs to be grouped.
         :param order: The desired order, can either be ascending (A to Z) or
             descending (Z to A).
+        :param width: The visual width of the group column.
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         :param primary_key: An optional primary key to give to the new view group_by.
         :raises ViewGroupByNotSupported: When the provided view does not support
             grouping.
@@ -2264,6 +2355,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
             user, CreateViewGroupByOperationType.type, workspace=workspace, context=view
         )
 
+        if not sort_type:
+            sort_type = DEFAULT_SORT_TYPE_KEY
+
         # Check if view supports grouping.
         view_type = view_type_registry.get_by_model(view.specific_class)
         if not view_type.can_group_by:
@@ -2273,9 +2367,9 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
 
         # Check if the field supports grouping.
         field_type = field_type_registry.get_by_model(field.specific_class)
-        if not field_type.check_can_group_by(field):
+        if not field_type.check_can_group_by(field, sort_type):
             raise ViewGroupByFieldNotSupported(
-                f"The field {field.pk} does not support grouping."
+                f"The field {field.pk} does not support grouping with type {sort_type}."
             )
 
         # Check if field belongs to the grid views table
@@ -2291,7 +2385,12 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         )
 
         view_group_by = ViewGroupBy.objects.create(
-            pk=primary_key, view=view, field=field, order=order, width=width
+            pk=primary_key,
+            view=view,
+            field=field,
+            order=order,
+            width=width,
+            type=sort_type,
         )
 
         view_group_by_created.send(self, view_group_by=view_group_by, user=user)
@@ -2305,6 +2404,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         field: Optional[Field] = None,
         order: Optional[str] = None,
         width: Optional[int] = None,
+        sort_type: Optional[str] = None,
     ) -> ViewGroupBy:
         """
         Updates the values of an existing view group_by.
@@ -2314,6 +2414,8 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         :param field: The field that must be grouped on.
         :param order: Indicates the group by order direction.
         :param width: The visual width of the group by.
+        :param sort_type: The sort type that must be used, `default` is set as default
+            when the sort is created.
         :raises ViewGroupByDoesNotExist: When the view used by the filter is trashed.
         :raises ViewGroupByFieldNotSupported: When the field does not support grouping.
         :raises FieldNotInTable: When the provided field does not belong to the
@@ -2330,6 +2432,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         field = field if field is not None else view_group_by.field
         order = order if order is not None else view_group_by.order
         width = width if width is not None else view_group_by.width
+        sort_type = sort_type if sort_type is not None else view_group_by.type
 
         CoreHandler().check_permissions(
             user, ReadFieldOperationType.type, workspace=workspace, context=field
@@ -2354,8 +2457,11 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         # If the field has changed we need to check if the new field type supports
         # grouping.
         field_type = field_type_registry.get_by_model(field.specific_class)
-        if field.id != view_group_by.field_id and not field_type.check_can_order_by(
-            field
+        if (
+            field.id != view_group_by.field_id or sort_type != view_group_by.type
+        ) and not field_type.check_can_order_by(
+            field,
+            sort_type,
         ):
             raise ViewGroupByFieldNotSupported(
                 f"The field {field.pk} does not support grouping."
@@ -2368,12 +2474,14 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
             and view_group_by.view.viewgroupby_set.filter(field_id=field.pk).exists()
         ):
             raise ViewGroupByFieldAlreadyExist(
-                f"A group by for the field {field.pk} already exists."
+                f"A group by for the field {field.pk} already exists with type "
+                f"{sort_type}."
             )
 
         view_group_by.field = field
         view_group_by.order = order
         view_group_by.width = width
+        view_group_by.type = sort_type
         view_group_by.save()
 
         view_group_by_updated.send(self, view_group_by=view_group_by, user=user)
@@ -3206,7 +3314,12 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
         return view
 
     def submit_form_view(
-        self, user, form, values, model=None, enabled_field_options=None
+        self,
+        user,
+        form,
+        values,
+        model: GeneratedTableModel | None = None,
+        enabled_field_options=None,
     ):
         """
         Handles when a form is submitted. It will validate the data by checking if
@@ -3522,7 +3635,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
             field_name = field.db_column
             field_type = field_type_registry.get_by_model(field.specific_class)
 
-            if not field_type.check_can_group_by(field):
+            if not field_type.check_can_group_by(field, DEFAULT_SORT_TYPE_KEY):
                 raise ValueError(f"Can't group by {field_name}.")
 
             value = getattr(row, field_name)
@@ -3753,3 +3866,127 @@ class CachingPublicViewRowChecker:
             # filters and so the result of the first check will be still
             # valid for any subsequent checks.
             return True
+
+
+class ViewSubscriptionHandler:
+    @classmethod
+    def subscribe_to_views(cls, subscriber: django_models.Model, views: list[View]):
+        """
+        Subscribes a subscriber to the provided views. If the ViewRows already exist,
+        it makes sure to notify the subscriber of any pending changes first, so that
+        the subscriber is only notified of changes that happen after the subscription.
+
+        :param subscriber: The subscriber to subscribe to the views.
+        :param views: The views to subscribe to.
+        """
+
+        cls.notify_table_views_updates(views)
+        ViewRows.create_missing_for_views(views)
+
+        new_subscriptions = []
+        for view in views:
+            new_subscriptions.append(ViewSubscription(subscriber=subscriber, view=view))
+        ViewSubscription.objects.bulk_create(new_subscriptions, ignore_conflicts=True)
+
+    @classmethod
+    def unsubscribe_from_views(
+        cls, subscriber: django_models.Model, views: list[View] | None = None
+    ):
+        """
+        Unsubscribes a subscriber from the provided views. If the views are not
+        provided, it unsubscribes the subscriber from all views. Make sure to use a
+        table-specific model for the subscriber to avoid unsubscribing from views that
+        are not related to the subscriber.
+
+        :param subscriber: The subscriber to unsubscribe from the views.
+        :param views: The views to unsubscribe from. If not provided, the subscriber
+            will be unsubscribed from all views.
+        """
+
+        q = Q(
+            subscriber_content_type=ContentType.objects.get_for_model(subscriber),
+            subscriber_id=subscriber.pk,
+        )
+        if views is not None:
+            q &= Q(view__in=views)
+
+        ViewSubscription.objects.filter(q).delete()
+
+    @classmethod
+    def check_views_with_time_sensitive_filters(cls):
+        """
+        Checks for views that have time-sensitive filters. If a view has a
+        time-sensitive filter, calling this method periodically ensures the proper
+        signals are emitted to notify subscribers that the view results have changed.
+        """
+
+        views = View.objects.filter(
+            id__in=ViewFilter.objects.filter(
+                type__in=view_filter_type_registry.get_time_sensitive_filter_types(),
+                view__in=ViewSubscription.objects.values("view"),
+            ).values("view_id")
+        ).order_by("table", "id")
+        for _, view_group in itertools.groupby(views, key=lambda f: f.table):
+            view_ids = [v.id for v in view_group]
+            if view_ids:
+                cls._notify_table_views_updates(view_ids)
+
+    @classmethod
+    def notify_table_views_updates(
+        cls, views: list[View], model: GeneratedTableModel | None = None
+    ):
+        """
+        Verifies if the views have subscribers and notifies them of any changes in the
+        view results.
+
+        :param views: The views to notify subscribers of.
+        :param model: The table model to use for the views. If not provided, the model
+            will be generated automatically.
+        """
+
+        view_ids_with_subscribers = ViewSubscription.objects.filter(
+            view__in=views
+        ).values_list("view_id", flat=True)
+        if view_ids_with_subscribers:
+            cls._notify_table_views_updates(view_ids_with_subscribers, model)
+
+    @classmethod
+    def _notify_table_views_updates(
+        cls, view_ids: list[int], model: GeneratedTableModel | None = None
+    ):
+        """
+        Notifies subscribers of any changes in the view results, emitting the
+        appropriate signals and updating the ViewRows state.
+
+        :param view_ids: The view ids to notify subscribers of.
+        :param model: The table model to use for the views. If not provided, the model
+            will be generated automatically.
+        """
+
+        view_rows = list(
+            ViewRows.objects.select_related("view__table")
+            .filter(view_id__in=view_ids)
+            .select_for_update(of=("self",))
+            .order_by("view_id")
+        )
+
+        if model is None:
+            model = view_rows[0].view.table.get_model()
+
+        for view_state in view_rows:
+            view = view_state.view
+            new_row_ids, row_ids_entered, row_ids_exited = view_state.get_diff(model)
+            changed = False
+            if row_ids_entered:
+                rows_entered_view.send(
+                    sender=cls, view=view, row_ids=row_ids_entered, model=model
+                )
+                changed = True
+            if row_ids_exited:
+                rows_exited_view.send(
+                    sender=cls, view=view, row_ids=row_ids_exited, model=model
+                )
+                changed = True
+            if changed:
+                view_state.row_ids = new_row_ids
+                view_state.save()
@@ -4,7 +4,9 @@ from typing import Iterable, Optional, Union
 
 from django.contrib.auth.hashers import check_password, make_password
 from django.contrib.auth.models import User
+from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
+from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.db.models import Q
 from django.db.models.query import Prefetch
@@ -53,6 +55,9 @@ VIEW_OWNERSHIP_TYPES = [OWNERSHIP_TYPE_COLLABORATIVE]
 # Must be the same as `modules/database/constants.js`.
 DEFAULT_FORM_VIEW_FIELD_COMPONENT_KEY = "default"
 
+# Must be the same as `modules/database/constants.js`.
+DEFAULT_SORT_TYPE_KEY = "default"
+
 
 def get_default_view_content_type():
     return ContentType.objects.get_for_model(View)
@@ -501,6 +506,13 @@ class ViewSort(HierarchicalModelMixin, models.Model):
         "and DESC (Descending) is from Z to A.",
         default=SORT_ORDER_ASC,
     )
+    type = models.CharField(
+        max_length=32,
+        default=DEFAULT_SORT_TYPE_KEY,
+        db_default=DEFAULT_SORT_TYPE_KEY,
+        help_text=f"Indicates the sort type. Will automatically fall back to `"
+        f"{DEFAULT_SORT_TYPE_KEY}` if incompatible with field type.",
+    )
 
     def get_parent(self):
         return self.view
@@ -536,6 +548,13 @@ class ViewGroupBy(HierarchicalModelMixin, models.Model):
         "and DESC (Descending) is from Z to A.",
         default=SORT_ORDER_ASC,
     )
+    type = models.CharField(
+        max_length=32,
+        default=DEFAULT_SORT_TYPE_KEY,
+        db_default=DEFAULT_SORT_TYPE_KEY,
+        help_text=f"Indicates the sort type. Will automatically fall back to `"
+        f"{DEFAULT_SORT_TYPE_KEY}` if incompatible with field type.",
+    )
     width = models.PositiveIntegerField(
         default=200,
         help_text="The pixel width of the group by in the related view.",
@@ -936,3 +955,68 @@ class FormViewFieldOptionsCondition(HierarchicalModelMixin, models.Model):
 
     class Meta:
         ordering = ("id",)
+
+
+class ViewRows(CreatedAndUpdatedOnMixin, models.Model):
+    view = models.OneToOneField(View, on_delete=models.CASCADE, related_name="rows")
+    row_ids = ArrayField(
+        models.PositiveIntegerField(),
+        default=list,
+        help_text="The rows that are shown in the view. This list can be used by "
+        "webhooks to determine which rows have been changed since the last check.",
+    )
+
+    @classmethod
+    def create_missing_for_views(cls, views: list[View], model=None):
+        """
+        Creates ViewRows objects for the given views if they don't already exist.
+
+        :param views: The views for which to create ViewRows objects.
+        """
+
+        from baserow.contrib.database.views.handler import ViewHandler
+
+        existing_view_ids = ViewRows.objects.filter(view__in=views).values_list(
+            "view_id", flat=True
+        )
+        view_map = {view.id: view for view in views}
+        missing_view_ids = list(set(view_map.keys()) - set(existing_view_ids))
+
+        view_rows = []
+        for view_id in missing_view_ids:
+            view = view_map[view_id]
+            row_ids = (
+                ViewHandler()
+                .get_queryset(view, model=model, apply_sorts=False)
+                .values_list("id", flat=True)
+            )
+            view_rows.append(ViewRows(view=view, row_ids=list(row_ids)))
+
+        return ViewRows.objects.bulk_create(view_rows, ignore_conflicts=True)
+
+    def get_diff(self, model=None):
+        """
+        Executes the view query and returns the current row IDs in the view,
+        along with the differences between the current state and the last saved state.
+        """
+
+        from baserow.contrib.database.views.handler import ViewHandler
+
+        rows = ViewHandler().get_queryset(self.view, model=model, apply_sorts=False)
+        previous_row_ids = set(self.row_ids)
+        new_row_ids = set(rows.order_by().values_list("id", flat=True))
+
+        row_ids_entered = new_row_ids - previous_row_ids
+        row_ids_exited = previous_row_ids - new_row_ids
+
+        return list(new_row_ids), list(row_ids_entered), list(row_ids_exited)
+
+
+class ViewSubscription(models.Model):
+    view = models.ForeignKey(View, on_delete=models.CASCADE, related_name="subscribers")
+    subscriber_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
+    subscriber_id = models.PositiveIntegerField()
+    subscriber = GenericForeignKey("subscriber_content_type", "subscriber_id")
+
+    class Meta:
+        unique_together = ("view", "subscriber_content_type", "subscriber_id")
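`ViewRows.get_diff` above boils down to a set comparison between the stored row ids and the current query results; the handler only emits `rows_entered_view`/`rows_exited_view` when either difference is non-empty. The core computation in isolation:

```python
def diff_row_ids(previous_ids, current_ids):
    """Return (new_state, entered, exited) as computed by ViewRows.get_diff."""
    previous, current = set(previous_ids), set(current_ids)
    return list(current), list(current - previous), list(previous - current)

new_state, entered, exited = diff_row_ids([1, 2, 3], [2, 3, 4])
# entered == [4], exited == [1]
```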
98
backend/src/baserow/contrib/database/views/receivers.py
Normal file
98
backend/src/baserow/contrib/database/views/receivers.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
from django.dispatch import receiver

from baserow.contrib.database.fields.signals import (
    field_deleted,
    field_restored,
    field_updated,
)
from baserow.contrib.database.rows.signals import (
    rows_created,
    rows_deleted,
    rows_updated,
)
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.contrib.database.views.models import View
from baserow.contrib.database.views.signals import (
    view_filter_created,
    view_filter_deleted,
    view_filter_group_created,
    view_filter_group_deleted,
    view_filter_group_updated,
    view_filter_updated,
    view_updated,
)

from .handler import ViewSubscriptionHandler


def _notify_table_data_updated(table: Table, model: GeneratedTableModel | None = None):
    """
    Notifies the table views that the table data has been updated. This results in
    the table views being updated and their subscribers being notified.

    :param table: The table for which the data has been updated.
    :param model: The model that was updated, if available.
    """

    ViewSubscriptionHandler.notify_table_views_updates(
        table.view_set.all(), model=model
    )


def _notify_view_results_updated(view: View):
    """
    Notifies the table view that the results of the view have been updated. This
    results in the subscribers being notified.

    :param view: The view for which the results have been updated.
    """

    ViewSubscriptionHandler.notify_table_views_updates([view])


@receiver([rows_updated, rows_created, rows_deleted])
def notify_rows_signals(sender, rows, user, table, model, dependant_fields, **kwargs):
    _notify_table_data_updated(table, model)

    updated_tables = set()
    for field in dependant_fields:
        updated_tables.add(field.table)
    for updated_table in updated_tables:
        _notify_table_data_updated(updated_table)


@receiver(view_updated)
def notify_view_updated(sender, view, user, old_view, **kwargs):
    _notify_view_results_updated(view)


@receiver([view_filter_created, view_filter_updated, view_filter_deleted])
def notify_view_filter_created_or_updated(sender, view_filter, user, **kwargs):
    _notify_view_results_updated(view_filter.view)


@receiver(
    [view_filter_group_created, view_filter_group_updated, view_filter_group_deleted]
)
def notify_view_filter_group_created_or_updated(
    sender, view_filter_group, user, **kwargs
):
    _notify_view_results_updated(view_filter_group.view)


def _notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs):
    tables_to_notify = set([field.table])
    for updated_field in related_fields:
        tables_to_notify.add(updated_field.table)
    for table in tables_to_notify:
        _notify_table_data_updated(table)


@receiver([field_restored, field_updated])
def notify_field_updated(sender, field, related_fields, user, **kwargs):
    _notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs)


@receiver(field_deleted)
def notify_field_deleted(sender, field_id, field, related_fields, user, **kwargs):
    _notify_tables_of_fields_updated_or_deleted(field, related_fields, user, **kwargs)
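The `ViewSubscription` model above ties an arbitrary subscriber to a view through a generic foreign key, and these receivers funnel every relevant change into `ViewSubscriptionHandler`. A minimal sketch of how the pieces fit together; the `view` and `webhook_event` objects are assumed to exist and are not part of this diff:

```python
# Any model instance can subscribe to a view's results, because the subscriber
# is stored behind a generic foreign key (content type + id).
subscription = ViewSubscription.objects.create(
    view=view,
    subscriber=webhook_event,  # e.g. a TableWebhookEvent instance
)

# The entered/exited row computation above is plain set arithmetic:
previous_row_ids = {1, 2, 3}
new_row_ids = {2, 3, 4}
assert new_row_ids - previous_row_ids == {4}  # rows that entered the view
assert previous_row_ids - new_row_ids == {1}  # rows that exited the view
```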
@@ -1007,6 +1007,19 @@ class ViewFilterType(Instance):
            for t in self.compatible_field_types
        )

    @property
    def time_sensitive(self) -> bool:
        """
        Indicates if the filter results depend on the current time.
        For example, filters like 'date_is' with operators like `today` or `yesterday`
        will return different results as time passes, even if the underlying
        data hasn't changed.

        :returns: True if the filter results change based on current time
        """

        return False


class ViewFilterTypeRegistry(Registry):
    """
@@ -1020,6 +1033,21 @@ class ViewFilterTypeRegistry(Registry):
    does_not_exist_exception_class = ViewFilterTypeDoesNotExist
    already_registered_exception_class = ViewFilterTypeAlreadyRegistered

    def get_time_sensitive_filter_types(self) -> List[str]:
        """
        Returns a list of filter types that are time-dependent. For example, filters
        like `date_is` with operators like `today` or `yesterday` will return different
        results as time passes, even if the underlying data hasn't changed.

        :returns: A list of filter types that are time-sensitive
        """

        return [
            filter_type.type
            for filter_type in self.registry.values()
            if filter_type.time_sensitive
        ]


class ViewAggregationType(Instance):
    """
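A filter type opts into this machinery simply by overriding the new `time_sensitive` property, as the date filter types later in this diff do. A minimal sketch of a hypothetical custom filter type; the class name, `type` string, and registry variable are illustrative:

```python
class ExpiresSoonViewFilterType(ViewFilterType):
    # Hypothetical filter whose result set drifts as "now" advances,
    # even when no row has been edited.
    type = "expires_soon"

    @property
    def time_sensitive(self) -> bool:
        return True


# The registry can then report every time-dependent type; the periodic task
# uses this to decide which views must be re-checked:
types = view_filter_type_registry.get_time_sensitive_filter_types()
```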
@@ -31,6 +31,9 @@ view_decoration_deleted = Signal()

view_field_options_updated = Signal()

rows_entered_view = Signal()
rows_exited_view = Signal()


@receiver(field_signals.field_deleted)
def field_deleted(sender, field, **kwargs):
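The two new signals give external code a hook into view membership changes. A minimal receiver sketch, assuming the signals are sent with `view` and `row_ids` keyword arguments (the exact signal signature is not shown in this diff):

```python
from django.dispatch import receiver

from baserow.contrib.database.views.signals import rows_entered_view, rows_exited_view


@receiver(rows_entered_view)
def on_rows_entered(sender, view=None, row_ids=None, **kwargs):
    # Assumed kwargs; Django signals forward whatever the sender provides.
    print(f"{len(row_ids or [])} row(s) entered view {getattr(view, 'id', '?')}")


@receiver(rows_exited_view)
def on_rows_exited(sender, view=None, row_ids=None, **kwargs):
    print(f"{len(row_ids or [])} row(s) exited view {getattr(view, 'id', '?')}")
```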
@@ -1,4 +1,5 @@
import traceback
from datetime import timedelta

from django.conf import settings
from django.core.cache import cache

@@ -9,7 +10,10 @@ from loguru import logger

from baserow.config.celery import app
from baserow.contrib.database.views.exceptions import ViewDoesNotExist
-from baserow.contrib.database.views.handler import ViewIndexingHandler
+from baserow.contrib.database.views.handler import (
+    ViewIndexingHandler,
+    ViewSubscriptionHandler,
+)

AUTO_INDEX_CACHE_KEY = "auto_index_view_cache_key"

@@ -105,3 +109,23 @@ def schedule_view_index_update(view_id: int):
        return

    transaction.on_commit(lambda: _schedule_view_index_update(view_id))


@app.task(queue="export")
def periodic_check_for_views_with_time_sensitive_filters():
    """
    Periodically checks for views that have time-sensitive filters. If a view has a
    time-sensitive filter, this task ensures that the proper signals are emitted to
    notify subscribers that the view results have changed.
    """

    with transaction.atomic():
        ViewSubscriptionHandler.check_views_with_time_sensitive_filters()


@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(
        timedelta(minutes=30),
        periodic_check_for_views_with_time_sensitive_filters.s(),
    )
@@ -21,6 +21,7 @@ from baserow.contrib.database.fields.field_filters import (
    FilterBuilder,
    OptionallyAnnotatedQ,
    filename_contains_filter,
    map_ids_from_csv_string,
    parse_ids_from_csv_string,
)
from baserow.contrib.database.fields.field_types import (

@@ -436,6 +437,10 @@ class HigherThanOrEqualViewFilterType(NumericComparisonViewFilterType):


class TimezoneAwareDateViewFilterType(ViewFilterType):
    @property
    def time_sensitive(self) -> bool:
        return True

    compatible_field_types = [
        DateFieldType.type,
        LastModifiedFieldType.type,

@@ -1102,13 +1107,12 @@ class SingleSelectEqualViewFilterType(ViewFilterType):
        return filter_function(field_name, value, model_field, field)

    def set_import_serialized_value(self, value, id_mapping):
        mapping = id_mapping["database_field_select_options"]
        try:
            value = int(value)
        except ValueError:
            return map_ids_from_csv_string(value, mapping)[0]
        except IndexError:
            return ""

        return str(id_mapping["database_field_select_options"].get(value, ""))


class SingleSelectNotEqualViewFilterType(
    NotViewFilterTypeMixin, SingleSelectEqualViewFilterType

@@ -1159,13 +1163,8 @@ class SingleSelectIsAnyOfViewFilterType(ViewFilterType):
        return filter_function(field_name, option_ids, model_field, field)

    def set_import_serialized_value(self, value: str | None, id_mapping: dict) -> str:
-        # Parses the old option ids and remaps them to the new option ids.
-        old_options_ids = parse_ids_from_csv_string(value or "")
        select_option_map = id_mapping["database_field_select_options"]
-        new_values = []
-        for old_id in old_options_ids:
-            if new_id := select_option_map.get(old_id):
-                new_values.append(str(new_id))
+        new_values = map_ids_from_csv_string(value or "", select_option_map)
        return ",".join(new_values)


@@ -1414,15 +1413,8 @@ class MultipleSelectHasViewFilterType(ManyToManyHasBaseViewFilter):
        return filter_function(field_name, option_ids, model_field, field)

    def set_import_serialized_value(self, value: str | None, id_mapping: dict) -> str:
-        # Parses the old option ids and remaps them to the new option ids.
-        old_options_ids = parse_ids_from_csv_string(value or "")
        select_option_map = id_mapping["database_field_select_options"]
-
-        new_values = []
-        for old_id in old_options_ids:
-            if new_id := select_option_map.get(old_id):
-                new_values.append(str(new_id))
-
+        new_values = map_ids_from_csv_string(value or "", select_option_map)
        return ",".join(new_values)


@@ -1777,6 +1769,10 @@ DATE_FILTER_OPERATOR_DELTA_MAP = {
class BaseDateMultiStepViewFilterType(ViewFilterType):
    incompatible_operators = []

    @property
    def time_sensitive(self) -> bool:
        return True

    def get_filter_date(
        self,
        operator: str,
@@ -18,5 +18,21 @@ class TableWebhookEventConfigFieldNotInTable(Exception):
    )


class TableWebhookEventConfigViewNotInTable(Exception):
    """Raised when a webhook event config is updated with a view that does not
    belong to the table."""

    def __init__(self, view_id=None, *args, **kwargs):
        self.view_id = view_id
        super().__init__(
            f"The view {view_id} does not belong to the table.",
            *args,
            **kwargs,
        )


class SkipWebhookCall(Exception):
    """Raised when the webhook call must be skipped."""


class WebhookPayloadTooLarge(Exception):
    """Raised when the webhook payload is too large and exceeds the batches limit."""
@@ -11,12 +11,14 @@ from requests import PreparedRequest, Response

from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.models import View
from baserow.core.handler import CoreHandler
from baserow.core.utils import extract_allowed, set_allowed_attrs

from .exceptions import (
    TableWebhookDoesNotExist,
    TableWebhookEventConfigFieldNotInTable,
    TableWebhookEventConfigViewNotInTable,
    TableWebhookMaxAllowedCountExceeded,
)
from .models import (
@@ -39,18 +41,21 @@ from .validators import get_webhook_request_function


class WebhookHandler:
-    def find_webhooks_to_call(self, table_id: int, event_type: str) -> QuerySet:
+    def find_webhooks_to_call(
+        self, table_id: int, event_type: str, additional_filters: Q | None = None
+    ) -> QuerySet[TableWebhook]:
        """
        This function is responsible for finding all the webhooks related to a table
        that must be triggered on a specific event.
        """

-        q = Q()
-        q.add(Q(events__event_type__in=[event_type]), Q.OR)
+        q = Q(events__event_type__in=[event_type])
+        if additional_filters is not None:
+            q &= additional_filters

        event_type_object = webhook_event_type_registry.get(event_type)
        if event_type_object.should_trigger_when_all_event_types_selected:
-            q.add(Q(include_all_events=True), Q.OR)
+            q |= Q(include_all_events=True)

        return (
            TableWebhook.objects.filter(
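With the refactor above, the lookup composes `Q` objects with plain operators instead of `Q.add`, and callers can narrow the result. A hedged usage sketch; the event type string and the view-based filter are assumptions built on the `TableWebhookEvent.views` relation added elsewhere in this diff:

```python
from django.db.models import Q

handler = WebhookHandler()

# Every webhook listening for row creations on the table.
webhooks = handler.find_webhooks_to_call(table.id, "rows.created")

# The same lookup, narrowed to webhooks whose event config references a view.
webhooks_for_view = handler.find_webhooks_to_call(
    table.id,
    "rows.created",
    additional_filters=Q(events__views__id=view.id),
)
```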
@@ -170,14 +175,25 @@ class WebhookHandler:
            # could have been deleted.
            if not event_object:
                continue
-            event_fields = Field.objects.filter(
-                table_id=webhook.table_id, id__in=event["fields"]
-            )
-            for field_id in event["fields"]:
-                if not next((f for f in event_fields if field_id == f.id), None):
+
+            # Set fields
+            field_ids = event.get("fields", [])
+            fields = Field.objects.filter(table_id=webhook.table_id, id__in=field_ids)
+            for field_id in field_ids:
+                if not next((f for f in fields if field_id == f.id), None):
                    raise TableWebhookEventConfigFieldNotInTable(field_id)

-            event_object.fields.set(event_fields)
+            event_object.fields.set(fields)
+
+            # Set views
+            view_ids = event.get("views", [])
+            views = View.objects.filter(id__in=view_ids, table_id=webhook.table_id)
+            for view_id in view_ids:
+                if not next((v for v in views if view_id == v.id), None):
+                    raise TableWebhookEventConfigViewNotInTable(view_id)
+
+            event_object.views.set(views)
+            event_object.get_type().after_update(event_object)

    def create_table_webhook(
        self,
@@ -223,20 +239,24 @@ class WebhookHandler:
        values = extract_allowed(kwargs, allowed_fields)
        webhook = TableWebhook.objects.create(table_id=table.id, **values)

        webhook_events = []
        if events is not None and not values.get("include_all_events"):
-            event_headers = []
-            for event in events:
-                event_object = TableWebhookEvent(
-                    event_type=event, webhook_id=webhook.id
-                )
-                event_object.full_clean()
-                event_headers.append(event_object)
+            for event_type in events:
+                webhook_event = TableWebhookEvent(
+                    event_type=event_type, webhook=webhook
+                )
+                webhook_event.full_clean()
+                webhook_events.append(webhook_event)

-            webhook_events = TableWebhookEvent.objects.bulk_create(event_headers)
+            webhook_events = TableWebhookEvent.objects.bulk_create(webhook_events)

        if event_config is not None and not values.get("include_all_events"):
            self._update_webhook_event_config(webhook, event_config, webhook_events)

        for webhook_event in webhook_events:
            webhook_event_type = webhook_event.get_type()
            webhook_event_type.after_create(webhook_event)

        if headers is not None:
            header_objects = []
            for key, value in headers.items():
@@ -247,7 +267,6 @@ class WebhookHandler:
                header_objects.append(header)

            TableWebhookHeader.objects.bulk_create(header_objects)

        return webhook

    def update_table_webhook(
@@ -303,6 +322,7 @@ class WebhookHandler:
            kwargs.get("include_all_events", False) and not old_include_all_events
        )

        created_events = []
        if not should_update_events:
            TableWebhookEvent.objects.filter(webhook=webhook).delete()
        elif events is not None:
@@ -327,11 +347,15 @@ class WebhookHandler:
        ]

        if len(events_to_create) > 0:
-            TableWebhookEvent.objects.bulk_create(events_to_create)
+            created_events = TableWebhookEvent.objects.bulk_create(events_to_create)

        if event_config is not None and should_update_events:
            self._update_webhook_event_config(webhook, event_config)

        for webhook_event in created_events:
            webhook_event_type = webhook_event.get_type()
            webhook_event_type.after_create(webhook_event)

        if headers is not None:
            existing_headers = webhook.headers.all()
@@ -1,10 +1,13 @@
import uuid

from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.core.validators import MaxLengthValidator
from django.db import models

from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.views.models import View
from baserow.core.models import CreatedAndUpdatedOnMixin

from .validators import header_name_validator, header_value_validator, url_validator
@@ -58,6 +61,18 @@ class TableWebhook(CreatedAndUpdatedOnMixin, models.Model):
    def header_dict(self):
        return {header.name: header.value for header in self.headers.all()}

    @property
    def batch_limit(self) -> int:
        """
        This value is used to limit the number of batches a single webhook can make to
        paginate the payload. If the payload is too large to be sent in one go, the
        event_type can split it into multiple batches. If the number of batches exceeds
        this limit, a notification will be sent to workspace admins informing them that
        the webhook couldn't send all the data.
        """

        return settings.BASEROW_WEBHOOKS_BATCH_LIMIT

    class Meta:
        ordering = ("id",)
@@ -68,6 +83,17 @@ class TableWebhookEvent(CreatedAndUpdatedOnMixin, models.Model):
    )
    event_type = models.CharField(max_length=50)
    fields = models.ManyToManyField(Field)
    views = models.ManyToManyField(View)
    view_subscriptions = GenericRelation(
        "ViewSubscription",
        content_type_field="subscriber_content_type",
        object_id_field="subscriber_id",
    )

    def get_type(self):
        from .registries import webhook_event_type_registry

        return webhook_event_type_registry.get(self.event_type)

    class Meta:
        ordering = ("id",)
@@ -90,6 +116,13 @@ class TableWebhookCall(models.Model):
        editable=False,
        help_text="Event ID where the call originated from.",
    )
    batch_id = models.PositiveIntegerField(
        null=True,
        help_text=(
            "The batch ID for this call. Null if not part of a batch. "
            "Used for batching multiple calls of the same event_id due to large data."
        ),
    )
    webhook = models.ForeignKey(
        TableWebhook, related_name="calls", on_delete=models.CASCADE
    )

@@ -111,3 +144,4 @@ class TableWebhookCall(models.Model):

    class Meta:
        ordering = ("-called_time",)
        unique_together = ("event_id", "batch_id", "webhook", "event_type")
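Because `unique_together` now includes `batch_id`, each batch of a paginated payload gets its own call log row while a retry of the same batch updates the existing row. A rough sketch of the bookkeeping this enables; the field values are illustrative:

```python
# Two batches of the same event produce two distinct log rows.
for batch_id in (1, 2):
    TableWebhookCall.objects.update_or_create(
        event_id=event_id,
        batch_id=batch_id,
        webhook=webhook,
        event_type="rows.created",
        defaults={"called_url": webhook.url},
    )

# Retrying batch 2 hits the same (event_id, batch_id, webhook, event_type)
# key, so the existing row is updated instead of a new one being inserted.
```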
@@ -0,0 +1,169 @@
from dataclasses import asdict, dataclass
from typing import List

from django.conf import settings
from django.utils.translation import gettext as _

from baserow.core.models import (
    WORKSPACE_USER_PERMISSION_ADMIN,
    Workspace,
    WorkspaceUser,
)
from baserow.core.notifications.handler import NotificationHandler
from baserow.core.notifications.models import NotificationRecipient
from baserow.core.notifications.registries import (
    EmailNotificationTypeMixin,
    NotificationType,
)

from .models import TableWebhook


@dataclass
class DeactivatedWebhookData:
    webhook_id: int
    table_id: int
    database_id: int
    webhook_name: str

    @classmethod
    def from_webhook(cls, webhook):
        return cls(
            webhook_id=webhook.id,
            table_id=webhook.table_id,
            database_id=webhook.table.database_id,
            webhook_name=webhook.name,
        )


def notify_admins_in_workspace(
    workspace: Workspace, notification_type: str, data: dict
) -> List[NotificationRecipient]:
    """
    Notifies all admins in the workspace about an important event, such as a webhook
    deactivation or a payload exceeding size limits.

    :param workspace: The workspace whose admins will be notified.
    :param notification_type: The type of notification to send.
    :param data: The data to include in the notification.
    :return: A list of created notification recipients.
    """

    admins_workspace_users = WorkspaceUser.objects.filter(
        workspace=workspace,
        permissions=WORKSPACE_USER_PERMISSION_ADMIN,
        user__profile__to_be_deleted=False,
        user__is_active=True,
    ).select_related("user")
    admins_in_workspace = [admin.user for admin in admins_workspace_users]

    return NotificationHandler.create_direct_notification_for_users(
        notification_type=notification_type,
        recipients=admins_in_workspace,
        data=data,
        sender=None,
        workspace=workspace,
    )


class WebhookDeactivatedNotificationType(EmailNotificationTypeMixin, NotificationType):
    type = "webhook_deactivated"
    has_web_frontend_route = True

    @classmethod
    def notify_admins_in_workspace(
        cls, webhook: TableWebhook
    ) -> List[NotificationRecipient]:
        """
        Creates a notification of this type for each admin in the workspace that the
        webhook belongs to.

        :param webhook: The webhook that was deactivated.
        :return: A list of notification recipients that have been created.
        """

        workspace = webhook.table.database.workspace
        return notify_admins_in_workspace(
            workspace, cls.type, asdict(DeactivatedWebhookData.from_webhook(webhook))
        )

    @classmethod
    def get_notification_title_for_email(cls, notification, context):
        return _("%(name)s webhook has been deactivated.") % {
            "name": notification.data["webhook_name"],
        }

    @classmethod
    def get_notification_description_for_email(cls, notification, context):
        return _(
            "The webhook failed more than %(max_failures)s consecutive times and "
            "was therefore deactivated."
        ) % {
            "max_failures": settings.BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES,
        }


@dataclass
class WebhookPayloadTooLargeData:
    webhook_id: int
    table_id: int
    database_id: int
    webhook_name: str
    event_id: str
    batch_limit: int

    @classmethod
    def from_webhook(cls, webhook: TableWebhook, event_id: str):
        return cls(
            webhook_id=webhook.id,
            table_id=webhook.table_id,
            database_id=webhook.table.database_id,
            webhook_name=webhook.name,
            event_id=event_id,
            batch_limit=webhook.batch_limit,
        )


class WebhookPayloadTooLargeNotificationType(
    EmailNotificationTypeMixin, NotificationType
):
    type = "webhook_payload_too_large"
    has_web_frontend_route = True

    @classmethod
    def notify_admins_in_workspace(
        cls, webhook: TableWebhook, event_id: str
    ) -> List[NotificationRecipient]:
        """
        Creates a notification of this type for each admin in the workspace that the
        webhook belongs to.

        :param webhook: The webhook trying to send a payload that is too large.
        :param event_id: The event id that triggered the notification.
        :return: A list of notification recipients that have been created.
        """

        workspace = webhook.table.database.workspace
        return notify_admins_in_workspace(
            workspace,
            cls.type,
            asdict(WebhookPayloadTooLargeData.from_webhook(webhook, event_id)),
        )

    @classmethod
    def get_notification_title_for_email(cls, notification, context):
        return _("%(name)s webhook payload too large.") % {
            "name": notification.data["webhook_name"],
        }

    @classmethod
    def get_notification_description_for_email(cls, notification, context):
        return _(
            "The payload for the %(name)s webhook with event ID %(event_id)s "
            "was too large. The content has been split into multiple batches, but "
            "data above the batch limit of %(batch_limit)s was discarded."
        ) % {
            "name": notification.data["webhook_name"],
            "event_id": notification.data["event_id"],
            "batch_limit": notification.data["batch_limit"],
        }
@@ -1,13 +1,16 @@
import uuid
from typing import Optional

from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.db.models import Q
from django.dispatch.dispatcher import Signal

from baserow.contrib.database.table.models import Table
from baserow.contrib.database.webhooks.models import TableWebhook, TableWebhookEvent
from baserow.core.registry import Instance, ModelRegistryMixin, Registry

-from .exceptions import SkipWebhookCall
+from .exceptions import SkipWebhookCall, WebhookPayloadTooLarge
from .tasks import call_webhook


@@ -15,8 +18,8 @@ class WebhookEventType(Instance):
    """
    This class represents a custom webhook event type that can be added to the webhook
    event type registry. Each registered event type needs to set a django signal on
-    which it will listen on. Upon initialization the webhook event type will connect
-    to the django signal.
+    which it will listen on. Upon initialization the webhook event type will connect to
+    the django signal.

    The 'listener' function will be called for every received signal. The listener will
    generate a unique ID for every received signal, find all webhooks that need to be
@@ -97,6 +100,17 @@ class WebhookEventType(Instance):

        return table

    def get_additional_filters_for_webhooks_to_call(
        self, **kwargs: dict
    ) -> Optional[Q]:
        """
        Filters to pass to WebhookHandler.find_webhooks_to_call. By default, no
        additional filters are applied.

        :param kwargs: The arguments of the signal.
        :return: A Q object with additional filters, or None.
        """

    def listener(self, **kwargs: dict):
        """
        The method that is called when the signal is triggered. By default it will
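An event type can use this hook to restrict which webhooks fire for a given signal. A minimal sketch of a hypothetical subclass; the `view` kwarg and the `events__views` lookup are assumptions consistent with the models added in this diff:

```python
class RowsEnterViewEventType(WebhookEventType):
    # Hypothetical event type reacting to the rows_entered_view signal.
    type = "rows.enter_view"

    def get_additional_filters_for_webhooks_to_call(
        self, **kwargs: dict
    ) -> Optional[Q]:
        # Only call webhooks whose event config subscribes to this view.
        view = kwargs["view"]
        return Q(events__views=view)
```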
@@ -107,6 +121,55 @@ class WebhookEventType(Instance):

        transaction.on_commit(lambda: self.listener_after_commit(**kwargs))

    def _paginate_payload(
        self, webhook: TableWebhook, event_id: str, payload: dict[str, any]
    ) -> tuple[dict, dict | None]:
        """
        This method is called in the celery task and can be overwritten to paginate the
        payload, if it's too large to send all the data at once. The default
        implementation returns the payload and None as the next cursor, but if the
        payload is too large to be sent in one go, this method can be used to return a
        part of the payload and the remaining part as the next cursor. Proper `batch_id`
        values will be added to the payload by the caller to keep track of the current
        batch.

        :param payload: The payload that must be paginated.
        :return: A tuple containing the payload to be sent and the remaining payload
            for the next batch, if any, or None.
        """

        return payload, None

    def paginate_payload(self, webhook, event_id, payload) -> tuple[dict, dict | None]:
        """
        This method calls the `_paginate_payload` method and adds a `batch_id` to the
        payload if the remaining payload is not None. The `batch_id` is used to keep
        track of the current batch of the payload.

        :param webhook: The webhook object related to the call.
        :param event_id: The unique uuid event id of the event that triggered the call.
        :param payload: The payload that must be paginated.
        :return: A tuple containing the payload to be sent and the remaining payload
            for the next batch, if any, or None.
        """

        batch_id = int(payload.get("batch_id", None) or 1)
        if webhook.batch_limit > 0 and batch_id > webhook.batch_limit:
            raise WebhookPayloadTooLarge(
                f"Payload for event '{self.type}' (event_id: '{event_id}') exceeds "
                f"the batch limit of ({webhook.batch_limit} batches)."
            )

        prepared_payload, remaining_payload = self._paginate_payload(
            webhook, event_id, payload
        )

        if remaining_payload is not None:
            prepared_payload["batch_id"] = batch_id
            remaining_payload["batch_id"] = batch_id + 1

        return prepared_payload, remaining_payload

    def listener_after_commit(self, **kwargs):
        """
        Called after the signal is triggered and the transaction commits. By default it
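Event types with potentially oversized payloads override `_paginate_payload` to emit the data in slices; `paginate_payload` then stamps incrementing `batch_id` values and the celery task re-enqueues the remainder. A minimal sketch, assuming the payload keeps its rows under an `items` key and using a hypothetical page size:

```python
class LargeListEventType(WebhookEventType):
    # Hypothetical event type whose payload can list thousands of rows.
    type = "rows.large_list"

    def _paginate_payload(self, webhook, event_id, payload):
        page_size = 100  # assumed batch size
        items = payload.pop("items", [])
        payload["items"] = items[:page_size]

        if len(items) <= page_size:
            return payload, None  # everything fits into a single batch

        # The tail becomes the "cursor" for the next batch; the caller adds
        # the batch_id and call_webhook schedules the follow-up task.
        remaining = {**payload, "items": items[page_size:]}
        return payload, remaining
```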
@@ -123,7 +186,8 @@ class WebhookEventType(Instance):

        table = self.get_table_object(**kwargs)
        webhook_handler = WebhookHandler()
-        webhooks = webhook_handler.find_webhooks_to_call(table.id, self.type)
+        filters = self.get_additional_filters_for_webhooks_to_call(**kwargs)
+        webhooks = webhook_handler.find_webhooks_to_call(table.id, self.type, filters)
        event_id = uuid.uuid4()
        for webhook in webhooks:
            try:

@@ -144,8 +208,24 @@ class WebhookEventType(Instance):
        except SkipWebhookCall:
            pass

    def after_create(self, webhook_event: TableWebhookEvent):
        """
        This method is called after a webhook event has been created. By default it
        does nothing, but can be overwritten to add additional functionality.

        :param webhook_event: The created webhook event.
        """

    def after_update(self, webhook_event: TableWebhookEvent):
        """
        This method is called after a webhook event has been updated. By default it
        does nothing, but can be overwritten to add additional functionality.

        :param webhook_event: The updated webhook event.
        """


-class WebhookEventTypeRegistry(ModelRegistryMixin, Registry):
+class WebhookEventTypeRegistry(ModelRegistryMixin, Registry[WebhookEventType]):
    name = "webhook_event"
@@ -1,3 +1,4 @@
from copy import deepcopy
from datetime import datetime, timezone

from django.conf import settings

@@ -8,6 +9,10 @@ from django.db.utils import OperationalError
from loguru import logger

from baserow.config.celery import app
from baserow.contrib.database.webhooks.exceptions import WebhookPayloadTooLarge
from baserow.contrib.database.webhooks.notification_types import (
    WebhookPayloadTooLargeNotificationType,
)
from baserow.core.redis import RedisQueue

@@ -83,29 +88,18 @@ def call_webhook(
    can still measure this.
    """

    from advocate import UnacceptableAddressException
    from requests import RequestException

    from .handler import WebhookHandler
-    from .models import TableWebhook, TableWebhookCall
+    from .models import TableWebhook
    from .registries import webhook_event_type_registry

    if self.request.retries > retries:
        retries = self.request.retries

    try:
        with transaction.atomic():
            handler = WebhookHandler()

            try:
                webhook = TableWebhook.objects.select_for_update(
-                    of=("self",),
-                    nowait=True,
-                ).get(
-                    id=webhook_id,
-                    # If a webhook is not active anymore, then it should not be
-                    # executed.
-                    active=True,
-                )
+                    of=("self",), nowait=True
+                ).get(id=webhook_id, active=True)
            except TableWebhook.DoesNotExist:
                # If the webhook has been deleted or disabled while executing, we don't
                # want to continue making calls to the URL because we can't update the
@@ -127,65 +121,36 @@ def call_webhook(
            else:
                raise e

-            request = None
-            response = None
-            success = False
-            error = ""
-
+            # Paginate the payload if necessary and enqueue the remaining data.
+            webhook_event_type = webhook_event_type_registry.get(event_type)
            try:
-                request, response = handler.make_request(method, url, headers, payload)
-                success = response.ok
-            except RequestException as exception:
-                request = exception.request
-                response = exception.response
-                error = str(exception)
-            except UnacceptableAddressException as exception:
-                error = f"UnacceptableAddressException: {exception}"
-
-            TableWebhookCall.objects.update_or_create(
-                event_id=event_id,
-                event_type=event_type,
-                webhook=webhook,
-                defaults={
-                    "called_time": datetime.now(tz=timezone.utc),
-                    "called_url": url,
-                    "request": handler.format_request(request)
-                    if request is not None
-                    else None,
-                    "response": handler.format_response(response)
-                    if response is not None
-                    else None,
-                    "response_status": response.status_code
-                    if response is not None
-                    else None,
-                    "error": error,
-                },
-            )
-            handler.clean_webhook_calls(webhook)
-
-            if success and webhook.failed_triggers != 0:
-                # If the call was successful and failed triggers had been increased in
-                # the past, we can safely reset it to 0 again to prevent deactivation of
-                # the webhook.
-                webhook.failed_triggers = 0
-                webhook.save()
-            elif not success and (
-                webhook.failed_triggers
-                < settings.BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES
-            ):
-                # If the task has reached the maximum amount of failed calls, we're
-                # going to give up and increase the total failed triggers of the webhook
-                # if we're still operating within the limits of the max consecutive
-                # trigger failures.
-                webhook.failed_triggers += 1
-                webhook.save()
-            elif not success:
-                # If webhook has reached the maximum amount of failed triggers,
-                # we're going to deactivate it because we can reasonably assume that the
-                # target doesn't listen anymore. At this point we've tried 8 * 10 times.
-                # The user can manually activate it again when it's fixed.
-                webhook.active = False
-                webhook.save()
+                payload, remaining = webhook_event_type.paginate_payload(
+                    webhook, event_id, deepcopy(payload)
+                )
+            except WebhookPayloadTooLarge:
+                success = True  # We don't want to retry this call, because it will fail again.
+                transaction.on_commit(
+                    lambda: WebhookPayloadTooLargeNotificationType.notify_admins_in_workspace(
+                        webhook, event_id
+                    )
+                )
+            else:
+                success = make_request_and_save_result(
+                    webhook, event_id, event_type, method, url, headers, payload
+                )
+                # Enqueue the next call if there is still remaining payload.
+                if success and remaining:
+                    args = (
+                        webhook_id,
+                        event_id,
+                        event_type,
+                        method,
+                        url,
+                        headers,
+                        remaining,
+                    )
+                    kwargs = {"retries": 0}
+                    enqueue_webhook_task(webhook_id, event_id, args, kwargs)

            # After the transaction successfully commits we can delay the next call
            # in the queue, so that only one call is triggered concurrently.

@@ -207,3 +172,84 @@ def call_webhook(
        kwargs = self.request.kwargs or {}
        kwargs["retries"] = retries + 1
        self.retry(countdown=2**retries, kwargs=kwargs)


def make_request_and_save_result(
    webhook, event_id, event_type, method, url, headers, payload
):
    from advocate import UnacceptableAddressException
    from requests import RequestException

    from .handler import WebhookHandler
    from .models import TableWebhookCall
    from .notification_types import WebhookDeactivatedNotificationType

    handler = WebhookHandler()

    request = None
    response = None
    success = False
    error = ""

    try:
        request, response = handler.make_request(method, url, headers, payload)
        success = response.ok
    except RequestException as exception:
        request = exception.request
        response = exception.response
        error = str(exception)
    except UnacceptableAddressException as exception:
        error = f"UnacceptableAddressException: {exception}"

    TableWebhookCall.objects.update_or_create(
        event_id=event_id,
        batch_id=payload.get("batch_id", None),
        event_type=event_type,
        webhook=webhook,
        defaults={
            "called_time": datetime.now(tz=timezone.utc),
            "called_url": url,
            "request": handler.format_request(request) if request is not None else None,
            "response": handler.format_response(response)
            if response is not None
            else None,
            "response_status": response.status_code if response is not None else None,
            "error": error,
        },
    )
    handler.clean_webhook_calls(webhook)

    if success:
        if webhook.failed_triggers != 0:
            # If the call was successful and failed triggers had been increased
            # in the past, we can safely reset it to 0 again to prevent
            # deactivation of the webhook.
            webhook.failed_triggers = 0
            webhook.save()
    elif (
        webhook.failed_triggers
        < settings.BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES
    ):
        # If the task has reached the maximum amount of failed calls, we're
        # going to give up and increase the total failed triggers of the webhook
        # if we're still operating within the limits of the max consecutive
        # trigger failures.
        webhook.failed_triggers += 1
        webhook.save()
    else:
        # If the webhook has reached the maximum amount of failed triggers, we're
        # going to deactivate it because we can reasonably assume that the
        # target doesn't listen anymore. At this point we've tried 8 * 10 times.
        # The user can manually activate it again when it's fixed.
        webhook.active = False
        webhook.save()

        # Send a notification to the workspace admins that the webhook was
        # deactivated.
        transaction.on_commit(
            lambda: WebhookDeactivatedNotificationType.notify_admins_in_workspace(
                webhook
            )
        )

    return success
@@ -61,7 +61,11 @@ from baserow.contrib.database.views.exceptions import (
    AggregationTypeDoesNotExist,
    ViewDoesNotExist,
)
from baserow.contrib.database.views.models import DEFAULT_SORT_TYPE_KEY
from baserow.contrib.database.views.service import ViewService
from baserow.contrib.database.views.view_aggregations import (
    DistributionViewAggregationType,
)
from baserow.contrib.integrations.local_baserow.api.serializers import (
    LocalBaserowTableServiceFieldMappingSerializer,
)

@@ -90,7 +94,7 @@ from baserow.contrib.integrations.local_baserow.utils import (
    guess_cast_function_from_response_serializer_field,
    guess_json_type_from_response_serializer_field,
)
-from baserow.core.cache import local_cache
+from baserow.core.cache import global_cache, local_cache
from baserow.core.formula import resolve_formula
from baserow.core.formula.registries import formula_runtime_function_registry
from baserow.core.handler import CoreHandler

@@ -115,6 +119,9 @@ if TYPE_CHECKING:
    from baserow.contrib.database.table.models import GeneratedTableModel, Table


SCHEMA_CACHE_TTL = 60 * 60  # 1 hour


class LocalBaserowServiceType(ServiceType):
    """
    The `ServiceType` for all `LocalBaserow` integration services.

@@ -220,6 +227,8 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):

        return model.objects.all().enhance_by_fields(
            only_field_ids=extract_field_ids_from_list(only_field_names)
            if only_field_names is not None
            else None
        )

    def enhance_queryset(self, queryset):

@@ -482,6 +491,29 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
        :return: A schema dictionary, or None if no `Table` has been applied.
        """

        if service.table_id is None:
            return None

        properties = global_cache.get(
            f"table_{service.table_id}_{service.table.version}__service_schema",
            default=lambda: self._get_table_properties(service, allowed_fields),
            timeout=SCHEMA_CACHE_TTL,
        )

        return self.get_schema_for_return_type(service, properties)

    def _get_table_properties(
        self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Extracts the properties from the table model fields.

        :param service: A `LocalBaserowTableService` subclass.
        :param allowed_fields: The properties which are allowed to be included in the
            properties.
        :return: A schema dictionary, or None if no `Table` has been applied.
        """

        field_objects = self.get_table_field_objects(service)

        if field_objects is None:

@@ -517,7 +549,7 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
                "searchable": field_type.is_searchable(field)
                and field_type.type
                not in self.unsupported_adhoc_searchable_field_types,
-                "sortable": field_type.check_can_order_by(field)
+                "sortable": field_type.check_can_order_by(field, DEFAULT_SORT_TYPE_KEY)
                and field_type.type not in self.unsupported_adhoc_sortable_field_types,
                "filterable": field_type.check_can_filter_by(field)
                and field_type.type

@@ -526,7 +558,7 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
                "metadata": field_serializer.data,
            } | self.get_json_type_from_response_serializer_field(field, field_type)

-        return self.get_schema_for_return_type(service, properties)
+        return properties

    def get_schema_name(self, service: ServiceSubClass) -> str:
        """

@@ -570,7 +602,7 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):

        return local_cache.get(
            f"integration_service_{service.table_id}_table_model",
-            service.table.get_model,
+            lambda: service.table.get_model(),
        )

    def get_table_field_objects(

@@ -592,6 +624,18 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):

    def get_context_data(
        self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        if service.table_id is None:
            return None

        return global_cache.get(
            f"table_{service.table_id}_{service.table.version}__service_context_data",
            default=lambda: self._get_context_data(service, allowed_fields),
            timeout=SCHEMA_CACHE_TTL,
        )

    def _get_context_data(
        self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        field_objects = self.get_table_field_objects(service)

@@ -622,6 +666,22 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
    def get_context_data_schema(
        self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
    ) -> Optional[Dict[str, Any]]:
        if service.table_id is None:
            return None

        return global_cache.get(
            f"table_{service.table_id}_{service.table.version}__service_context_data_schema",
            default=lambda: self._get_context_data_schema(service, allowed_fields),
            timeout=SCHEMA_CACHE_TTL,
        )

    def _get_context_data_schema(
        self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Returns the context data schema for the table associated with the service.
        """

        field_objects = self.get_table_field_objects(service)

        if field_objects is None:
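All three cache keys embed `service.table.version`, so a table change produces a new key and the stale entries simply age out through the one hour TTL; no explicit `invalidate()` is required. A small generic sketch of this version-in-key pattern; the helper is illustrative:

```python
def versioned_key(table_id: int, version: str, suffix: str) -> str:
    # Bumping `version` changes the key, which acts as implicit invalidation.
    return f"table_{table_id}_{version}__{suffix}"


assert versioned_key(42, "v1", "service_schema") != versioned_key(
    42, "v2", "service_schema"
)  # a schema cached under v1 is never served once the table moves to v2
```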
@@ -1184,6 +1244,10 @@ class LocalBaserowAggregateRowsUserServiceType(
    dispatch_type = DispatchTypes.DISPATCH_DATA_SOURCE
    serializer_mixins = LocalBaserowTableServiceFilterableMixin.mixin_serializer_mixins

    # Local Baserow aggregate rows does not currently support the distribution
    # aggregation type; this will be resolved in a future release.
    unsupported_aggregation_types = [DistributionViewAggregationType.type]

    def get_schema_name(self, service: LocalBaserowAggregateRows) -> str:
        """
        The Local Baserow aggregation schema name added to the `title` in

@@ -1322,6 +1386,19 @@ class LocalBaserowAggregateRowsUserServiceType(
        # The table and view will be prepared in the parent
        values = super().prepare_values(values, user, instance)

        # Aggregation types are always checked for compatibility,
        # no matter if they have already been set previously.
        aggregation_type = values.get(
            "aggregation_type", getattr(instance, "aggregation_type", "")
        )

        if aggregation_type in self.unsupported_aggregation_types:
            raise DRFValidationError(
                detail=f"The {aggregation_type} aggregation type "
                "is not currently supported.",
                code="unsupported_aggregation_type",
            )

        if "table" in values:
            # Reset the field if the table has changed
            if (

@@ -1351,12 +1428,6 @@ class LocalBaserowAggregateRowsUserServiceType(
                    code="invalid_field",
                )

-        # Aggregation types are always checked for compatibility
-        # no matter if they have been already set previously
-        aggregation_type = values.get(
-            "aggregation_type", getattr(instance, "aggregation_type", "")
-        )
-
        if aggregation_type and field:
            agg_type = field_aggregation_registry.get(aggregation_type)
            if not agg_type.field_is_compatible(field):
@@ -2,8 +2,12 @@ from contextlib import contextmanager
from typing import Callable, TypeVar

from django.conf import settings
from django.core.cache import cache

from asgiref.local import Local
from redis.exceptions import LockNotOwnedError

from baserow.version import VERSION as BASEROW_VERSION

T = TypeVar("T")

@@ -108,3 +112,147 @@ class LocalCacheMiddleware:
    def __call__(self, request):
        with local_cache.context():
            return self.get_response(request)


SENTINEL = object()


class GlobalCache:
    """
    A global cache wrapper around the Django cache system that provides
    invalidation capabilities and a lock mechanism to prevent multiple
    concurrent updates. It's also versioned with the Baserow version.

    Example usage:

        # Storing and retrieving a value
        value = global_cache.get(
            "user_123_data",
            default=lambda: expensive_computation(),
            timeout=300
        )

        # Invalidating a cache key
        global_cache.invalidate("user_123_data")
    """

    VERSION_KEY_TTL = 60 * 60 * 24 * 10  # 10 days

    def _get_version_cache_key(
        self, key: str, invalidate_key: None | str = None
    ) -> str:
        """
        Generates a versioned cache key for tracking different versions of a cached
        value.

        :param key: The base cache key.
        :param invalidate_key: The key used when this cache is invalidated.
        :return: A modified cache key used for version tracking.
        """

        key = key if invalidate_key is None else invalidate_key

        return f"{BASEROW_VERSION}_{key}__current_version"

    def _get_cache_key_with_version(self, key: str) -> str:
        """
        Generates a cache key with the version included.

        :param key: The base cache key.
        :return: A modified cache key with version.
        """

        version = cache.get(self._get_version_cache_key(key), 0)
        return f"{BASEROW_VERSION}_{key}__version_{version}"

    def get(
        self,
        key: str,
        default: T | Callable[[], T] = None,
        invalidate_key: None | str = None,
        timeout: int = 60,
    ) -> T:
        """
        Retrieves a value from the cache if it exists; otherwise, sets it using the
        provided default value.

        This function also uses a lock (if available on the cache backend) to ensure
        that concurrent calls are safe when setting a new value.

        :param key: The key of the cache value to get (or set). Make sure this key is
            unique and not used elsewhere.
        :param invalidate_key: The key used when this cache is invalidated. A default
            one is used if none is provided and this value otherwise. Can be used to
            invalidate multiple caches at the same time. When invalidating the cache
            you must use the same key later.
        :param default: The default value to store in the cache if the key is absent.
            Can be either a literal value or a callable. If it's a callable,
            the function is called to retrieve the default value.
        :param timeout: The cache timeout in seconds for newly set values.
            Defaults to 60.
        :return: The cached value if it exists; otherwise, the newly set value.
        """

        version_key = self._get_version_cache_key(key, invalidate_key)
        version = cache.get(version_key, 0)
        cache_key_to_use = f"{BASEROW_VERSION}_{key}__version_{version}"

        cached = cache.get(cache_key_to_use, SENTINEL)

        if cached is SENTINEL:
            use_lock = hasattr(cache, "lock")
            if use_lock:
                cache_lock = cache.lock(f"{cache_key_to_use}__lock", timeout=10)
                cache_lock.acquire()
            try:
                cached = cache.get(cache_key_to_use, SENTINEL)
                # We check again to make sure it hasn't been populated in the
                # meantime while acquiring the lock.
                if cached is SENTINEL:
                    if callable(default):
                        cached = default()
                    else:
                        cached = default

                    cache.set(
                        cache_key_to_use,
                        cached,
                        timeout=timeout,
                    )
            finally:
                if use_lock:
                    try:
                        cache_lock.release()
                    except LockNotOwnedError:
                        # If the lock release fails, it might be because of the
                        # timeout and it's been stolen, so we don't really care.
                        pass

        return cached

    def invalidate(self, key: None | str = None, invalidate_key: None | str = None):
        """
        Invalidates the cached value associated with the given key, ensuring that
        subsequent cache reads will miss and require a new value to be set.

        :param key: The cache key to invalidate.
        :param invalidate_key: The key to use for invalidation. If provided, this key
            must match the one given at cache creation.
        """

        version_key = self._get_version_cache_key(key, invalidate_key)

        try:
            cache.incr(version_key, 1)
        except ValueError:
            # If the cache key does not exist, initialize its versioning.
            cache.set(
                version_key,
                1,
                timeout=self.VERSION_KEY_TTL,
            )


global_cache = GlobalCache()
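When several related entries must be dropped together, the same `invalidate_key` can be shared across `get()` calls and then used once at invalidation time. A usage sketch built on the API above; the key names and callables are illustrative:

```python
schema = global_cache.get(
    "table_42__schema",
    default=lambda: compute_schema(),  # assumed expensive callable
    invalidate_key="table_42",
    timeout=3600,
)
context = global_cache.get(
    "table_42__context",
    default=lambda: compute_context(),  # assumed expensive callable
    invalidate_key="table_42",
    timeout=3600,
)

# One call bumps the shared version counter, so both entries miss afterwards.
global_cache.invalidate(invalidate_key="table_42")
```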
@@ -22,6 +22,7 @@ from django.contrib.contenttypes.models import ContentType
from django.db import DEFAULT_DB_ALIAS, connection, transaction
from django.db.models import ForeignKey, ManyToManyField, Max, Model, Prefetch, QuerySet
from django.db.models.functions import Collate
from django.db.models.query import ModelIterable
from django.db.models.sql.query import LOOKUP_SEP
from django.db.transaction import Atomic, get_connection

@@ -470,6 +471,7 @@ class MultiFieldPrefetchQuerysetMixin(Generic[ModelInstance]):
        if (
            self._multi_field_prefetch_related_funcs
            and not self._multi_field_prefetch_done
            and issubclass(self._iterable_class, ModelIterable)
        ):
            for f in self._multi_field_prefetch_related_funcs:
                f(self, self._result_cache)