mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-10 15:47:32 +00:00)
merge with base branch
This commit is contained in:
commit 87b7f19d75
1015 changed files with 99695 additions and 9224 deletions
Changed files (partial tree):

.env.example
.gitignore
.gitlab-ci.yml
README.md
app.json
backend/
  .flake8
  docker/
  flake8_plugins/
  requirements/
  src/baserow/
    api/
    cachalot_patch.py
    config/settings/
    contrib/
      builder/
        api/
        application_types.py
        apps.py
        constants.py
        data_providers/
        data_sources/
        domains/
        elements/
        formula_property_extractor.py
        handler.py
        locale/en/LC_MESSAGES/
        migrations/
          0043_buttonthemeconfigblock_button_font_weight_and_more.py
          0044_colorthemeconfigblock_custom_colors.py
          0045_element_style_background_radius_and_more.py
          0046_imagethemeconfigblock_image_radius.py
          0047_repeatelement_horizontal_gap_and_more.py
          0048_alter_buttonthemeconfigblock_button_background_color_and_more.py
          0049_element_style_width_child_alter_element_style_width.py
          0050_page_query_params.py
          0051_alter_builderworkflowaction_options.py
        pages/
        populate.py
        signals.py
        theme/
        types.py
        workflow_actions/
      dashboard/
      database/airtable/
.env.example
@@ -146,7 +146,7 @@ DATABASE_NAME=baserow
 # OTEL_TRACES_SAMPLER=traceidratio
 # OTEL_TRACES_SAMPLER_ARG=0.1
 # Always sample the root django and celery spans
-# OTEL_PER_MODULE_SAMPLER_OVERRIDES="opentelemetry.instrumentation.celery=always_on,opentelemetry.instrumentation.django=always_on"
+# OTEL_PER_MODULE_SAMPLER_OVERRIDES="opentelemetry.instrumentation.django=always_on"

 # BASEROW_CACHALOT_ENABLED=
 # BASEROW_CACHALOT_MODE=
5 .gitignore (vendored)
@@ -100,6 +100,7 @@ web-frontend/plugins/
 backend/plugins/
 web-frontend/reports/
 backend/reports/
+backend/.env.testing*

 .idea/
 *.iml
@@ -118,6 +119,10 @@ out/
 .vscode/
 !config/vscode/.vscode/

+# cursor editor config files
+.cursor-config/
+!config/cursor/.cursor-config/
+
 # VIM's swap files
 *.swp
142 .gitlab-ci.yml
@@ -27,12 +27,6 @@ variables:
   BUILD_ALL_IN_ONE:
     value: "false"
     description: "If true then regardless of what branch we are on the all in one image will be built."
-  BUILD_ALL_IN_ONE_PG_11:
-    value: "false"
-    description: "If true then regardless of what branch we are on the all in one image with embedded PostgreSQL version 11 (legacy) will be built."
-  BUILD_ALL_IN_ONE_PGAUTOUPGRADE:
-    value: "false"
-    description: "If true then regardless of what branch we are on the image which auto upgrades PostgreSQL data directory from version 11 to 15 will be built."
   # An image repo which is used for storing and passing images between ci pipeline jobs
   # and also speeding up ci builds by caching from the latest ci image when building.
   CI_IMAGE_REPO: $CI_REGISTRY_IMAGE/ci
@@ -53,8 +47,6 @@ variables:
   BACKEND_DEV_IMAGE_NAME: backend_dev
   WEBFRONTEND_IMAGE_NAME: web-frontend
   ALLINONE_IMAGE_NAME: baserow
-  ALLINONE_PG_11_IMAGE_NAME: baserow-pg11
-  ALLINONE_PGAUTOUPGRADE_IMAGE_NAME: baserow-pgautoupgrade
   CLOUDRON_IMAGE_NAME: cloudron
   HEROKU_IMAGE_NAME: heroku
   WEBFRONTEND_DEV_IMAGE_NAME: web-frontend_dev
@@ -67,8 +59,6 @@ variables:
   TESTED_BACKEND_CI_DEV_IMAGE: $CI_IMAGE_REPO/$BACKEND_DEV_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
   TESTED_WEBFRONTEND_CI_DEV_IMAGE: $CI_IMAGE_REPO/$WEBFRONTEND_DEV_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
   TESTED_ALLINONE_CI_IMAGE: $CI_IMAGE_REPO/$ALLINONE_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
-  TESTED_ALLINONE_PG_11_CI_IMAGE: $CI_IMAGE_REPO/$ALLINONE_PG_11_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
-  TESTED_ALLINONE_PGAUTOUPGRADE_CI_IMAGE: $CI_IMAGE_REPO/$ALLINONE_PGAUTOUPGRADE_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
   TESTED_CLOUDRON_CI_IMAGE: $CI_IMAGE_REPO/$CLOUDRON_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
   TESTED_HEROKU_CI_IMAGE: $CI_IMAGE_REPO/$HEROKU_IMAGE_NAME:$TESTED_IMAGE_PREFIX$CI_COMMIT_SHORT_SHA
   # Used to tag the latest images on $DEVELOP_BRANCH_NAME
@@ -80,8 +70,6 @@ variables:
   BACKEND_DOCKERFILE_PATH: $CI_PROJECT_DIR/backend/Dockerfile
   WEBFRONTEND_DOCKERFILE_PATH: $CI_PROJECT_DIR/web-frontend/Dockerfile
   ALLINONE_DOCKERFILE_PATH: $CI_PROJECT_DIR/deploy/all-in-one/Dockerfile
-  ALLINONE_PG_11_DOCKERFILE_PATH: $CI_PROJECT_DIR/deploy/all-in-one/pg11.Dockerfile
-  ALLINONE_PGAUTOUPGRADE_DOCKERFILE_PATH: $CI_PROJECT_DIR/deploy/all-in-one/pgautoupgrade.Dockerfile
   CLOUDRON_DOCKERFILE_PATH: $CI_PROJECT_DIR/deploy/cloudron/Dockerfile
   HEROKU_DOCKERFILE_PATH: $CI_PROJECT_DIR/heroku.Dockerfile
   # By default, forks can use the baserow projects ci util image so they don't have
@@ -547,40 +535,6 @@ build-all-in-one-image:
     BUILD_FROM_BACKEND_IMAGE: $TESTED_BACKEND_CI_IMAGE
     BUILD_FROM_WEBFRONTEND_IMAGE: $TESTED_WEBFRONTEND_CI_IMAGE

-build-all-in-one-embedded-pg-11-image:
-  extends: .build-final-baserow-image
-  needs:
-    - job: build-all-in-one-image
-  tags:
-    - saas-linux-medium-amd64
-  only:
-    variables:
-      - $CI_COMMIT_BRANCH == $MASTER_BRANCH_NAME
-      - $CI_COMMIT_BRANCH == $DEVELOP_BRANCH_NAME
-      - $BUILD_ALL_IN_ONE_PG_11 == "true"
-      - $CI_COMMIT_MESSAGE =~ /\[build-all]/
-  variables:
-    IMAGE_NAME: $ALLINONE_PG_11_IMAGE_NAME
-    DOCKERFILE_PATH: $ALLINONE_PG_11_DOCKERFILE_PATH
-    BUILD_FROM_IMAGE: $TESTED_ALLINONE_CI_IMAGE
-
-build-all-in-one-embedded-pg-15-auto-upgrade-image:
-  extends: .build-final-baserow-image
-  needs:
-    - job: build-all-in-one-image
-  tags:
-    - saas-linux-medium-amd64
-  only:
-    variables:
-      - $CI_COMMIT_BRANCH == $MASTER_BRANCH_NAME
-      - $CI_COMMIT_BRANCH == $DEVELOP_BRANCH_NAME
-      - $BUILD_ALL_IN_ONE_PGAUTOUPGRADE == "true"
-      - $CI_COMMIT_MESSAGE =~ /\[build-all]/
-  variables:
-    IMAGE_NAME: $ALLINONE_PGAUTOUPGRADE_IMAGE_NAME
-    DOCKERFILE_PATH: $ALLINONE_PGAUTOUPGRADE_DOCKERFILE_PATH
-    BUILD_FROM_IMAGE: $TESTED_ALLINONE_CI_IMAGE
-
 # If pipeline not triggered by tag:
 # - Build and store cloudron image in CI repo under the `ci-tested` tag so we know
 #   those images have passed the tests.
@@ -799,102 +753,6 @@ publish-all-in-one-latest-release-image:
     TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
     TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER

-# Push baserow/baserow-pg11:develop-latest
-publish-all-in-one-embedded-pg-11-develop-latest-image:
-  extends: .publish-baserow-image
-  only:
-    variables:
-      - $CI_COMMIT_BRANCH == $DEVELOP_BRANCH_NAME
-  dependencies: []
-  variables:
-    SKIP_IF_NOT_LATEST_COMMIT_ON_BRANCH: $DEVELOP_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PG_11_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PG_11_IMAGE_NAME:$DEVELOP_LATEST_TAG"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
-# Push baserow/baserow-pg11:$VERSION_GIT_TAG
-publish-all-in-one-embedded-pg-11-release-tagged-image:
-  extends: .publish-baserow-image
-  only:
-    refs:
-      - tags
-  dependencies: []
-  variables:
-    SKIP_IF_TAG_NOT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PG_11_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PG_11_IMAGE_NAME:$CI_COMMIT_TAG"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
-# Push baserow/baserow-pg11:latest
-publish-all-in-one-embedded-pg-11-latest-release-image:
-  extends: .publish-baserow-image
-  only:
-    refs:
-      - tags
-    variables:
-      - $CI_COMMIT_TAG =~ /^[0-9.]+$/
-  dependencies: []
-  variables:
-    SKIP_IF_NOT_LATEST_COMMIT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SKIP_IF_TAG_NOT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PG_11_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PG_11_IMAGE_NAME:latest"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
-# Push baserow/baserow-pgautoupgrade:develop-latest
-publish-all-in-one-embedded-pg-15-upgrade-develop-latest-image:
-  extends: .publish-baserow-image
-  only:
-    variables:
-      - $CI_COMMIT_BRANCH == $DEVELOP_BRANCH_NAME
-  dependencies: []
-  variables:
-    SKIP_IF_NOT_LATEST_COMMIT_ON_BRANCH: $DEVELOP_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PGAUTOUPGRADE_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PGAUTOUPGRADE_IMAGE_NAME:$DEVELOP_LATEST_TAG"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
-# Push baserow/baserow-pgautoupgrade:$VERSION_GIT_TAG
-publish-all-in-one-embedded-pg-15-auto-upgrade-release-tagged-image:
-  extends: .publish-baserow-image
-  only:
-    refs:
-      - tags
-  dependencies: []
-  variables:
-    SKIP_IF_TAG_NOT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PGAUTOUPGRADE_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PGAUTOUPGRADE_IMAGE_NAME:$CI_COMMIT_TAG"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
-# Push baserow/baserow-pgautoupgrade:latest
-publish-all-in-one-embedded-pg-15-auto-upgrade-latest-release-image:
-  extends: .publish-baserow-image
-  only:
-    refs:
-      - tags
-    variables:
-      - $CI_COMMIT_TAG =~ /^[0-9.]+$/
-  dependencies: []
-  variables:
-    SKIP_IF_NOT_LATEST_COMMIT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SKIP_IF_TAG_NOT_ON_BRANCH: $MASTER_BRANCH_NAME
-    SOURCE_IMAGE: $TESTED_ALLINONE_PGAUTOUPGRADE_CI_IMAGE
-    TARGET_IMAGE: "$RELEASE_IMAGE_REPO/$ALLINONE_PGAUTOUPGRADE_IMAGE_NAME:latest"
-    TARGET_REGISTRY: $RELEASE_REGISTRY
-    TARGET_REGISTRY_PASSWORD: $RELEASE_REGISTRY_PASSWORD
-    TARGET_REGISTRY_USER: $RELEASE_REGISTRY_USER
-
 # Push baserow/cloudron:develop-latest
 publish-cloudron-develop-latest-image:
   extends: .publish-baserow-image
README.md
@@ -14,10 +14,10 @@ tool gives you the powers of a developer without leaving your browser.
 * Uses popular frameworks and tools like [Django](https://www.djangoproject.com/),
   [Vue.js](https://vuejs.org/) and [PostgreSQL](https://www.postgresql.org/).

-[](https://heroku.com/deploy?template=https://github.com/bram2w/baserow/tree/master)
+[](https://www.heroku.com/deploy/?template=https://github.com/bram2w/baserow/tree/master)

 ```bash
-docker run -v baserow_data:/baserow/data -p 80:80 -p 443:443 baserow/baserow:1.30.1
+docker run -v baserow_data:/baserow/data -p 80:80 -p 443:443 baserow/baserow:1.31.1
 ```

 [image]

@@ -89,7 +89,7 @@ Created by Baserow B.V. - bram@baserow.io.

 Distributes under the MIT license. See `LICENSE` for more information.

-Version: 1.30.1
+Version: 1.31.1

 The official repository can be found at https://gitlab.com/baserow/baserow.
4 app.json
@@ -6,8 +6,8 @@
   "logo": "https://baserow.io/img/favicon_192.png",
   "success_url": "/",
   "addons": [
-    "heroku-postgresql:mini",
-    "heroku-redis:mini",
+    "heroku-postgresql:essential-0",
+    "heroku-redis:premium-0",
     "mailgun:starter"
   ],
   "formation": {
backend/.flake8
@@ -7,6 +7,7 @@ per-file-ignores =
     ../enterprise/backend/tests/*: F841
     src/baserow/contrib/database/migrations/*: X1
    src/baserow/core/migrations/*: X1
+    src/baserow/core/psycopg.py: BRP001
 exclude =
     .git,
     __pycache__,

@@ -16,4 +17,5 @@ exclude =
 [flake8:local-plugins]
 extension =
     X1 = flake8_baserow:DocstringPlugin
+    BRP001 = flake8_baserow:BaserowPsycopgChecker
 paths = ./flake8_plugins
backend/docker/docker-entrypoint.sh
@@ -6,7 +6,7 @@ set -euo pipefail
 # ENVIRONMENT VARIABLES USED DIRECTLY BY THIS ENTRYPOINT
 # ======================================================

-export BASEROW_VERSION="1.30.1"
+export BASEROW_VERSION="1.31.1"

 # Used by docker-entrypoint.sh to start the dev server
 # If not configured you'll receive this: CommandError: "0.0.0.0:" is not a valid port number or address:port pair.

@@ -55,7 +55,10 @@ DATABASE_PASSWORD=$DATABASE_PASSWORD \
 DATABASE_OPTIONS=$DATABASE_OPTIONS \
 python3 << END
 import sys
-import psycopg
+try:
+    import psycopg
+except ImportError:
+    import psycopg2 as psycopg
 import json
 import os
 DATABASE_NAME=os.getenv('DATABASE_NAME')

@@ -99,7 +102,10 @@ else
 DATABASE_URL=$DATABASE_URL \
 python3 << END
 import sys
-import psycopg
+try:
+    import psycopg
+except ImportError:
+    import psycopg2 as psycopg
 import os
 DATABASE_URL=os.getenv('DATABASE_URL')
 try:
@@ -1 +1 @@
-from .flake8_baserow import DocstringPlugin
+from .flake8_baserow import DocstringPlugin, BaserowPsycopgChecker
backend/flake8_plugins/flake8_baserow/__init__.py
@@ -1,3 +1,4 @@
 from .docstring import Plugin as DocstringPlugin
+from .psycopg import BaserowPsycopgChecker

-__all__ = ["DocstringPlugin"]
+__all__ = ["DocstringPlugin", "BaserowPsycopgChecker"]
30 backend/flake8_plugins/flake8_baserow/psycopg.py (new file)
@@ -0,0 +1,30 @@
+import ast
+from typing import Iterator, Tuple, Any
+
+class BaserowPsycopgChecker:
+    name = 'flake8-baserow-psycopg'
+    version = '0.1.0'
+
+    def __init__(self, tree: ast.AST, filename: str):
+        self.tree = tree
+        self.filename = filename
+
+    def run(self) -> Iterator[Tuple[int, int, str, Any]]:
+        for node in ast.walk(self.tree):
+            if isinstance(node, ast.Import):
+                for alias in node.names:
+                    if alias.name in ('psycopg', 'psycopg2'):
+                        yield (
+                            node.lineno,
+                            node.col_offset,
+                            'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
+                            type(self)
+                        )
+            elif isinstance(node, ast.ImportFrom):
+                if node.module in ('psycopg', 'psycopg2'):
+                    yield (
+                        node.lineno,
+                        node.col_offset,
+                        'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
+                        type(self)
+                    )
38 backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py (new file)
@@ -0,0 +1,38 @@
+import ast
+from flake8_baserow.psycopg import BaserowPsycopgChecker
+
+
+def run_checker(code: str):
+    tree = ast.parse(code)
+    checker = BaserowPsycopgChecker(tree, 'test.py')
+    return list(checker.run())
+
+def test_direct_import():
+    code = '''
+import psycopg
+import psycopg2
+from psycopg import connect
+from psycopg2 import connect as pg_connect
+'''
+    errors = run_checker(code)
+    assert len(errors) == 4
+    assert all(error[2].startswith('BRP001') for error in errors)
+
+def test_allowed_import():
+    code = '''
+from baserow.core.psycopg import connect
+from baserow.core.psycopg import psycopg2
+'''
+    errors = run_checker(code)
+    assert len(errors) == 0
+
+def test_mixed_imports():
+    code = '''
+import psycopg
+from baserow.core.psycopg import connect
+from psycopg2 import connect as pg_connect
+'''
+    errors = run_checker(code)
+    assert len(errors) == 2
+    assert errors[0][2].startswith('BRP001')
+    assert errors[1][2].startswith('BRP001')
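For context: the BRP001 rule above steers all driver imports through `baserow.core.psycopg`. That compat module itself is excluded from the rule in `.flake8` but its contents are not part of this diff; the following is only a hypothetical sketch of what such a wrapper could look like, assuming it prefers psycopg 3 and falls back to psycopg2 the same way the docker-entrypoint script does:

```python
# Hypothetical sketch of a baserow.core.psycopg compat module (not shown in this diff).
try:
    import psycopg
    from psycopg import sql  # exposes sql.Composed, which cachalot_patch.py patches

    psycopg2 = None
except ImportError:
    import psycopg2
    from psycopg2 import sql

    psycopg = None
```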
backend/requirements/base.in
@@ -2,8 +2,7 @@ django==5.0.9
 django-cors-headers==4.3.1
 djangorestframework==3.15.1
 djangorestframework-simplejwt==5.3.1
-psycopg==3.2.3
-psycopg-binary==3.2.3
+psycopg2==2.9.10
 Faker==25.0.1
 Twisted==24.3.0
 gunicorn==22.0.0

@@ -52,6 +51,7 @@ opentelemetry-instrumentation-dbapi==0.50b0
 opentelemetry-instrumentation-grpc==0.50b0
 opentelemetry-instrumentation-logging==0.50b0
 opentelemetry-instrumentation-redis==0.50b0
 opentelemetry-instrumentation-psycopg2==0.50b0
+opentelemetry-instrumentation-psycopg==0.50b0
 opentelemetry-instrumentation-requests==0.50b0
 opentelemetry-instrumentation-wsgi==0.50b0
backend/requirements/base.txt
@@ -360,6 +360,7 @@ opentelemetry-api==1.29.0
   #   opentelemetry-instrumentation-grpc
   #   opentelemetry-instrumentation-logging
+  #   opentelemetry-instrumentation-psycopg
   #   opentelemetry-instrumentation-psycopg2
   #   opentelemetry-instrumentation-redis
   #   opentelemetry-instrumentation-requests
   #   opentelemetry-instrumentation-wsgi

@@ -382,6 +383,7 @@ opentelemetry-instrumentation==0.50b0
   #   opentelemetry-instrumentation-grpc
   #   opentelemetry-instrumentation-logging
+  #   opentelemetry-instrumentation-psycopg
   #   opentelemetry-instrumentation-psycopg2
   #   opentelemetry-instrumentation-redis
   #   opentelemetry-instrumentation-requests
   #   opentelemetry-instrumentation-wsgi

@@ -397,6 +399,7 @@ opentelemetry-instrumentation-dbapi==0.50b0
   # via
   #   -r base.in
+  #   opentelemetry-instrumentation-psycopg
   #   opentelemetry-instrumentation-psycopg2
 opentelemetry-instrumentation-django==0.50b0
   # via -r base.in
 opentelemetry-instrumentation-grpc==0.50b0

@@ -405,6 +408,8 @@ opentelemetry-instrumentation-logging==0.50b0
   # via -r base.in
+opentelemetry-instrumentation-psycopg==0.50b0
+  # via -r base.in
 opentelemetry-instrumentation-psycopg2==0.50b0
   # via -r base.in
 opentelemetry-instrumentation-redis==0.50b0
   # via -r base.in
 opentelemetry-instrumentation-requests==0.50b0

@@ -480,9 +485,7 @@ protobuf==5.29.2
   #   proto-plus
 psutil==5.9.8
   # via -r base.in
-psycopg==3.2.3
-  # via -r base.in
-psycopg-binary==3.2.3
+psycopg2==2.9.10
+  # via -r base.in
 pyasn1==0.6.1
   # via
backend/requirements/dev.in
@@ -40,6 +40,7 @@ mypy-extensions==1.0.0
 isort==5.13.2
 ipython
 fakeredis[lua]==2.23.2
+pytest-retry==1.7.0
 ipdb
 # build is used to compile a wheel package with `python -m build .` command.
 build
backend/requirements/dev.txt
@@ -224,6 +224,7 @@ pytest==8.2.0
   #   pytest-metadata
   #   pytest-mock
   #   pytest-ordering
+  #   pytest-retry
   #   pytest-split
   #   pytest-unordered
   #   pytest-xdist

@@ -245,6 +246,8 @@ pytest-mock==3.14.0
   # via -r dev.in
 pytest-ordering==0.6
   # via -r dev.in
+pytest-retry==1.7.0
+  # via -r dev.in
 pytest-split==0.8.2
   # via -r dev.in
 pytest-unordered==0.6.0
src/baserow/api/applications/views.py
@@ -85,28 +85,30 @@ class AllApplicationsView(APIView):
         returned.
         """

-        workspaces = Workspace.objects.filter(users=request.user)
+        workspaces = Workspace.objects.filter(users=request.user).prefetch_related(
+            "workspaceuser_set", "template_set"
+        )

         # Compute list of readable application ids
-        applications_ids = []
+        all_applications_qs = Application.objects.none()
         for workspace in workspaces:
             applications = Application.objects.filter(
                 workspace=workspace, workspace__trashed=False
-            )
-            applications = CoreHandler().filter_queryset(
+            ).select_related("content_type")
+            applications_qs = CoreHandler().filter_queryset(
                 request.user,
                 ListApplicationsWorkspaceOperationType.type,
                 applications,
                 workspace=workspace,
             )
-            applications_ids += applications.values_list("id", flat=True)
+            all_applications_qs = all_applications_qs.union(applications_qs)

         # Then filter with these ids
         applications = specific_iterator(
             Application.objects.select_related("content_type", "workspace")
             .prefetch_related("workspace__template_set")
-            .filter(id__in=applications_ids)
-            .order_by("workspace_id", "order"),
+            .filter(id__in=all_applications_qs.values("id"))
+            .order_by("workspace_id", "order", "id"),
             per_content_type_queryset_hook=(
                 lambda model, queryset: application_type_registry.get_by_model(
                     model
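The refactor above replaces a per-workspace Python list of ids with a single UNION of querysets that the database can evaluate as one `id__in` subquery. A minimal standalone sketch of the pattern (function and names are illustrative, not from the diff):

```python
from django.db.models import Model, QuerySet


def union_all(querysets: list[QuerySet], model: type[Model]) -> QuerySet:
    # Start from an empty queryset so that zero inputs still yields a valid queryset.
    combined = model.objects.none()
    for qs in querysets:
        combined = combined.union(qs)
    return combined

# combined.values("id") can then be used as a subquery, letting the database
# resolve the readable ids in one pass:
#   model.objects.filter(id__in=combined.values("id"))
```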
src/baserow/api/authentication.py
@@ -7,6 +7,7 @@ from rest_framework_simplejwt.exceptions import InvalidToken
 from rest_framework_simplejwt.settings import api_settings as jwt_settings

 from baserow.api.user.errors import ERROR_INVALID_ACCESS_TOKEN
+from baserow.core.sentry import setup_user_in_sentry
 from baserow.core.telemetry.utils import setup_user_in_baggage_and_spans
 from baserow.core.user.exceptions import DeactivatedUserException

@@ -65,6 +66,7 @@ class JSONWebTokenAuthentication(JWTAuthentication):

         set_user_session_data_from_request(user, request)
         setup_user_in_baggage_and_spans(user, request)
+        setup_user_in_sentry(user)

         return user, token
src/baserow/api/services/errors.py
@@ -5,3 +5,15 @@ ERROR_SERVICE_INVALID_TYPE = (
     HTTP_400_BAD_REQUEST,
     "The service type does not exist.",
 )
+
+ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST = (
+    "ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST",
+    HTTP_400_BAD_REQUEST,
+    "A data source filter is misconfigured: {e}",
+)
+
+ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST = (
+    "ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST",
+    HTTP_400_BAD_REQUEST,
+    "A data source sort is misconfigured: {e}",
+)
src/baserow/cachalot_patch.py
@@ -6,49 +6,60 @@ from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured
 from django.db.transaction import get_connection

-from cachalot import utils as cachalot_utils
-from cachalot.settings import cachalot_settings
 from django_redis import get_redis_connection
 from loguru import logger
-from psycopg.sql import Composed

+from baserow.core.psycopg import sql

-@contextmanager
-def cachalot_enabled():
-    """
-    A context manager that enables cachalot for the duration of the context. This is
-    useful when you want to enable cachalot for a specific query but you don't want
-    to enable it globally.
-    Please note that the query have to be executed within the context of the context
-    manager in order for it to be cached.
-    """
+if settings.CACHALOT_ENABLED:
+    from cachalot.settings import cachalot_disabled, cachalot_settings  # noqa: F401

-    from cachalot.api import LOCAL_STORAGE
+    from cachalot.api import LOCAL_STORAGE

-    was_enabled = getattr(
-        LOCAL_STORAGE, "cachalot_enabled", cachalot_settings.CACHALOT_ENABLED
-    )
-    LOCAL_STORAGE.cachalot_enabled = True
-    try:
-        yield
-    finally:
-        LOCAL_STORAGE.cachalot_enabled = was_enabled
+    @contextmanager
+    def cachalot_enabled():
+        """
+        A context manager that enables cachalot for the duration of the context. This is
+        useful when you want to enable cachalot for a specific query but you don't want
+        to enable it globally. Please note that the query have to be executed within the
+        context of the context manager in order for it to be cached.
+        """
+
+        was_enabled = getattr(
+            LOCAL_STORAGE, "cachalot_enabled", cachalot_settings.CACHALOT_ENABLED
+        )
+        LOCAL_STORAGE.cachalot_enabled = True
+        try:
+            yield
+        finally:
+            LOCAL_STORAGE.cachalot_enabled = was_enabled
+
+else:
+
+    @contextmanager
+    def cachalot_enabled():
+        yield
+
+    @contextmanager
+    def cachalot_disabled():
+        yield


 def patch_cachalot_for_baserow():
     """
-    This function patches the cachalot library to make it work with baserow
-    dynamic models. The problem we're trying to solve here is that the only way
-    to limit what cachalot caches is to provide a fix list of tables, but
-    baserow creates dynamic models on the fly so we can't know what tables will
-    be created in advance, so we need to include all the tables that start with
-    the USER_TABLE_DATABASE_NAME_PREFIX prefix in the list of cachable tables.
+    This function patches the cachalot library to make it work with baserow dynamic
+    models. The problem we're trying to solve here is that the only way to limit what
+    cachalot caches is to provide a fix list of tables, but baserow creates dynamic
+    models on the fly so we can't know what tables will be created in advance, so we
+    need to include all the tables that start with the USER_TABLE_DATABASE_NAME_PREFIX
+    prefix in the list of cachable tables.

-    `filter_cachable` and `is_cachable` are called to invalidate the cache when
-    a table is changed. `are_all_cachable` is called to check if a query can be
-    cached.
+    `filter_cachable` and `is_cachable` are called to invalidate the cache when a table
+    is changed. `are_all_cachable` is called to check if a query can be cached.
     """

+    from cachalot import utils as cachalot_utils
+
     from baserow.contrib.database.table.constants import (
         LINK_ROW_THROUGH_TABLE_PREFIX,
         MULTIPLE_COLLABORATOR_THROUGH_TABLE_PREFIX,
@@ -97,13 +108,12 @@ def patch_cachalot_for_baserow():
     @wraps(original_are_all_cachable)
     def patched_are_all_cachable(tables):
         """
-        This patch works because cachalot does not explicitly set this thread
-        local variable, but it assumes to be True by default if CACHALOT_ENABLED
-        is not set otherwise. Since we are explicitly setting it to True in our
-        code for the query we want to cache, we can check if the value has been
-        set or not to exclude our dynamic tables from the list of tables that
-        cachalot will check, making all of them cachable for the queries
-        wrapped in the `cachalot_enabled` context manager.
+        This patch works because cachalot does not explicitly set this thread local
+        variable, but it assumes to be True by default if CACHALOT_ENABLED is not set
+        otherwise. Since we are explicitly setting it to True in our code for the query
+        we want to cache, we can check if the value has been set or not to exclude our
+        dynamic tables from the list of tables that cachalot will check, making all of
+        them cachable for the queries wrapped in the `cachalot_enabled` context manager.
         """

         from cachalot.api import LOCAL_STORAGE
@@ -146,14 +156,14 @@ def patch_cachalot_for_baserow():
         cursor = get_connection().cursor()
         return self.as_string(cursor.cursor).lower()

-    Composed.lower = lower
+    sql.Composed.lower = lower


 def clear_cachalot_cache():
     """
-    This function clears the cachalot cache. It can be used in the tests to make
-    sure that the cache is cleared between tests or as post_migrate receiver to
-    ensure to start with a clean cache after migrations.
+    This function clears the cachalot cache. It can be used in the tests to make sure
+    that the cache is cleared between tests or as post_migrate receiver to ensure to
+    start with a clean cache after migrations.
     """

     from django.conf import settings
@@ -179,9 +189,8 @@ def clear_cachalot_cache():

 def _delete_pattern(key_prefix: str) -> int:
     """
-    Allows deleting every redis key that matches a pattern. Copied from the
-    django-redis implementation but modified to allow deleting all versions in the
-    cache at once.
+    Allows deleting every redis key that matches a pattern. Copied from the django-redis
+    implementation but modified to allow deleting all versions in the cache at once.
     """

     client = get_redis_connection("default")
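A short usage sketch of the context manager defined above. Note that after this change `cachalot_enabled` is only a real toggle when `settings.CACHALOT_ENABLED` is true, and a no-op otherwise; the table model below is illustrative:

```python
from baserow.cachalot_patch import cachalot_enabled


def count_rows_cached(table_model):
    # Only queries executed inside the context manager are eligible for caching.
    with cachalot_enabled():
        return table_model.objects.count()
```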
src/baserow/config/settings/base.py
@@ -15,8 +15,8 @@ import posthog
 import sentry_sdk
 from corsheaders.defaults import default_headers
 from sentry_sdk.integrations.django import DjangoIntegration
+from sentry_sdk.scrubber import DEFAULT_DENYLIST, EventScrubber

-from baserow.cachalot_patch import patch_cachalot_for_baserow
 from baserow.config.settings.utils import (
     Setting,
     get_crontab_from_env,
@@ -109,6 +109,7 @@ MIDDLEWARE = [
     "django.contrib.sessions.middleware.SessionMiddleware",
     "django.middleware.common.CommonMiddleware",
     "django.middleware.csrf.CsrfViewMiddleware",
+    "baserow.core.cache.LocalCacheMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "baserow.api.user_sources.middleware.AddUserSourceUserMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
@@ -258,93 +259,16 @@ CACHES = {
     },
 }

-
-CACHALOT_TIMEOUT = int(os.getenv("BASEROW_CACHALOT_TIMEOUT", 60 * 60 * 24 * 7))
-BASEROW_CACHALOT_ONLY_CACHABLE_TABLES = os.getenv(
-    "BASEROW_CACHALOT_ONLY_CACHABLE_TABLES", None
-)
-BASEROW_CACHALOT_MODE = os.getenv("BASEROW_CACHALOT_MODE", "default")
-if BASEROW_CACHALOT_MODE == "full":
-    CACHALOT_ONLY_CACHABLE_TABLES = []
-
-elif BASEROW_CACHALOT_ONLY_CACHABLE_TABLES:
-    # Please avoid to add tables with more than 50 modifications per minute
-    # to this list, as described here:
-    # https://django-cachalot.readthedocs.io/en/latest/limits.html
-    CACHALOT_ONLY_CACHABLE_TABLES = BASEROW_CACHALOT_ONLY_CACHABLE_TABLES.split(",")
-else:
-    CACHALOT_ONLY_CACHABLE_TABLES = [
-        "auth_user",
-        "django_content_type",
-        "core_settings",
-        "core_userprofile",
-        "core_application",
-        "core_operation",
-        "core_template",
-        "core_trashentry",
-        "core_workspace",
-        "core_workspaceuser",
-        "core_workspaceuserinvitation",
-        "core_authprovidermodel",
-        "core_passwordauthprovidermodel",
-        "database_database",
-        "database_table",
-        "database_field",
-        "database_fieldependency",
-        "database_linkrowfield",
-        "database_selectoption",
-        "baserow_premium_license",
-        "baserow_premium_licenseuser",
-        "baserow_enterprise_role",
-        "baserow_enterprise_roleassignment",
-        "baserow_enterprise_team",
-        "baserow_enterprise_teamsubject",
-    ]
-
-# This list will have priority over CACHALOT_ONLY_CACHABLE_TABLES.
-BASEROW_CACHALOT_UNCACHABLE_TABLES = os.getenv(
-    "BASEROW_CACHALOT_UNCACHABLE_TABLES", None
-)
-
-if BASEROW_CACHALOT_UNCACHABLE_TABLES:
-    CACHALOT_UNCACHABLE_TABLES = list(
-        filter(bool, BASEROW_CACHALOT_UNCACHABLE_TABLES.split(","))
-    )
-
-CACHALOT_ENABLED = os.getenv("BASEROW_CACHALOT_ENABLED", "false") == "true"
-CACHALOT_CACHE = "cachalot"
-CACHALOT_UNCACHABLE_TABLES = [
-    "django_migrations",
-    "core_action",
-    "database_token",
-    "baserow_enterprise_auditlogentry",
-]
-
 BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS = int(
     # Default TTL is 10 minutes: 60 seconds * 10
     os.getenv("BASEROW_BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS")
     or 600
 )
-
-
-def install_cachalot():
-    global INSTALLED_APPS
-
-    INSTALLED_APPS.append("cachalot")
-
-    patch_cachalot_for_baserow()
-
-
-if CACHALOT_ENABLED:
-    install_cachalot()
-
-    CACHES[CACHALOT_CACHE] = {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": REDIS_URL,
-        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
-        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
-        "VERSION": VERSION,
-    }
+BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS = int(
+    # Default TTL is 5 minutes
+    os.getenv("BASEROW_BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS")
+    or 300
+)


 CELERY_SINGLETON_BACKEND_CLASS = (
@@ -499,7 +423,7 @@ SPECTACULAR_SETTINGS = {
         "name": "MIT",
         "url": "https://gitlab.com/baserow/baserow/-/blob/master/LICENSE",
     },
-    "VERSION": "1.30.1",
+    "VERSION": "1.31.1",
     "SERVE_INCLUDE_SCHEMA": False,
     "TAGS": [
         {"name": "Settings"},
@@ -521,6 +445,7 @@ SPECTACULAR_SETTINGS = {
         {"name": "Database table view sortings"},
         {"name": "Database table view decorations"},
         {"name": "Database table view groupings"},
+        {"name": "Database table view export"},
         {"name": "Database table grid view"},
         {"name": "Database table gallery view"},
         {"name": "Database table form view"},
@@ -1278,12 +1203,14 @@ for plugin in [*BASEROW_BUILT_IN_PLUGINS, *BASEROW_BACKEND_PLUGIN_NAMES]:

 SENTRY_BACKEND_DSN = os.getenv("SENTRY_BACKEND_DSN")
 SENTRY_DSN = SENTRY_BACKEND_DSN or os.getenv("SENTRY_DSN")
+SENTRY_DENYLIST = DEFAULT_DENYLIST + ["username", "email", "name"]

 if SENTRY_DSN:
     sentry_sdk.init(
         dsn=SENTRY_DSN,
         integrations=[DjangoIntegration(signals_spans=False, middleware_spans=False)],
         send_default_pii=False,
+        event_scrubber=EventScrubber(recursive=True, denylist=SENTRY_DENYLIST),
         environment=os.getenv("SENTRY_ENVIRONMENT", ""),
     )
@@ -1341,3 +1268,90 @@ BASEROW_DEFAULT_ZIP_COMPRESS_LEVEL = 5
 BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE = int(
     os.getenv("BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE", "") or 10
 )
+
+BASEROW_USE_LOCAL_CACHE = str_to_bool(os.getenv("BASEROW_USE_LOCAL_CACHE", "true"))
+
+# -- CACHALOT SETTINGS --
+CACHALOT_TIMEOUT = int(os.getenv("BASEROW_CACHALOT_TIMEOUT", 60 * 60 * 24 * 7))
+BASEROW_CACHALOT_ONLY_CACHABLE_TABLES = os.getenv(
+    "BASEROW_CACHALOT_ONLY_CACHABLE_TABLES", None
+)
+BASEROW_CACHALOT_MODE = os.getenv("BASEROW_CACHALOT_MODE", "default")
+if BASEROW_CACHALOT_MODE == "full":
+    CACHALOT_ONLY_CACHABLE_TABLES = []
+
+elif BASEROW_CACHALOT_ONLY_CACHABLE_TABLES:
+    # Please avoid to add tables with more than 50 modifications per minute to this
+    # list, as described here:
+    # https://django-cachalot.readthedocs.io/en/latest/limits.html
+    CACHALOT_ONLY_CACHABLE_TABLES = BASEROW_CACHALOT_ONLY_CACHABLE_TABLES.split(",")
+else:
+    CACHALOT_ONLY_CACHABLE_TABLES = [
+        "auth_user",
+        "django_content_type",
+        "core_settings",
+        "core_userprofile",
+        "core_application",
+        "core_operation",
+        "core_template",
+        "core_trashentry",
+        "core_workspace",
+        "core_workspaceuser",
+        "core_workspaceuserinvitation",
+        "core_authprovidermodel",
+        "core_passwordauthprovidermodel",
+        "database_database",
+        "database_table",
+        "database_field",
+        "database_fieldependency",
+        "database_linkrowfield",
+        "database_selectoption",
+        "baserow_premium_license",
+        "baserow_premium_licenseuser",
+        "baserow_enterprise_role",
+        "baserow_enterprise_roleassignment",
+        "baserow_enterprise_team",
+        "baserow_enterprise_teamsubject",
+    ]
+
+# This list will have priority over CACHALOT_ONLY_CACHABLE_TABLES.
+BASEROW_CACHALOT_UNCACHABLE_TABLES = os.getenv(
+    "BASEROW_CACHALOT_UNCACHABLE_TABLES", None
+)
+
+if BASEROW_CACHALOT_UNCACHABLE_TABLES:
+    CACHALOT_UNCACHABLE_TABLES = list(
+        filter(bool, BASEROW_CACHALOT_UNCACHABLE_TABLES.split(","))
+    )
+
+CACHALOT_ENABLED = str_to_bool(os.getenv("BASEROW_CACHALOT_ENABLED", ""))
+CACHALOT_CACHE = "cachalot"
+CACHALOT_UNCACHABLE_TABLES = [
+    "django_migrations",
+    "core_action",
+    "database_token",
+    "baserow_enterprise_auditlogentry",
+]
+
+
+def install_cachalot():
+    from baserow.cachalot_patch import patch_cachalot_for_baserow
+
+    global INSTALLED_APPS
+
+    INSTALLED_APPS.append("cachalot")
+
+    patch_cachalot_for_baserow()
+
+
+if CACHALOT_ENABLED:
+    install_cachalot()
+
+    CACHES[CACHALOT_CACHE] = {
+        "BACKEND": "django_redis.cache.RedisCache",
+        "LOCATION": REDIS_URL,
+        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
+        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
+        "VERSION": VERSION,
+    }
+# -- END CACHALOT SETTINGS --
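How the relocated settings block above resolves for a few example environment values; this is an illustration based only on the code in this hunk, and assumes `str_to_bool` treats "true" as truthy:

```python
# BASEROW_CACHALOT_ENABLED=true  -> CACHALOT_ENABLED is True, install_cachalot() runs
#                                   and a dedicated redis-backed "cachalot" cache is added.
# BASEROW_CACHALOT_MODE=full     -> CACHALOT_ONLY_CACHABLE_TABLES = [] (no allow-list).
# BASEROW_CACHALOT_ONLY_CACHABLE_TABLES=core_settings,core_workspace
#                                -> only those two tables are considered cachable.
# BASEROW_CACHALOT_UNCACHABLE_TABLES=database_table
#                                -> takes priority over the allow-list for that table.
```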
src/baserow/config/settings/test.py
@@ -1,7 +1,28 @@
 # flake8: noqa: F405
+import os
 from copy import deepcopy
+from unittest.mock import patch

-from .base import *  # noqa: F403, F401
+from dotenv import dotenv_values
+
+# Create a .env.testing file in the backend directory to store different test settings and
+# override the default ones. For different test settings, provide the TEST_ENV_FILE
+# environment variable with the name of the file to use. Everything that starts with
+# .env.testing will be ignored by git.
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+TEST_ENV_FILE = os.getenv("TEST_ENV_FILE", ".env.testing")
+TEST_ENV_VARS = dotenv_values(os.path.join(BASE_DIR, f"../../../{TEST_ENV_FILE}"))
+
+
+def getenv_for_tests(key: str, default: str = "") -> str:
+    return TEST_ENV_VARS.get(key, default)
+
+
+with patch("os.getenv", getenv_for_tests) as load_dotenv:
+    # Avoid loading .env settings to prevent conflicts with the test settings,
+    # but allow custom settings to be loaded from the .env.test file in the
+    # backend root directory.
+    from .base import *  # noqa: F403, F401

 TESTS = True
@@ -43,11 +64,6 @@ CACHES = {
         "KEY_PREFIX": f"baserow-{GENERATED_MODEL_CACHE_NAME}-cache",
         "VERSION": None,
     },
-    CACHALOT_CACHE: {
-        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
-        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
-        "VERSION": None,
-    },
 }

 # Disable the default throttle classes because ConcurrentUserRequestsThrottle is
@@ -55,12 +71,9 @@ CACHES = {
 # Look into tests.baserow.api.test_api_utils.py if you need to test the throttle
 REST_FRAMEWORK["DEFAULT_THROTTLE_CLASSES"] = []

-if "cachalot" not in INSTALLED_APPS:
-    install_cachalot()
-
-CACHALOT_ENABLED = False
-
 BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS = 10
+BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS = 300

 AUTO_INDEX_VIEW_ENABLED = False
 # For ease of testing tests assume this setting is set to this. Set it explicitly to
@@ -72,20 +85,6 @@ BASEROW_PERSONAL_VIEW_LOWEST_ROLE_ALLOWED = "VIEWER"
 if "baserow.middleware.ConcurrentUserRequestsMiddleware" in MIDDLEWARE:
     MIDDLEWARE.remove("baserow.middleware.ConcurrentUserRequestsMiddleware")

-
-BASEROW_OPENAI_API_KEY = None
-BASEROW_OPENAI_ORGANIZATION = None
-BASEROW_OPENAI_MODELS = []
-BASEROW_OPENROUTER_API_KEY = None
-BASEROW_OPENROUTER_ORGANIZATION = None
-BASEROW_OPENROUTER_MODELS = []
-BASEROW_ANTHROPIC_API_KEY = None
-BASEROW_ANTHROPIC_MODELS = []
-BASEROW_MISTRAL_API_KEY = None
-BASEROW_MISTRAL_MODELS = []
-BASEROW_OLLAMA_HOST = None
-BASEROW_OLLAMA_MODELS = []
-
 PUBLIC_BACKEND_URL = "http://localhost:8000"
 PUBLIC_WEB_FRONTEND_URL = "http://localhost:3000"
 BASEROW_EMBEDDED_SHARE_URL = "http://localhost:3000"
@@ -102,3 +101,14 @@ STORAGES["default"] = {"BACKEND": BASE_FILE_STORAGE}
 BASEROW_LOGIN_ACTION_LOG_LIMIT = RateLimit.from_string("1000/s")

 BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS = False
+
+
+CACHALOT_ENABLED = str_to_bool(os.getenv("CACHALOT_ENABLED", "false"))
+if CACHALOT_ENABLED:
+    CACHES[CACHALOT_CACHE] = {
+        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
+        "VERSION": None,
+    }
+
+    install_cachalot()
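A usage sketch for the `.env.testing` mechanism introduced above; the file name and setting are illustrative:

```python
# backend/.env.testing-cachalot (ignored by git, see the .gitignore change above):
#   CACHALOT_ENABLED=true
#
# Selecting it when running the tests:
#   TEST_ENV_FILE=.env.testing-cachalot pytest
#
# getenv_for_tests() then serves CACHALOT_ENABLED from that file, so the block at
# the end of the test settings installs cachalot with a local-memory cache.
```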
src/baserow/contrib/builder/api/data_sources/views.py
@@ -17,6 +17,10 @@ from baserow.api.decorators import (
 )
 from baserow.api.errors import ERROR_PERMISSION_DENIED
 from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema
+from baserow.api.services.errors import (
+    ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST,
+    ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST,
+)
 from baserow.api.utils import (
     CustomFieldRegistryMappingSerializer,
     DiscriminatorCustomFieldsMappingSerializer,

@@ -64,7 +68,9 @@ from baserow.core.exceptions import PermissionException
 from baserow.core.services.exceptions import (
     DoesNotExist,
     InvalidServiceTypeDispatchSource,
+    ServiceFilterPropertyDoesNotExist,
     ServiceImproperlyConfigured,
+    ServiceSortPropertyDoesNotExist,
 )
 from baserow.core.services.registries import service_type_registry

@@ -180,7 +186,11 @@ class DataSourcesView(APIView):

         page = PageHandler().get_page(page_id)

-        before = DataSourceHandler().get_data_source(before_id) if before_id else None
+        before = (
+            DataSourceHandler().get_data_source(before_id, specific=False)
+            if before_id
+            else None
+        )

         service_type = service_type_registry.get(type_name) if type_name else None

@@ -417,7 +427,7 @@ class MoveDataSourceView(APIView):

         before = None
         if before_id:
-            before = DataSourceHandler().get_data_source(before_id)
+            before = DataSourceHandler().get_data_source(before_id, specific=False)

         moved_data_source = DataSourceService().move_data_source(
             request.user, data_source, before

@@ -471,6 +481,8 @@ class DispatchDataSourceView(APIView):
             DataSourceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
             DataSourceRefinementForbidden: ERROR_DATA_SOURCE_REFINEMENT_FORBIDDEN,
             ServiceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
+            ServiceSortPropertyDoesNotExist: ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST,
+            ServiceFilterPropertyDoesNotExist: ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST,
             DoesNotExist: ERROR_DATA_DOES_NOT_EXIST,
         }
     )
src/baserow/contrib/builder/api/domains/views.py
@@ -10,6 +10,10 @@ from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
 from baserow.api.decorators import map_exceptions
 from baserow.api.errors import ERROR_PERMISSION_DENIED
 from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema
+from baserow.api.services.errors import (
+    ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST,
+    ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST,
+)
 from baserow.api.utils import (
     DiscriminatorCustomFieldsMappingSerializer,
     apply_exception_mapping,

@@ -54,7 +58,12 @@ from baserow.contrib.builder.workflow_actions.service import (
     BuilderWorkflowActionService,
 )
 from baserow.core.exceptions import ApplicationDoesNotExist, PermissionException
-from baserow.core.services.exceptions import DoesNotExist, ServiceImproperlyConfigured
+from baserow.core.services.exceptions import (
+    DoesNotExist,
+    ServiceFilterPropertyDoesNotExist,
+    ServiceImproperlyConfigured,
+    ServiceSortPropertyDoesNotExist,
+)
 from baserow.core.services.registries import service_type_registry

 from .serializers import PublicDataSourceSerializer, PublicElementSerializer

@@ -230,7 +239,7 @@ class PublicDataSourcesView(APIView):

         handler = BuilderHandler()
         public_properties = handler.get_builder_public_properties(
-            request.user, page.builder
+            request.user_source_user, page.builder
         )

         allowed_fields = []

@@ -340,11 +349,13 @@ class PublicDispatchDataSourceView(APIView):
     @transaction.atomic
     @map_exceptions(
         {
-            DataSourceDoesNotExist: ERROR_DATA_SOURCE_DOES_NOT_EXIST,
-            DataSourceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
-            ServiceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
-            DataSourceRefinementForbidden: ERROR_DATA_SOURCE_REFINEMENT_FORBIDDEN,
             DoesNotExist: ERROR_DATA_DOES_NOT_EXIST,
+            DataSourceDoesNotExist: ERROR_DATA_SOURCE_DOES_NOT_EXIST,
+            DataSourceRefinementForbidden: ERROR_DATA_SOURCE_REFINEMENT_FORBIDDEN,
+            DataSourceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
+            ServiceSortPropertyDoesNotExist: ERROR_SERVICE_SORT_PROPERTY_DOES_NOT_EXIST,
+            ServiceFilterPropertyDoesNotExist: ERROR_SERVICE_FILTER_PROPERTY_DOES_NOT_EXIST,
+            ServiceImproperlyConfigured: ERROR_DATA_SOURCE_IMPROPERLY_CONFIGURED,
         }
     )
     def post(self, request, data_source_id: int):
src/baserow/contrib/builder/api/domains/serializers.py
@@ -10,7 +10,10 @@ from baserow.api.app_auth_providers.serializers import AppAuthProviderSerializer
 from baserow.api.polymorphic import PolymorphicSerializer
 from baserow.api.services.serializers import PublicServiceSerializer
 from baserow.api.user_files.serializers import UserFileField, UserFileSerializer
-from baserow.contrib.builder.api.pages.serializers import PathParamSerializer
+from baserow.contrib.builder.api.pages.serializers import (
+    PathParamSerializer,
+    QueryParamSerializer,
+)
 from baserow.contrib.builder.api.theme.serializers import (
     CombinedThemeConfigBlocksSerializer,
     serialize_builder_theme,

@@ -133,11 +136,14 @@ class PublicElementSerializer(serializers.ModelSerializer):
             "style_border_right_size",
             "style_padding_right",
             "style_margin_right",
+            "style_background_radius",
+            "style_border_radius",
             "style_background",
             "style_background_color",
             "style_background_file",
             "style_background_mode",
             "style_width",
+            "style_width_child",
             "role_type",
             "roles",
         )

@@ -154,6 +160,7 @@ class PublicPageSerializer(serializers.ModelSerializer):
     """

     path_params = PathParamSerializer(many=True, required=False)
+    query_params = QueryParamSerializer(many=True, required=False)

     class Meta:
         model = Page

@@ -166,6 +173,7 @@ class PublicPageSerializer(serializers.ModelSerializer):
             "visibility",
             "role_type",
             "roles",
+            "query_params",
         )
         extra_kwargs = {
             "id": {"read_only": True},
src/baserow/contrib/builder/api/elements/serializers.py
@@ -79,11 +79,14 @@ class ElementSerializer(serializers.ModelSerializer):
             "style_border_right_size",
             "style_padding_right",
             "style_margin_right",
+            "style_background_radius",
+            "style_border_radius",
             "style_background",
             "style_background_color",
             "style_background_file",
             "style_background_mode",
             "style_width",
+            "style_width_child",
             "role_type",
             "roles",
         )

@@ -146,6 +149,8 @@ class CreateElementSerializer(serializers.ModelSerializer):
             "style_border_left_size",
             "style_padding_left",
             "style_margin_left",
+            "style_background_radius",
+            "style_border_radius",
             "style_border_right_color",
             "style_border_right_size",
             "style_padding_right",

@@ -155,6 +160,7 @@ class CreateElementSerializer(serializers.ModelSerializer):
             "style_background_file",
             "style_background_mode",
             "style_width",
+            "style_width_child",
         )
         extra_kwargs = {
             "visibility": {"default": Element.VISIBILITY_TYPES.ALL},

@@ -186,6 +192,8 @@ class UpdateElementSerializer(serializers.ModelSerializer):
             "style_border_left_size",
             "style_padding_left",
             "style_margin_left",
+            "style_background_radius",
+            "style_border_radius",
             "style_border_right_color",
             "style_border_right_size",
             "style_padding_right",

@@ -195,6 +203,7 @@ class UpdateElementSerializer(serializers.ModelSerializer):
             "style_background_file",
             "style_background_mode",
             "style_width",
+            "style_width_child",
             "role_type",
             "roles",
         )
src/baserow/contrib/builder/api/elements/views.py
@@ -134,6 +134,7 @@ class ElementsView(APIView):
     @map_exceptions(
         {
             PageDoesNotExist: ERROR_PAGE_DOES_NOT_EXIST,
             ElementDoesNotExist: ERROR_ELEMENT_DOES_NOT_EXIST,
+            ElementNotInSamePage: ERROR_ELEMENT_NOT_IN_SAME_PAGE,
         }
     )
src/baserow/contrib/builder/api/pages/errors.py
@@ -49,3 +49,17 @@ ERROR_DUPLICATE_PATH_PARAMS_IN_PATH = (
     HTTP_400_BAD_REQUEST,
     "The path params {e.path_param_names} are defined multiple times in path {e.path}",
 )
+
+ERROR_INVALID_QUERY_PARAM_NAME = (
+    "ERROR_INVALID_QUERY_PARAM_NAME",
+    HTTP_400_BAD_REQUEST,
+    "The provided query parameter name {e.query_param_name} is invalid. Query parameter "
+    "names must contain only letters, numbers and underscores.",
+)
+
+ERROR_DUPLICATE_QUERY_PARAMS = (
+    "ERROR_DUPLICATE_QUERY_PARAMS",
+    HTTP_400_BAD_REQUEST,
+    "The query parameter {e.param} is either defined multiple times in {e.query_param_names} "
+    "or conflicts with path parameters {e.path_param_names}.",
+)
src/baserow/contrib/builder/api/pages/serializers.py
@@ -1,8 +1,11 @@
 from rest_framework import serializers

-from baserow.contrib.builder.pages.constants import PAGE_PATH_PARAM_TYPE_CHOICES
+from baserow.contrib.builder.pages.constants import PAGE_PARAM_TYPE_CHOICES
 from baserow.contrib.builder.pages.models import Page
-from baserow.contrib.builder.pages.validators import path_param_name_validation
+from baserow.contrib.builder.pages.validators import (
+    path_param_name_validation,
+    query_param_name_validation,
+)


 class PathParamSerializer(serializers.Serializer):

@@ -13,7 +16,19 @@ class PathParamSerializer(serializers.Serializer):
         max_length=255,
     )
     type = serializers.ChoiceField(
-        choices=PAGE_PATH_PARAM_TYPE_CHOICES, help_text="The type of the parameter."
+        choices=PAGE_PARAM_TYPE_CHOICES, help_text="The type of the parameter."
     )


+class QueryParamSerializer(serializers.Serializer):
+    name = serializers.CharField(
+        required=True,
+        validators=[query_param_name_validation],
+        help_text="The name of the parameter.",
+        max_length=255,
+    )
+    type = serializers.ChoiceField(
+        choices=PAGE_PARAM_TYPE_CHOICES, help_text="The type of the parameter."
+    )
+
+
@@ -25,6 +40,7 @@ class PageSerializer(serializers.ModelSerializer):
     """

     path_params = PathParamSerializer(many=True, required=False)
+    query_params = QueryParamSerializer(many=True, required=False)

     class Meta:
         model = Page

@@ -39,6 +55,7 @@ class PageSerializer(serializers.ModelSerializer):
             "visibility",
             "role_type",
             "roles",
+            "query_params",
         )
         extra_kwargs = {
             "id": {"read_only": True},

@@ -53,18 +70,28 @@ class PageSerializer(serializers.ModelSerializer):

 class CreatePageSerializer(serializers.ModelSerializer):
     path_params = PathParamSerializer(many=True, required=False)
+    query_params = PathParamSerializer(many=True, required=False)

     class Meta:
         model = Page
-        fields = ("name", "path", "path_params")
+        fields = ("name", "path", "path_params", "query_params")


 class UpdatePageSerializer(serializers.ModelSerializer):
     path_params = PathParamSerializer(many=True, required=False)
+    query_params = QueryParamSerializer(many=True, required=False)

     class Meta:
         model = Page
-        fields = ("name", "path", "path_params", "visibility", "role_type", "roles")
+        fields = (
+            "name",
+            "path",
+            "path_params",
+            "visibility",
+            "role_type",
+            "roles",
+            "query_params",
+        )
         extra_kwargs = {
             "name": {"required": False},
             "path": {"required": False},
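The `query_param_name_validation` callable imported above lives in `baserow.contrib.builder.pages.validators` and is not shown in this diff. A minimal hypothetical sketch consistent with the ERROR_INVALID_QUERY_PARAM_NAME message (letters, numbers and underscores only):

```python
import re

from django.core.exceptions import ValidationError

QUERY_PARAM_NAME_REGEX = re.compile(r"^[A-Za-z0-9_]+$")


def query_param_name_validation(value: str) -> None:
    # Hypothetical implementation: reject anything outside [A-Za-z0-9_].
    if not QUERY_PARAM_NAME_REGEX.match(value):
        raise ValidationError(f"{value} is not a valid query parameter name.")
```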
src/baserow/contrib/builder/api/pages/views.py
@@ -16,6 +16,8 @@ from baserow.api.jobs.serializers import JobSerializer
 from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema
 from baserow.contrib.builder.api.pages.errors import (
     ERROR_DUPLICATE_PATH_PARAMS_IN_PATH,
+    ERROR_DUPLICATE_QUERY_PARAMS,
+    ERROR_INVALID_QUERY_PARAM_NAME,
     ERROR_PAGE_DOES_NOT_EXIST,
     ERROR_PAGE_NAME_NOT_UNIQUE,
     ERROR_PAGE_NOT_IN_BUILDER,

@@ -32,7 +34,9 @@ from baserow.contrib.builder.api.pages.serializers import (
 )
 from baserow.contrib.builder.handler import BuilderHandler
 from baserow.contrib.builder.pages.exceptions import (
+    DuplicatePageParams,
     DuplicatePathParamsInPath,
+    InvalidQueryParamName,
     PageDoesNotExist,
     PageNameNotUnique,
     PageNotInBuilder,

@@ -76,6 +80,8 @@ class PagesView(APIView):
                     "ERROR_PAGE_PATH_NOT_UNIQUE",
                     "ERROR_PATH_PARAM_NOT_IN_PATH",
                     "ERROR_PATH_PARAM_NOT_DEFINED",
+                    "ERROR_INVALID_QUERY_PARAM_NAME",
+                    "ERROR_DUPLICATE_QUERY_PARAMS",
                 ]
             ),
             404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),

@@ -90,6 +96,8 @@ class PagesView(APIView):
             PathParamNotInPath: ERROR_PATH_PARAM_NOT_IN_PATH,
             PathParamNotDefined: ERROR_PATH_PARAM_NOT_DEFINED,
             DuplicatePathParamsInPath: ERROR_DUPLICATE_PATH_PARAMS_IN_PATH,
+            InvalidQueryParamName: ERROR_INVALID_QUERY_PARAM_NAME,
+            DuplicatePageParams: ERROR_DUPLICATE_QUERY_PARAMS,
         }
     )
     @validate_body(CreatePageSerializer, return_validated=True)

@@ -102,6 +110,7 @@ class PagesView(APIView):
             data["name"],
             path=data["path"],
             path_params=data.get("path_params", None),
+            query_params=data.get("query_params", None),
         )

         serializer = PageSerializer(page)

@@ -133,6 +142,8 @@ class PageView(APIView):
                     "ERROR_PATH_PARAM_NOT_IN_PATH",
                     "ERROR_PATH_PARAM_NOT_DEFINED",
                     "ERROR_SHARED_PAGE_READ_ONLY",
+                    "ERROR_INVALID_QUERY_PARAM_NAME",
+                    "ERROR_DUPLICATE_QUERY_PARAMS",
                 ]
             ),
             404: get_error_schema(

@@ -151,6 +162,8 @@ class PageView(APIView):
             PathParamNotDefined: ERROR_PATH_PARAM_NOT_DEFINED,
             DuplicatePathParamsInPath: ERROR_DUPLICATE_PATH_PARAMS_IN_PATH,
             SharedPageIsReadOnly: ERROR_SHARED_PAGE_READ_ONLY,
+            InvalidQueryParamName: ERROR_INVALID_QUERY_PARAM_NAME,
+            DuplicatePageParams: ERROR_DUPLICATE_QUERY_PARAMS,
         }
     )
     @validate_body(UpdatePageSerializer, return_validated=True)
src/baserow/contrib/builder/api/serializers.py
@@ -48,8 +48,7 @@ class BuilderSerializer(serializers.ModelSerializer):
         :return: A list of serialized pages that belong to this instance.
         """

-        pages = PageHandler().get_pages(instance)
-
+        pages = instance.page_set.all()
         user = self.context.get("user")
         request = self.context.get("request")
src/baserow/contrib/builder/api/workflow_actions/views.py
@@ -394,13 +394,13 @@ class DispatchBuilderWorkflowActionView(APIView):
         )

         dispatch_context = BuilderDispatchContext(
-            request, workflow_action.page, workflow_action=workflow_action
+            request,
+            workflow_action.page,
+            workflow_action=workflow_action,
         )

         response = BuilderWorkflowActionService().dispatch_action(
             request.user, workflow_action, dispatch_context  # type: ignore
         )

-        if not isinstance(response, Response):
-            response = Response(response)
-        return response
+        return Response(response.data, status=response.status)
@@ -1,11 +1,13 @@
import re
from typing import Any, Dict, List, Optional
from urllib.parse import urljoin
from urllib.parse import urljoin, urlparse
from zipfile import ZipFile

from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.core.files.storage import Storage
from django.db import transaction
from django.db.models import Prefetch
from django.db.transaction import Atomic
from django.urls import include, path

@@ -424,7 +426,7 @@ class BuilderApplicationType(ApplicationType):

        return builder

    def get_default_application_urls(self, application: Builder) -> list[str]:
    def get_application_urls(self, application: Builder) -> list[str]:
        """
        Returns the default frontend urls of a builder application.
        """

@@ -448,9 +450,56 @@ class BuilderApplicationType(ApplicationType):
        # It's an unpublished version let's return to the home preview page
        return [preview_url]

    @classmethod
    def _extract_builder_id_from_path(cls, url_path):
        # Define the regex pattern with a capturing group for the integer
        pattern = r"^/builder/(\d+)/preview/.*$"

        # Use re.match to find the match
        match = re.match(pattern, url_path)

        if match:
            # Extract the integer from the first capturing group
            return int(match.group(1))
        return None

    @classmethod
    def get_application_id_for_url(cls, url: str) -> int | None:
        """
        If the given URL is relative to the PUBLIC_WEB_FRONTEND_URL, we try to match
        the preview path and to extract the builder id from it.

        Otherwise, we try to match a published domain and return the related
        application id.
        """

        from baserow.contrib.builder.domains.models import Domain

        parsed_url = urlparse(url)
        parsed_frontend_url = urlparse(settings.PUBLIC_WEB_FRONTEND_URL)

        if (parsed_url.scheme, parsed_url.hostname) == (
            parsed_frontend_url.scheme,
            parsed_frontend_url.hostname,
        ):
            # It's an unpublished app and we try to access the preview
            url_path = parsed_url.path
            return cls._extract_builder_id_from_path(url_path)

        try:
            # Let's search for a published app
            domain = Domain.objects.exclude(published_to=None).get(
                domain_name=parsed_url.hostname
            )
            return domain.published_to_id
        except Domain.DoesNotExist:
            return None

    def enhance_queryset(self, queryset):
        queryset = queryset.prefetch_related("page_set")
        queryset = queryset.prefetch_related("user_sources")
        queryset = queryset.prefetch_related("integrations")
        queryset = queryset.select_related("favicon_file").prefetch_related(
            "user_sources",
            "integrations",
            Prefetch("page_set", queryset=Page.objects_with_shared.all()),
        )
        queryset = theme_config_block_registry.enhance_list_builder_queryset(queryset)
        return queryset

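For readers following the new `get_application_id_for_url` logic above, here is a self-contained sketch of the same resolution flow. It is illustrative only: the `PUBLIC_WEB_FRONTEND_URL` value and the `PUBLISHED_DOMAINS` dict below are stand-ins for the Django setting and the `Domain` table lookup, so the sketch runs without a database.

import re
from urllib.parse import urlparse

PUBLIC_WEB_FRONTEND_URL = "https://baserow.example.com"  # assumed value
PUBLISHED_DOMAINS = {"app.customer.com": 42}  # domain_name -> published_to_id

def get_application_id_for_url(url: str) -> int | None:
    parsed = urlparse(url)
    frontend = urlparse(PUBLIC_WEB_FRONTEND_URL)
    if (parsed.scheme, parsed.hostname) == (frontend.scheme, frontend.hostname):
        # Unpublished app: the builder id is embedded in the preview path.
        match = re.match(r"^/builder/(\d+)/preview/.*$", parsed.path)
        return int(match.group(1)) if match else None
    # Published app: resolve the application via the custom domain instead.
    return PUBLISHED_DOMAINS.get(parsed.hostname)

assert get_application_id_for_url("https://baserow.example.com/builder/7/preview/") == 7
assert get_application_id_for_url("https://app.customer.com/") == 42
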
@@ -315,4 +315,5 @@ class BuilderConfig(AppConfig):

        # The signals must always be imported last because they use the registries
        # which need to be filled first.
        import baserow.contrib.builder.signals  # noqa: F403, F401
        import baserow.contrib.builder.ws.signals  # noqa: F403, F401

@@ -2,6 +2,10 @@ from django.db import models

IMPORT_SERIALIZED_IMPORTING = "importing"

# A color field can store a hex color value, e.g. "#abc123ff". It can also
# store an arbitrary string, like "transparent" or "my customer color".
COLOR_FIELD_MAX_LENGTH = 255


class HorizontalAlignments(models.TextChoices):
    LEFT = "left"

@@ -24,3 +28,17 @@ class BACKGROUND_IMAGE_MODES(models.TextChoices):
    TILE = "tile"
    FILL = "fill"
    FIT = "fit"


class FontWeights(models.TextChoices):
    THIN = "thin"
    EXTRA_LIGHT = "extra-light"
    LIGHT = "light"
    REGULAR = "regular"
    MEDIUM = "medium"
    SEMI_BOLD = "semi-bold"
    BOLD = "bold"
    EXTRA_BOLD = "extra-bold"
    HEAVY = "heavy"
    BLACK = "black"
    EXTRA_BLACK = "extra-black"

@@ -1,6 +1,8 @@
import re
from typing import Any, Dict, List, Optional, Type, Union

from django.conf import settings
from django.core.cache import cache
from django.utils.translation import gettext as _

from baserow.contrib.builder.data_providers.exceptions import (

@@ -27,10 +29,12 @@ from baserow.contrib.builder.workflow_actions.handler import (
from baserow.core.formula.exceptions import FormulaRecursion, InvalidBaserowFormula
from baserow.core.formula.registries import DataProviderType
from baserow.core.services.dispatch_context import DispatchContext
from baserow.core.services.types import DispatchResult
from baserow.core.user_sources.constants import DEFAULT_USER_ROLE_PREFIX
from baserow.core.user_sources.user_source_user import UserSourceUser
from baserow.core.utils import get_value_at_path
from baserow.core.workflow_actions.exceptions import WorkflowActionDoesNotExist
from baserow.core.workflow_actions.models import WorkflowAction

RE_DEFAULT_ROLE = re.compile(rf"{DEFAULT_USER_ROLE_PREFIX}(\d+)")

@@ -197,7 +201,9 @@ class DataSourceDataProviderType(DataProviderType):
            return {}

        try:
            data_source = DataSourceHandler().get_data_source(data_source_id)
            data_source = DataSourceHandler().get_data_source(
                data_source_id, with_cache=True
            )
        except DataSourceDoesNotExist as exc:
            # The data source has probably been deleted
            raise InvalidBaserowFormula() from exc

@@ -268,7 +274,9 @@ class DataSourceContextDataProviderType(DataProviderType):
            return {}

        try:
            data_source = DataSourceHandler().get_data_source(data_source_id)
            data_source = DataSourceHandler().get_data_source(
                data_source_id, with_cache=True
            )
        except DataSourceDoesNotExist as exc:
            # The data source has probably been deleted
            raise InvalidBaserowFormula() from exc

@@ -294,7 +302,9 @@ class CurrentRecordDataProviderType(DataProviderType):
        """

        try:
            current_record = dispatch_context.request.data["current_record"]
            current_record_data = dispatch_context.request.data["current_record"]
            current_record = current_record_data["index"]
            current_record_id = current_record_data["record_id"]
        except KeyError:
            return None

@@ -314,8 +324,9 @@ class CurrentRecordDataProviderType(DataProviderType):
            # Narrow down our range to just our record index.
            dispatch_context = dispatch_context.from_context(
                dispatch_context,
                offset=current_record,
                offset=0,
                count=1,
                only_record_id=current_record_id,
            )

        return DataSourceDataProviderType().get_data_chunk(

@@ -369,7 +380,9 @@ class CurrentRecordDataProviderType(DataProviderType):
            return {}

        try:
            data_source = DataSourceHandler().get_data_source(data_source_id)
            data_source = DataSourceHandler().get_data_source(
                data_source_id, with_cache=True
            )
        except DataSourceDoesNotExist as exc:
            # The data source is probably not accessible so we raise an invalid formula
            raise InvalidBaserowFormula() from exc

@@ -401,14 +414,73 @@ class PreviousActionProviderType(DataProviderType):

    type = "previous_action"

    def get_dispatch_action_cache_key(self, dispatch_id: str, action_id: int) -> str:
        """
        Return a unique string to key the intermediate dispatch results in
        the cache.
        """

        return f"builder_dispatch_action_{dispatch_id}_{action_id}"

    def get_data_chunk(self, dispatch_context: DispatchContext, path: List[str]):
        previous_action_id, *rest = path
        previous_action = dispatch_context.request.data.get("previous_action", {})

        if previous_action_id not in previous_action:
        previous_action_results = dispatch_context.request.data.get(
            "previous_action", {}
        )

        if previous_action_id not in previous_action_results:
            message = "The previous action id is not present in the dispatch context"
            raise DataProviderChunkInvalidException(message)
        return get_value_at_path(previous_action, path)

        if "current_dispatch_id" not in previous_action_results:
            message = "The dispatch id is missing in the dispatch context"
            raise DataProviderChunkInvalidException(message)

        dispatch_id = previous_action_results.get("current_dispatch_id")

        workflow_action = BuilderWorkflowActionHandler().get_workflow_action(
            previous_action_id
        )

        if getattr(workflow_action.get_type(), "is_server_workflow", False):
            # If the previous action was a server action we get the previous result
            # from the cache instead
            cache_key = self.get_dispatch_action_cache_key(
                dispatch_id, workflow_action.id
            )
            return get_value_at_path(cache.get(cache_key), rest)
        else:
            return get_value_at_path(previous_action_results[previous_action_id], rest)

    def post_dispatch(
        self,
        dispatch_context: DispatchContext,
        workflow_action: WorkflowAction,
        dispatch_result: DispatchResult,
    ) -> None:
        """
        If the current_dispatch_id exists in the request data, create a unique
        cache key and store the result in the cache.

        The current_dispatch_id is used to keep track of results of chained
        workflow actions. For security reasons, the result of a workflow action
        is not returned to the frontend; it is instead stored in the cache. Should
        a subsequent workflow action require the result, it can fetch it from
        the cache.
        """

        if dispatch_id := dispatch_context.request.data.get("previous_action", {}).get(
            "current_dispatch_id"
        ):
            cache_key = self.get_dispatch_action_cache_key(
                dispatch_id, workflow_action.id
            )
            cache.set(
                cache_key,
                dispatch_result.data,
                timeout=settings.BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS,
            )

    def import_path(self, path, id_mapping, **kwargs):
        workflow_action_id, *rest = path

@@ -429,6 +501,45 @@ class PreviousActionProviderType(DataProviderType):

        return [str(workflow_action_id), *rest]

    def extract_properties(
        self,
        path: List[str],
        **kwargs,
    ) -> Dict[str, List[str]]:
        """
        Given a formula path, validates that the Workflow Action is valid
        and returns a dict where the key is the Workflow Action's service ID
        and the value is a list of field names.

        E.g. supposing the original formula string is:
        'previous_action.456.field_1234', the `path` would be `['456', 'field_5769']`.

        If the workflow action's service ID is 123, the following would be
        returned: `{123: ['field_1234']}`.
        """

        if not path:
            return {}

        previous_id, *rest = path

        try:
            previous_id = int(previous_id)
        except ValueError:
            return {}

        try:
            previous_action = BuilderWorkflowActionHandler().get_workflow_action(
                previous_id
            )
        except WorkflowActionDoesNotExist as exc:
            raise InvalidBaserowFormula() from exc

        service_type = previous_action.service.specific.get_type()
        return {
            previous_action.service.id: service_type.extract_properties(rest, **kwargs)
        }


class UserDataProviderType(DataProviderType):
    """

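The `previous_action` caching above can be hard to follow from the diff alone. Below is a minimal sketch of the key scheme, assuming a plain dict in place of Django's cache backend; the real code applies `BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS` as a timeout, which the dict cannot express.

# Stand-in for Django's cache; keys follow the diff's naming scheme.
_cache: dict[str, object] = {}

def dispatch_action_cache_key(dispatch_id: str, action_id: int) -> str:
    return f"builder_dispatch_action_{dispatch_id}_{action_id}"

def post_dispatch(dispatch_id: str, action_id: int, result: dict) -> None:
    # The result never travels back to the frontend; later actions in the
    # same chain read it from the cache instead.
    _cache[dispatch_action_cache_key(dispatch_id, action_id)] = result

def previous_result(dispatch_id: str, action_id: int) -> object:
    return _cache.get(dispatch_action_cache_key(dispatch_id, action_id))

post_dispatch("abc123", 5, {"row_id": 17})
assert previous_result("abc123", 5) == {"row_id": 17}
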
@@ -24,6 +24,8 @@ CACHE_KEY_PREFIX = "used_properties_for_page"

User = get_user_model()

SENTINEL = "__no_results__"


class BuilderDispatchContext(DispatchContext):
    own_properties = [

@@ -33,6 +35,7 @@ class BuilderDispatchContext(DispatchContext):
        "element",
        "offset",
        "count",
        "only_record_id",
        "only_expose_public_allowed_properties",
    ]

@@ -44,12 +47,29 @@ class BuilderDispatchContext(DispatchContext):
        element: Optional["Element"] = None,
        offset: Optional[int] = None,
        count: Optional[int] = None,
        only_record_id: Optional[int | str] = None,
        only_expose_public_allowed_properties: Optional[bool] = True,
    ):
        """
        Dispatch context used in the builder.

        :param request: The HTTP request from the view.
        :param page: The page related to the dispatch.
        :param workflow_action: The workflow action being executed, if any.
        :param element: An optional element that triggered the dispatch.
        :param offset: When we dispatch a list service, starts by that offset.
        :param count: When we dispatch a list service returns that max amount of record.
        :param record_id: If we want to narrow down the results of a list service to
            only the record with this Id.
        :param only_expose_public_allowed_properties: Determines whether only public
            allowed properties should be exposed. Defaults to True.
        """

        self.request = request
        self.page = page
        self.workflow_action = workflow_action
        self.element = element
        self.only_record_id = only_record_id

        # Overrides the `request` GET offset/count values.
        self.offset = offset

@@ -273,6 +293,6 @@ class BuilderDispatchContext(DispatchContext):
            return None

        return BuilderHandler().get_builder_public_properties(
            self.request.user,
            self.request.user_source_user,
            self.page.builder,
        )

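The `only_record_id` narrowing used by `CurrentRecordDataProviderType` rebuilds the context through `from_context` with keyword overrides. A toy model of that copy-with-overrides pattern, using a dataclass rather than Baserow's real `BuilderDispatchContext` (field names follow the diff, the class itself is illustrative):

from dataclasses import dataclass, replace
from typing import Optional

@dataclass
class Ctx:
    offset: Optional[int] = None
    count: Optional[int] = None
    only_record_id: Optional[int] = None

    @classmethod
    def from_context(cls, ctx: "Ctx", **overrides) -> "Ctx":
        # Keep everything from the source context, override selectively.
        return replace(ctx, **overrides)

page_ctx = Ctx(offset=40, count=20)
# Instead of fetching a whole page at the record's offset, request exactly
# one row identified by its id.
row_ctx = Ctx.from_context(page_ctx, offset=0, count=1, only_record_id=17)
assert (row_ctx.offset, row_ctx.count, row_ctx.only_record_id) == (0, 1, 17)
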
@@ -17,6 +17,7 @@ from baserow.contrib.builder.data_sources.models import DataSource
from baserow.contrib.builder.formula_importer import import_formula
from baserow.contrib.builder.pages.models import Page
from baserow.contrib.builder.types import DataSourceDict
from baserow.core.cache import local_cache
from baserow.core.integrations.models import Integration
from baserow.core.integrations.registries import integration_type_registry
from baserow.core.services.handler import ServiceHandler

@@ -36,13 +37,50 @@ class DataSourceHandler:
        self.service_handler = ServiceHandler()

    def get_data_source(
        self, data_source_id: int, base_queryset: Optional[QuerySet] = None, cache=None
        self,
        data_source_id: int,
        base_queryset: Optional[QuerySet] = None,
        specific=True,
        with_cache=False,
    ) -> DataSource:
        """
        Returns a data_source instance from the database.

        :param data_source_id: The ID of the data_source.
        :param base_queryset: The base queryset to use to build the query.
        :param specific: Return the specific version of related objects like the
            service and the integration
        :raises DataSourceDoesNotExist: If the data_source can't be found.
        :param with_cache: Whether this method should use the short
            cache for data_sources.
        :return: The data_source instance.
        """

        if with_cache and not base_queryset:
            return local_cache.get(
                f"ab_data_source_{data_source_id}_{specific}",
                lambda: self._get_data_source(
                    data_source_id, base_queryset, specific=specific
                ),
            )
        else:
            return self._get_data_source(
                data_source_id, base_queryset, specific=specific
            )

    def _get_data_source(
        self,
        data_source_id: int,
        base_queryset: Optional[QuerySet] = None,
        specific=True,
    ) -> DataSource:
        """
        Base version of the get_data_source without the caching capabilities.

        :param data_source_id: The ID of the data_source.
        :param base_queryset: The base queryset to use to build the query.
        :param specific: Return the specific version of related objects like the
            service and the integration
        :raises DataSourceDoesNotExist: If the data_source can't be found.
        :return: The data_source instance.
        """

@@ -51,12 +89,24 @@ class DataSourceHandler:
            base_queryset if base_queryset is not None else DataSource.objects.all()
        )

        queryset = queryset.select_related("page__builder__workspace")

        try:
            data_source = queryset.select_related(
                "page", "page__builder", "page__builder__workspace", "service"
            ).get(id=data_source_id)
        except DataSource.DoesNotExist:
            raise DataSourceDoesNotExist()
            if specific:
                data_source = queryset.get(id=data_source_id)
                if data_source.service_id:
                    specific_service = ServiceHandler().get_service(
                        data_source.service_id, specific=True
                    )
                    data_source.__class__.service.field.set_cached_value(
                        data_source, specific_service
                    )
            else:
                data_source = queryset.select_related("service__integration").get(
                    id=data_source_id
                )
        except DataSource.DoesNotExist as exc:
            raise DataSourceDoesNotExist() from exc

        return data_source

@@ -83,26 +133,23 @@ class DataSourceHandler:
            base_queryset=queryset,
        )

    def _query_data_sources(self, base_queryset: QuerySet, specific=True):
    def _query_data_sources(
        self, base_queryset: QuerySet, specific=True, with_cache=False
    ):
        """
        Query data sources from the base queryset.

        :param base_queryset: The base QuerySet to query from.
        :param specific: A boolean flag indicating whether to include specific service
            instance.
        :param with_cache: Whether this method should populate the short
            cache for data_sources.
        :return: A list of queried data sources.
        """

        data_source_queryset = base_queryset.select_related(
            "service",
            "page__builder__workspace",
            "service__integration__application",
        )
        data_source_queryset = base_queryset.select_related("page__builder__workspace")

        if specific:
            data_source_queryset = data_source_queryset.select_related(
                "service__content_type"
            )
            data_sources = list(data_source_queryset.all())

            # Get all service ids to get them from DB in one query

@@ -124,9 +171,19 @@ class DataSourceHandler:
            if data_source.service_id:
                data_source.service = specific_services_map[data_source.service_id]

            return data_sources
        else:
            return data_source_queryset.all()
            data_source_queryset.select_related(
                "service__integration__application",
            )
            data_sources = data_source_queryset.all()

        if with_cache:
            for ds in data_sources:
                local_cache.get(
                    f"ab_data_source_{ds.id}_{specific}",
                    ds,
                )
        return data_sources

    def get_data_sources(
        self,

@@ -134,6 +191,7 @@ class DataSourceHandler:
        base_queryset: Optional[QuerySet] = None,
        with_shared: Optional[bool] = False,
        specific: Optional[bool] = True,
        with_cache=False,
    ) -> Union[QuerySet[DataSource], Iterable[DataSource]]:
        """
        Gets all the specific data_sources of a given page.

@@ -144,6 +202,8 @@ class DataSourceHandler:
            on the same builder.
        :param specific: If True, return the specific version of the service related
            to the data source
        :param with_cache: Whether this method should populate the short
            cache for data_sources.
        :return: The data_sources of that page.
        """

@@ -159,13 +219,18 @@ class DataSourceHandler:
        else:
            data_source_queryset = data_source_queryset.filter(page=page)

        return self._query_data_sources(data_source_queryset, specific=specific)
        return self._query_data_sources(
            data_source_queryset,
            specific=specific,
            with_cache=with_cache,
        )

    def get_builder_data_sources(
        self,
        builder: "Builder",
        base_queryset: Optional[QuerySet] = None,
        specific: Optional[bool] = True,
        with_cache=False,
    ) -> Union[QuerySet[DataSource], Iterable[DataSource]]:
        """
        Gets all the specific data_sources of a given builder.

@@ -174,6 +239,8 @@ class DataSourceHandler:
        :param base_queryset: The base queryset to use to build the query.
        :param specific: If True, return the specific version of the service related
            to the data source
        :param with_cache: Whether this method should populate the short
            cache for data_sources.
        :return: The data_sources of that builder.
        """

@@ -183,7 +250,11 @@ class DataSourceHandler:

        data_source_queryset = data_source_queryset.filter(page__builder=builder)

        return self._query_data_sources(data_source_queryset, specific=specific)
        return self._query_data_sources(
            data_source_queryset,
            specific=specific,
            with_cache=with_cache,
        )

    def get_data_sources_with_cache(
        self,

@@ -192,26 +263,25 @@ class DataSourceHandler:
        specific: bool = True,
    ):
        """
        Gets all the specific data_sources of a given page. This version cache the
        Gets all the data sources of a given page. This version cache the
        data sources of the page onto the page object to improve perfs.

        :param page: The page that holds the data_source.
        :param base_queryset: The base queryset to use to build the query.
        :param specific: If True, return the specific version of the service related
            to the integration
            to the data source
        :return: The data_sources of the page.
        """

        if not hasattr(page, "_data_sources"):
            data_sources = DataSourceHandler().get_data_sources(
        return local_cache.get(
            f"ab_data_sources_{page.id}_{specific}",
            lambda: DataSourceHandler().get_data_sources(
                page,
                base_queryset=base_queryset,
                specific=specific,
                with_shared=True,
            )
            setattr(page, "_data_sources", data_sources)

        return getattr(page, "_data_sources")
            ),
        )

    def get_data_source_with_cache(
        self,

@@ -326,6 +396,7 @@ class DataSourceHandler:
        :return: The updated data_source.
        """

        new_service_type = None
        if "new_service_type" in kwargs:
            new_service_type = kwargs.pop("new_service_type")

@@ -353,7 +424,7 @@ class DataSourceHandler:
            )
            data_source.service = service

        if data_source.service and kwargs:
        if data_source.service and kwargs and new_service_type is None:
            service_to_update = self.service_handler.get_service_for_update(
                data_source.service.id
            )

@@ -447,7 +518,7 @@ class DataSourceHandler:
            # it later
            dispatch_context.cache["data_source_contents"][
                data_source.id
            ] = service_dispatch
            ] = service_dispatch.data

        return dispatch_context.cache["data_source_contents"][data_source.id]

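The repeated `local_cache.get(key, default)` calls above follow a memoization pattern: compute on a miss, reuse on a hit. A minimal stand-in is sketched below; Baserow's real `local_cache` is additionally request-scoped (it resets between requests), which this sketch omits.

from typing import Callable, TypeVar

T = TypeVar("T")
_store: dict[str, object] = {}  # stand-in for the request-scoped store

def cache_get(key: str, default: "Callable[[], T] | T") -> T:
    # Compute (or take) the value only on the first lookup of this key.
    if key not in _store:
        _store[key] = default() if callable(default) else default
    return _store[key]  # type: ignore[return-value]

# A second lookup of the same data source id hits the dict, not the database.
calls = []
def fake_db_fetch():
    calls.append(1)
    return {"id": 3, "name": "ds"}

cache_get("ab_data_source_3_True", fake_db_fetch)
cache_get("ab_data_source_3_True", fake_db_fetch)
assert len(calls) == 1
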
@@ -89,13 +89,15 @@ class DataSourceService:
        return self.handler.get_data_sources(page, base_queryset=user_data_sources)

    def get_builder_data_sources(
        self, user: AbstractUser, builder: "Builder"
        self, user: AbstractUser, builder: "Builder", with_cache=False
    ) -> List[DataSource]:
        """
        Gets all the data_sources of a given builder visible to the given user.

        :param user: The user trying to get the data_sources.
        :param page: The builder that holds the data_sources.
        :param with_cache: Whether this method should populate the short
            cache for data_sources.
        :return: The data_sources of that builder.
        """

@@ -107,7 +109,9 @@ class DataSourceService:
        )

        return self.handler.get_builder_data_sources(
            builder, base_queryset=user_data_sources
            builder,
            base_queryset=user_data_sources,
            with_cache=with_cache,
        )

    def create_data_source(

@@ -384,7 +388,7 @@ class DataSourceService:
        Dispatch the service related to the data_source if the user has the permission.

        :param user: The current user.
        :param data_sources: The data source to be dispatched.
        :param data_source: The data source to be dispatched.
        :param dispatch_context: The context used for the dispatch.
        :return: return the dispatch result.
        """

@@ -57,7 +57,7 @@ class DomainHandler:
        """

        if base_queryset is None:
            base_queryset = Domain.objects
            base_queryset = Domain.objects.all()

        return specific_iterator(base_queryset.filter(builder=builder))

@@ -73,7 +73,7 @@ class DomainHandler:
        try:
            domain = (
                Domain.objects.exclude(published_to=None)
                .select_related("published_to", "builder")
                .select_related("published_to", "builder__workspace")
                .only("published_to", "builder")
                .get(domain_name=domain_name)
            )

@@ -193,7 +193,7 @@ class DomainHandler:

        return full_order

    def publish(self, domain: Domain, progress: Progress):
    def publish(self, domain: Domain, progress: Progress | None = None):
        """
        Publishes a builder for the given domain object. If the builder was
        already published, the previous version is deleted and a new one is created.

@@ -233,7 +233,8 @@ class DomainHandler:
            for user_source in exported_builder["user_sources"]
        ]

        progress.increment(by=50)
        if progress:
            progress.increment(by=50)

        id_mapping = {"import_workspace_id": workspace.id}
        duplicate_builder = builder_application_type.import_serialized(

@@ -243,7 +244,9 @@ class DomainHandler:
            id_mapping,
            None,
            default_storage,
            progress_builder=progress.create_child_builder(represents_progress=50),
            progress_builder=progress.create_child_builder(represents_progress=50)
            if progress
            else None,
        )
        domain.published_to = duplicate_builder
        domain.last_published = datetime.now(tz=timezone.utc)

@@ -1,3 +1,5 @@
import functools

from django.contrib.auth import get_user_model

from baserow.contrib.builder.data_sources.operations import (

@@ -11,6 +13,7 @@ from baserow.contrib.builder.workflow_actions.operations import (
    DispatchBuilderWorkflowActionOperationType,
    ListBuilderWorkflowActionsPageOperationType,
)
from baserow.core.cache import local_cache
from baserow.core.operations import ReadApplicationOperationType
from baserow.core.registries import PermissionManagerType, operation_type_registry
from baserow.core.subjects import AnonymousUserSubjectType, UserSubjectType

@@ -56,25 +59,54 @@ class AllowPublicBuilderManagerType(PermissionManagerType):
        ListUserSourcesApplicationOperationType.type,
    ]

    def get_builder_from_id(self, builder_id):
        """
        Returns the builder for the given id. Can be a cached version.
        """

        def get_builder_if_exists():
            try:
                return Builder.objects.get(id=builder_id)
            except Builder.DoesNotExist:
                return None

        return local_cache.get(f"ab_builder_{builder_id}", get_builder_if_exists)

    def get_builder_from_instance(self, instance, property_name):
        """
        Returns the builder from the instance at the given property. The value can be
        cached.
        """

        prop_id_name = f"{property_name}_id"

        if getattr(instance.__class__, property_name).is_cached(instance):
            return local_cache.get(
                f"ab_builder_{getattr(instance, prop_id_name)}",
                lambda: getattr(instance, property_name),
            )
        else:
            return self.get_builder_from_id(getattr(instance, prop_id_name))

    def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
        result = {}

        for check in checks:
            operation_type = operation_type_registry.get(check.operation_name)
            if operation_type.type in self.page_level_operations:
                builder = check.context.builder
                builder = self.get_builder_from_instance(check.context, "builder")
            elif operation_type.type in self.sub_page_level_operations:
                builder = check.context.page.builder
                builder = self.get_builder_from_instance(check.context.page, "builder")
            elif (
                operation_type.type in self.sub_application_level_operations
                and isinstance(check.context.application.specific, Builder)
                and self.get_builder_from_id(check.context.application_id)
            ):
                builder = check.context.application.specific
                builder = self.get_builder_from_id(check.context.application_id)
            elif (
                operation_type.type in self.application_level_operations
                and isinstance(check.context.specific, Builder)
                and self.get_builder_from_id(check.context.id)
            ):
                builder = check.context.specific
                builder = self.get_builder_from_id(check.context.id)
            else:
                continue

@@ -100,7 +132,18 @@ class AllowPublicBuilderManagerType(PermissionManagerType):
                # give access to specific data.
                continue

            if DomainHandler().get_domain_for_builder(builder) is not None:
            def is_public_callback(b):
                return (
                    b.workspace is None
                    and DomainHandler().get_domain_for_builder(b) is not None
                )

            is_public = local_cache.get(
                f"ab_is_public_builder_{builder.id}",
                functools.partial(is_public_callback, builder),
            )

            if is_public:
                # it's a public builder, we allow it.
                result[check] = True

@@ -157,6 +157,16 @@ class LinkCollectionFieldType(CollectionFieldType):
                collection_field.config["page_parameters"][index]["value"] = new_formula
            yield collection_field

        for index, query_parameter in enumerate(
            collection_field.config.get("query_parameters") or []
        ):
            new_formula = yield query_parameter.get("value")
            if new_formula is not None:
                collection_field.config["query_parameters"][index][
                    "value"
                ] = new_formula
            yield collection_field

    def deserialize_property(
        self,
        prop_name: str,

@@ -331,6 +331,8 @@ class RepeatElementType(
        return super().allowed_fields + [
            "orientation",
            "items_per_row",
            "horizontal_gap",
            "vertical_gap",
        ]

    @property

@@ -338,6 +340,8 @@ class RepeatElementType(
        return super().serializer_field_names + [
            "orientation",
            "items_per_row",
            "horizontal_gap",
            "vertical_gap",
        ]

    class SerializedDict(

@@ -346,6 +350,8 @@ class RepeatElementType(
    ):
        orientation: str
        items_per_row: dict
        horizontal_gap: int
        vertical_gap: int

    @property
    def serializer_field_overrides(self):

@@ -736,6 +742,7 @@ class NavigationElementManager:
        "navigate_to_page_id",
        "navigate_to_url",
        "page_parameters",
        "query_parameters",
        "target",
    ]
    allowed_fields = [

@@ -743,6 +750,7 @@ class NavigationElementManager:
        "navigate_to_page_id",
        "navigate_to_url",
        "page_parameters",
        "query_parameters",
        "target",
    ]
    simple_formula_fields = ["navigate_to_url"]

@@ -751,6 +759,7 @@ class NavigationElementManager:
        navigation_type: str
        navigate_to_page_id: int
        page_parameters: List
        query_parameters: List
        navigate_to_url: BaserowFormula
        target: str

@@ -793,9 +802,16 @@ class NavigationElementManager:
            ),
            "page_parameters": PageParameterValueSerializer(
                many=True,
                default=[],
                help_text=LinkElement._meta.get_field("page_parameters").help_text,
                required=False,
            ),
            "query_parameters": PageParameterValueSerializer(
                many=True,
                default=[],
                help_text=LinkElement._meta.get_field("query_parameters").help_text,
                required=False,
            ),
            "target": serializers.ChoiceField(
                choices=NavigationElementMixin.TARGETS.choices,
                help_text=LinkElement._meta.get_field("target").help_text,

@@ -814,6 +830,7 @@ class NavigationElementManager:
            "navigate_to_page_id": None,
            "navigate_to_url": '"http://example.com"',
            "page_parameters": [],
            "query_parameters": [],
            "target": "blank",
        }

@@ -851,7 +868,7 @@ class NavigationElementManager:

        return ElementType.prepare_value_for_db(self, values, instance)

    def _raise_if_path_params_are_invalid(self, path_params: Dict, page: Page) -> None:
    def _raise_if_path_params_are_invalid(self, path_params: List, page: Page) -> None:
        """
        Checks if the path parameters being set are correctly correlated to the
        path parameters defined for the page.

@@ -863,7 +880,6 @@ class NavigationElementManager:
        """

        parameter_types = {p["name"]: p["type"] for p in page.path_params}

        for page_parameter in path_params:
            page_parameter_name = page_parameter["name"]
            page_parameter_type = parameter_types.get(page_parameter_name, None)

@@ -876,12 +892,11 @@ class NavigationElementManager:

class LinkElementType(ElementType):
    """
    A simple paragraph element that can be used to display a paragraph of text.
    A link element that can be used to navigate to a page or a URL.
    """

    type = "link"
    model_class = LinkElement
    PATH_PARAM_TYPE_TO_PYTHON_TYPE_MAP = {"text": str, "numeric": int}
    simple_formula_fields = NavigationElementManager.simple_formula_fields + ["value"]

    @property

@@ -917,7 +932,7 @@ class LinkElementType(ElementType):
        Generator that returns formula fields for the LinkElementType.

        Unlike other Element types, this one has its formula fields in the
        page_parameters JSON field.
        page_parameters and query_prameters JSON fields.
        """

        yield from super().formula_generator(element)

@@ -928,6 +943,12 @@ class LinkElementType(ElementType):
                element.page_parameters[index]["value"] = new_formula
            yield element

        for index, data in enumerate(element.query_parameters or []):
            new_formula = yield data["value"]
            if new_formula is not None:
                element.query_parameters[index]["value"] = new_formula
            yield element

    def deserialize_property(
        self,
        prop_name: str,

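The `formula_generator` additions above use a send-driven generator: each formula is yielded out, the caller may send a rewritten formula back in, and the mutated element is yielded again as confirmation. A self-contained sketch of that contract on a plain dict (the `migrated(...)` rewrite is just an illustrative transformation):

def formula_generator(element):
    for index, data in enumerate(element["query_parameters"]):
        new_formula = yield data["value"]
        if new_formula is not None:
            element["query_parameters"][index]["value"] = new_formula
            yield element  # confirm the mutation before moving on

element = {"query_parameters": [{"value": "get('old')"}]}
gen = formula_generator(element)
formula = next(gen)               # receive "get('old')"
gen.send(f"migrated({formula})")  # push the rewritten formula back
assert element["query_parameters"][0]["value"] == "migrated(get('old'))"
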
@@ -65,11 +65,14 @@ class ElementHandler:
        "style_border_right_size",
        "style_padding_right",
        "style_margin_right",
        "style_background_radius",
        "style_border_radius",
        "style_background",
        "style_background_color",
        "style_background_file",
        "style_background_mode",
        "style_width",
        "style_width_child",
    ]

    allowed_fields_update = [

@@ -93,11 +96,14 @@ class ElementHandler:
        "style_border_right_size",
        "style_padding_right",
        "style_margin_right",
        "style_background_radius",
        "style_border_radius",
        "style_background",
        "style_background_color",
        "style_background_file",
        "style_background_mode",
        "style_width",
        "style_width_child",
        "role_type",
        "roles",
    ]

@@ -118,9 +124,7 @@ class ElementHandler:

        try:
            element = (
                queryset.select_related(
                    "page", "page__builder", "page__builder__workspace"
                )
                queryset.select_related("page__builder__workspace")
                .get(id=element_id)
                .specific
            )

@@ -233,8 +237,7 @@ class ElementHandler:
        """

        if specific:
            queryset = base_queryset.select_related("content_type")
            elements = specific_iterator(queryset)
            elements = specific_iterator(base_queryset)
        else:
            elements = base_queryset

@@ -502,7 +502,7 @@ class CollectionElementTypeMixin:
        # current instance
        data_source_id = instance.data_source_id or kwargs.get("data_source_id", None)
        data_source = (
            DataSourceHandler().get_data_source(data_source_id)
            DataSourceHandler().get_data_source(data_source_id, with_cache=True)
            if data_source_id
            else None
        )

@@ -517,9 +517,17 @@ class CollectionElementTypeMixin:
            .items()
            if any(options.values())
        ]

        if data_source and property_options:
            properties.setdefault(data_source.service_id, []).extend(property_options)

        # We need the id for the element
        if data_source and data_source.service_id:
            service = data_source.service.specific
            id_property = service.get_type().get_id_property(service)
            if id_property not in properties.setdefault(service.id, []):
                properties[service.id].append(id_property)

        return properties

@@ -6,7 +6,11 @@ from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import SET_NULL, QuerySet

from baserow.contrib.builder.constants import BACKGROUND_IMAGE_MODES, VerticalAlignments
from baserow.contrib.builder.constants import (
    BACKGROUND_IMAGE_MODES,
    COLOR_FIELD_MAX_LENGTH,
    VerticalAlignments,
)
from baserow.core.constants import DATE_FORMAT_CHOICES, DATE_TIME_FORMAT_CHOICES
from baserow.core.formula.field import FormulaField
from baserow.core.mixins import (

@@ -37,6 +41,12 @@ class WidthTypes(models.TextChoices):
    SMALL = "small"


class ChildWidthTypes(models.TextChoices):
    NORMAL = "normal"
    MEDIUM = "medium"
    SMALL = "small"


class INPUT_TEXT_TYPES(models.TextChoices):
    TEXT = "text"
    PASSWORD = "password"  # nosec bandit B105

@@ -136,7 +146,7 @@ class Element(
    )

    style_border_top_color = models.CharField(
        max_length=20,
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="border",
        blank=True,
        help_text="Top border color.",

@@ -153,7 +163,7 @@ class Element(
    )

    style_border_bottom_color = models.CharField(
        max_length=20,
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="border",
        blank=True,
        help_text="Bottom border color",

@@ -170,7 +180,7 @@ class Element(
    )

    style_border_left_color = models.CharField(
        max_length=20,
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="border",
        blank=True,
        help_text="Left border color",

@@ -187,7 +197,7 @@ class Element(
    )

    style_border_right_color = models.CharField(
        max_length=20,
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="border",
        blank=True,
        help_text="Right border color",

@@ -203,6 +213,13 @@ class Element(
        help_text="Margin size of the right border.",
    )

    style_background_radius = models.SmallIntegerField(
        default=0, db_default=0, help_text="Background radius."
    )
    style_border_radius = models.SmallIntegerField(
        default=0, db_default=0, help_text="Border radius."
    )

    style_background = models.CharField(
        choices=BackgroundTypes.choices,
        default=BackgroundTypes.NONE,

@@ -210,7 +227,7 @@ class Element(
        max_length=20,
    )
    style_background_color = models.CharField(
        max_length=20,
        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#ffffffff",
        blank=True,
        help_text="The background color if `style_background` is color.",

@@ -234,10 +251,18 @@ class Element(
    style_width = models.CharField(
        choices=WidthTypes.choices,
        default=WidthTypes.NORMAL,
        help_text="Indicates the width of the element.",
        help_text="Indicates the width of the root element.",
        max_length=20,
    )

    style_width_child = models.CharField(
        choices=ChildWidthTypes.choices,
        default=ChildWidthTypes.NORMAL,
        db_default=ChildWidthTypes.NORMAL,
        help_text="Indicates the width of the child element.",
        max_length=6,
    )

    class Meta:
        ordering = ("order", "id")

@@ -463,6 +488,12 @@ class NavigationElementMixin(models.Model):
        help_text="The parameters for each parameters of the selected page if any.",
        null=True,
    )
    query_parameters = models.JSONField(
        db_default=[],
        default=list,
        help_text="The query parameters for each parameter of the selected page if any.",
        null=True,
    )
    target = models.CharField(
        choices=TARGETS.choices,
        help_text="The target of the link when we click on it.",

@@ -849,6 +880,24 @@ class RepeatElement(CollectionElement, ContainerElement):
        help_text="The amount repetitions per row, per device type. "
        "Only applicable when the orientation is horizontal.",
    )
    horizontal_gap = models.IntegerField(
        default=0,
        db_default=0,
        help_text="The amount of horizontal space between repeat elements.",
        validators=[
            MinValueValidator(0, message="Value cannot be less than 0."),
            MaxValueValidator(2000, message="Value cannot be greater than 2000."),
        ],
    )
    vertical_gap = models.IntegerField(
        default=0,
        db_default=0,
        help_text="The amount of vertical space between repeat elements.",
        validators=[
            MinValueValidator(0, message="Value cannot be less than 0."),
            MaxValueValidator(2000, message="Value cannot be greater than 2000."),
        ],
    )


class RecordSelectorElement(CollectionElement, FormElement):

@@ -204,19 +204,23 @@ def get_builder_used_property_names(
        BuilderWorkflowActionService,
    )

    # We query the data source first to populate the data source cache
    data_sources = DataSourceService().get_builder_data_sources(
        user, builder, with_cache=True
    )

    elements = list(ElementService().get_builder_elements(user, builder))
    element_map = {e.id: e for e in elements}

    element_results = get_element_property_names(elements, element_map)

    ds_results = get_data_source_property_names(data_sources)

    workflow_actions = BuilderWorkflowActionService().get_builder_workflow_actions(
        user, builder
    )
    wa_results = get_workflow_action_property_names(workflow_actions, element_map)

    data_sources = DataSourceService().get_builder_data_sources(user, builder)
    ds_results = get_data_source_property_names(data_sources)

    results = {
        "internal": merge_dicts_no_duplicates(
            wa_results["internal"], ds_results["internal"]

@@ -3,7 +3,6 @@ from typing import Dict, List, Optional
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser
from django.core.cache import cache

from baserow.contrib.builder.formula_property_extractor import (
    get_builder_used_property_names,

@@ -11,9 +10,14 @@ from baserow.contrib.builder.formula_property_extractor import (
from baserow.contrib.builder.models import Builder
from baserow.contrib.builder.theme.registries import theme_config_block_registry
from baserow.core.handler import CoreHandler
from baserow.core.utils import invalidate_versioned_cache, safe_get_or_set_cache

User = get_user_model()
CACHE_KEY_PREFIX = "used_properties_for_page"
BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS = 60


SENTINEL = "__no_results__"


class BuilderHandler:

@@ -41,6 +45,10 @@ class BuilderHandler:
            .specific
        )

    @classmethod
    def _get_builder_version_cache(cls, builder: Builder):
        return f"{CACHE_KEY_PREFIX}_version_{builder.id}"

    def get_builder_used_properties_cache_key(
        self, user: AbstractUser, builder: Builder
    ) -> Optional[str]:

@@ -54,9 +62,7 @@ class BuilderHandler:
        attribute, unlike the User Source User.
        """

        if isinstance(user, User):
            return None
        elif user.is_anonymous:
        if user.is_anonymous or not user.role:
            # When the user is anonymous, only use the prefix + page ID.
            role = ""
        else:

@@ -64,6 +70,10 @@ class BuilderHandler:

        return f"{CACHE_KEY_PREFIX}_{builder.id}{role}"

    @classmethod
    def invalidate_builder_public_properties_cache(cls, builder):
        invalidate_versioned_cache(cls._get_builder_version_cache(builder))

    def get_builder_public_properties(
        self, user: AbstractUser, builder: Builder
    ) -> Dict[str, Dict[int, List[str]]]:

@@ -80,15 +90,17 @@ class BuilderHandler:
        (required only by the backend).
        """

        cache_key = self.get_builder_used_properties_cache_key(user, builder)
        properties = cache.get(cache_key) if cache_key else None
        if properties is None:
        def compute_properties():
            properties = get_builder_used_property_names(user, builder)
            if cache_key:
                cache.set(
                    cache_key,
                    properties,
                    timeout=settings.BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS,
                )
            return SENTINEL if properties is None else properties

        return properties
        result = safe_get_or_set_cache(
            self.get_builder_used_properties_cache_key(user, builder),
            self._get_builder_version_cache(builder),
            default=compute_properties,
            timeout=settings.BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS
            if builder.workspace_id
            else BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS,
        )

        return result if result != SENTINEL else None

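The switch to `safe_get_or_set_cache` plus `invalidate_versioned_cache` above implements versioned cache keys: invalidation bumps a version counter instead of scanning and deleting every role-specific entry. A minimal in-memory sketch of the idea follows; the helper names mirror the diff, but this implementation is assumed, not Baserow's real one (which sits on Django's cache and relies on TTLs to expire stale versions).

_backend: dict[str, object] = {}  # stand-in for the Django cache backend

def _version(version_key: str) -> int:
    return _backend.setdefault(version_key, 0)  # type: ignore[return-value]

def safe_get_or_set_cache(key: str, version_key: str, default, timeout=None):
    # The value key embeds the current version, so a version bump makes all
    # previously written entries unreachable in one step.
    versioned_key = f"{key}:v{_version(version_key)}"
    if versioned_key not in _backend:
        _backend[versioned_key] = default()
    return _backend[versioned_key]

def invalidate_versioned_cache(version_key: str) -> None:
    # Old entries are orphaned rather than deleted; a TTL reclaims them.
    _backend[version_key] = _version(version_key) + 1

value = safe_get_or_set_cache("props_42", "props_version_42", lambda: {"external": {}})
invalidate_versioned_cache("props_version_42")
# The next read recomputes, because the version suffix has changed.
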
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-12-17 08:48+0000\n"
"POT-Creation-Date: 2025-01-15 11:59+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"

@@ -55,9 +55,9 @@ msgstr ""
msgid "Data source"
msgstr ""

#: src/baserow/contrib/builder/elements/mixins.py:569
#: src/baserow/contrib/builder/elements/mixins.py:574
#: src/baserow/contrib/builder/elements/mixins.py:579
#: src/baserow/contrib/builder/elements/mixins.py:578
#: src/baserow/contrib/builder/elements/mixins.py:583
#: src/baserow/contrib/builder/elements/mixins.py:588
#, python-format
msgid "Column %(count)s"
msgstr ""

@@ -0,0 +1,276 @@
# Generated by Django 5.0.9 on 2025-01-13 06:38

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0042_footerelement_headerelement"),
    ]

    operations = [
        migrations.AddField(
            model_name="buttonthemeconfigblock",
            name="button_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="inputthemeconfigblock",
            name="input_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="inputthemeconfigblock",
            name="label_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="medium",
                default="medium",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="linkthemeconfigblock",
            name="link_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="tablethemeconfigblock",
            name="table_header_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="semi-bold",
                default="semi-bold",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="body_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_1_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="bold",
                default="bold",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_2_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="semi-bold",
                default="semi-bold",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_3_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="medium",
                default="medium",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_4_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="medium",
                default="medium",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_5_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
        migrations.AddField(
            model_name="typographythemeconfigblock",
            name="heading_6_font_weight",
            field=models.CharField(
                choices=[
                    ("thin", "Thin"),
                    ("extra-light", "Extra Light"),
                    ("light", "Light"),
                    ("regular", "Regular"),
                    ("medium", "Medium"),
                    ("semi-bold", "Semi Bold"),
                    ("bold", "Bold"),
                    ("extra-bold", "Extra Bold"),
                    ("heavy", "Heavy"),
                    ("black", "Black"),
                    ("extra-black", "Extra Black"),
                ],
                db_default="regular",
                default="regular",
                max_length=11,
            ),
        ),
    ]

@@ -0,0 +1,17 @@
# Generated by Django 5.0.9 on 2025-01-02 10:22

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0043_buttonthemeconfigblock_button_font_weight_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="colorthemeconfigblock",
            name="custom_colors",
            field=models.JSONField(db_default=[], default=list),
        ),
    ]

@@ -0,0 +1,26 @@
# Generated by Django 5.0.9 on 2025-01-14 14:08

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("builder", "0044_colorthemeconfigblock_custom_colors"),
    ]

    operations = [
        migrations.AddField(
            model_name="element",
            name="style_background_radius",
            field=models.SmallIntegerField(
                db_default=0, default=0, help_text="Background radius."
            ),
        ),
        migrations.AddField(
            model_name="element",
            name="style_border_radius",
            field=models.SmallIntegerField(
                db_default=0, default=0, help_text="Border radius."
            ),
        ),
    ]

@@ -0,0 +1,30 @@
+# Generated by Django 5.0.9 on 2025-01-15 07:06
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("builder", "0045_element_style_background_radius_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="imagethemeconfigblock",
+            name="image_border_radius",
+            field=models.SmallIntegerField(
+                db_default=0,
+                default=0,
+                help_text="The border radius for this image element.",
+                validators=[
+                    django.core.validators.MinValueValidator(
+                        0, message="Value cannot be less than 0."
+                    ),
+                    django.core.validators.MaxValueValidator(
+                        100, message="Value cannot be greater than 100."
+                    ),
+                ],
+            ),
+        ),
+    ]

@@ -0,0 +1,47 @@
+# Generated by Django 5.0.9 on 2025-01-16 11:04
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("builder", "0046_imagethemeconfigblock_image_radius"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="repeatelement",
+            name="horizontal_gap",
+            field=models.IntegerField(
+                db_default=0,
+                default=0,
+                help_text="The amount of horizontal space between repeat elements.",
+                validators=[
+                    django.core.validators.MinValueValidator(
+                        0, message="Value cannot be less than 0."
+                    ),
+                    django.core.validators.MaxValueValidator(
+                        2000, message="Value cannot be greater than 2000."
+                    ),
+                ],
+            ),
+        ),
+        migrations.AddField(
+            model_name="repeatelement",
+            name="vertical_gap",
+            field=models.IntegerField(
+                db_default=0,
+                default=0,
+                help_text="The amount of vertical space between repeat elements.",
+                validators=[
+                    django.core.validators.MinValueValidator(
+                        0, message="Value cannot be less than 0."
+                    ),
+                    django.core.validators.MaxValueValidator(
+                        2000, message="Value cannot be greater than 2000."
+                    ),
+                ],
+            ),
+        ),
+    ]

@@ -0,0 +1,327 @@
+# Generated by Django 5.0.9 on 2025-01-21 09:37
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("builder", "0047_repeatelement_horizontal_gap_and_more"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_background_color",
+            field=models.CharField(
+                blank=True,
+                default="primary",
+                help_text="The background color of buttons",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_border_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="The border color of buttons",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_hover_background_color",
+            field=models.CharField(
+                blank=True,
+                default="#96baf6ff",
+                help_text="The background color of buttons when hovered",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_hover_border_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="The border color of buttons when hovered",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_hover_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#ffffffff",
+                help_text="The text color of buttons when hovered",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="buttonthemeconfigblock",
+            name="button_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#ffffffff",
+                help_text="The text color of buttons",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="border_color",
+            field=models.CharField(default="#d7d8d9ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="main_error_color",
+            field=models.CharField(default="#FF5A4A", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="main_success_color",
+            field=models.CharField(default="#12D452", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="main_warning_color",
+            field=models.CharField(default="#FCC74A", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="primary_color",
+            field=models.CharField(default="#5190efff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="colorthemeconfigblock",
+            name="secondary_color",
+            field=models.CharField(default="#0eaa42ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_background_color",
+            field=models.CharField(
+                blank=True,
+                default="#ffffffff",
+                help_text="The background color if `style_background` is color.",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_border_bottom_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="Bottom border color",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_border_left_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="Left border color",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_border_right_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="Right border color",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_border_top_color",
+            field=models.CharField(
+                blank=True,
+                default="border",
+                help_text="Top border color.",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="inputthemeconfigblock",
+            name="input_background_color",
+            field=models.CharField(
+                blank=True,
+                default="#FFFFFFFF",
+                help_text="The background color of the input",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="inputthemeconfigblock",
+            name="input_border_color",
+            field=models.CharField(
+                blank=True,
+                default="#000000FF",
+                help_text="The color of the input border",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="inputthemeconfigblock",
+            name="input_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#070810FF",
+                help_text="The text color of the input",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="inputthemeconfigblock",
+            name="label_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#070810FF",
+                help_text="The text color of the label",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="linkthemeconfigblock",
+            name="link_hover_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#96baf6ff",
+                help_text="The hover color of links when hovered",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="linkthemeconfigblock",
+            name="link_text_color",
+            field=models.CharField(
+                blank=True,
+                default="primary",
+                help_text="The text color of links",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="pagethemeconfigblock",
+            name="page_background_color",
+            field=models.CharField(
+                blank=True,
+                default="#ffffffff",
+                help_text="The background color of the page",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_border_color",
+            field=models.CharField(
+                blank=True,
+                default="#000000FF",
+                help_text="The color of the table border",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_cell_alternate_background_color",
+            field=models.CharField(
+                blank=True,
+                default="transparent",
+                help_text="The alternate background color of the table cells",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_cell_background_color",
+            field=models.CharField(
+                blank=True,
+                default="transparent",
+                help_text="The background color of the table cells",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_header_background_color",
+            field=models.CharField(
+                blank=True,
+                default="#edededff",
+                help_text="The background color of the table header cells",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_header_text_color",
+            field=models.CharField(
+                blank=True,
+                default="#000000ff",
+                help_text="The text color of the table header cells",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_horizontal_separator_color",
+            field=models.CharField(
+                blank=True,
+                default="#000000FF",
+                help_text="The color of the table horizontal separator",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tablethemeconfigblock",
+            name="table_vertical_separator_color",
+            field=models.CharField(
+                blank=True,
+                default="#000000FF",
+                help_text="The color of the table vertical separator",
+                max_length=255,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="body_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_1_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_2_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_3_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_4_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_5_text_color",
+            field=models.CharField(default="#070810ff", max_length=255),
+        ),
+        migrations.AlterField(
+            model_name="typographythemeconfigblock",
+            name="heading_6_text_color",
+            field=models.CharField(default="#202128", max_length=255),
+        ),
+    ]

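These migrations consistently pair `default` (resolved by Django in Python when a model instance is constructed) with `db_default` (compiled into a DEFAULT clause on the column, so rows inserted outside the ORM get the same value). A minimal sketch of that pairing, using a hypothetical model that is not part of this changeset:

    # Hypothetical model, for illustration only: `default` is applied by
    # the ORM in Python, while `db_default` (available since Django 5.0)
    # is enforced by the database for inserts that bypass Django.
    from django.db import models

    class FontWeightExample(models.Model):
        font_weight = models.CharField(
            max_length=11, default="regular", db_default="regular"
        )

        class Meta:
            app_label = "example"  # assumed app label for the sketch
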
@@ -0,0 +1,46 @@
+# Generated by Django 5.0.9 on 2025-01-30 10:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        (
+            "builder",
+            "0048_alter_buttonthemeconfigblock_button_background_color_and_more",
+        ),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="element",
+            name="style_width_child",
+            field=models.CharField(
+                choices=[
+                    ("normal", "Normal"),
+                    ("medium", "Medium"),
+                    ("small", "Small"),
+                ],
+                db_default="normal",
+                default="normal",
+                help_text="Indicates the width of the child element.",
+                max_length=6,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="element",
+            name="style_width",
+            field=models.CharField(
+                choices=[
+                    ("full", "Full"),
+                    ("full-width", "Full Width"),
+                    ("normal", "Normal"),
+                    ("medium", "Medium"),
+                    ("small", "Small"),
+                ],
+                default="normal",
+                help_text="Indicates the width of the root element.",
+                max_length=20,
+            ),
+        ),
+    ]

@@ -0,0 +1,40 @@
+# Generated by Django 5.0.9 on 2024-12-09 18:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        (
+            "builder",
+            "0049_element_style_width_child_alter_element_style_width",
+        ),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="page",
+            name="query_params",
+            field=models.JSONField(blank=True, default=list, db_default=[]),
+        ),
+        migrations.AddField(
+            model_name="linkelement",
+            name="query_parameters",
+            field=models.JSONField(
+                default=list,
+                db_default=[],
+                help_text="The query parameters for each parameter of the selected page if any.",
+                null=True,
+            ),
+        ),
+        migrations.AddField(
+            model_name="openpageworkflowaction",
+            name="query_parameters",
+            field=models.JSONField(
+                default=list,
+                db_default=[],
+                help_text="The query parameters for each parameter of the selected page if any.",
+                null=True,
+            ),
+        ),
+    ]

@@ -0,0 +1,16 @@
+# Generated by Django 5.0.9 on 2024-09-30 16:30
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("builder", "0050_page_query_params"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="builderworkflowaction",
+            options={"ordering": ("order", "id")},
+        ),
+    ]

@@ -1,20 +1,20 @@
 import re
 import typing
 
-from baserow.contrib.builder.pages.types import PAGE_PATH_PARAM_TYPE_CHOICES_LITERAL
+from baserow.contrib.builder.pages.types import PAGE_PARAM_TYPE_CHOICES_LITERAL
 
 # Every page param in a page path needs to be prefixed by the below symbol
 PAGE_PATH_PARAM_PREFIX = ":"
 
-PAGE_PATH_PARAM_TYPE_CHOICES = list(
-    typing.get_args(PAGE_PATH_PARAM_TYPE_CHOICES_LITERAL)
-)
+PAGE_PARAM_TYPE_CHOICES = list(typing.get_args(PAGE_PARAM_TYPE_CHOICES_LITERAL))
 
 # This regex needs to match the regex in `getPathParams` in the frontend
 # (builder/utils/path.js)
 PATH_PARAM_REGEX = re.compile("(:[A-Za-z0-9_]+)")
 PATH_PARAM_EXACT_MATCH_REGEX = re.compile("(^:[A-Za-z0-9_]+)$")
 
+QUERY_PARAM_EXACT_MATCH_REGEX = re.compile(r"(^[A-Za-z][A-Za-z0-9_-]*$)")
+
 # This constant can be used to be inserted into a path temporarily as a unique
 # placeholder since we already know the character can't be in the path (given it's
 # illegal) we can establish uniqueness.

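The new `QUERY_PARAM_EXACT_MATCH_REGEX` accepts names that start with a letter and continue with letters, digits, underscores, or hyphens. A quick sketch of what passes and what does not (the regex is copied from the hunk above; the sample names are illustrative):

    import re

    QUERY_PARAM_EXACT_MATCH_REGEX = re.compile(r"(^[A-Za-z][A-Za-z0-9_-]*$)")

    assert QUERY_PARAM_EXACT_MATCH_REGEX.match("page")
    assert QUERY_PARAM_EXACT_MATCH_REGEX.match("sort-by_2")
    assert not QUERY_PARAM_EXACT_MATCH_REGEX.match("2page")   # must start with a letter
    assert not QUERY_PARAM_EXACT_MATCH_REGEX.match(":param")  # that is path param syntax
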
@@ -1,6 +1,6 @@
 from typing import List
 
-from baserow.contrib.builder.pages.constants import PAGE_PATH_PARAM_TYPE_CHOICES
+from baserow.contrib.builder.pages.constants import PAGE_PARAM_TYPE_CHOICES
 
 
 class PageDoesNotExist(Exception):
@@ -89,7 +89,7 @@ class InvalidPagePathParamType(Exception):
         self.param_type = param_type
         super().__init__(
             f"The param type {param_type} is invalid, please chose from "
-            f"{PAGE_PATH_PARAM_TYPE_CHOICES}"
+            f"{PAGE_PARAM_TYPE_CHOICES}"
         )
 
 
@@ -103,3 +103,32 @@ class DuplicatePathParamsInPath(Exception):
             f"The path params {path_param_names} are defined multiple times "
             f"in path {path}"
         )
+
+
+class InvalidQueryParamName(Exception):
+    """Raised when an invalid query param name is being set"""
+
+    def __init__(self, query_param_name: str, *args, **kwargs):
+        self.query_param_name = query_param_name
+        super().__init__(f"The query param {query_param_name} is invalid")
+
+
+class DuplicatePageParams(Exception):
+    """Raised when same query param is defined multiple times or query
+    param names clash with path param names."""
+
+    def __init__(
+        self,
+        param: str,
+        query_param_names: List[str],
+        path_param_names: List[str],
+        *args,
+        **kwargs,
+    ):
+        self.query_param_names = query_param_names
+        self.path_param_names = path_param_names
+        self.param = param
+        super().__init__(
+            f"The query param {param} is defined multiple times in {query_param_names}"
+            f"or clash with path params {path_param_names}"
+        )

@@ -16,9 +16,12 @@ from baserow.contrib.builder.pages.constants import (
     ILLEGAL_PATH_SAMPLE_CHARACTER,
     PAGE_PATH_PARAM_PREFIX,
     PATH_PARAM_REGEX,
+    QUERY_PARAM_EXACT_MATCH_REGEX,
 )
 from baserow.contrib.builder.pages.exceptions import (
+    DuplicatePageParams,
     DuplicatePathParamsInPath,
+    InvalidQueryParamName,
     PageDoesNotExist,
     PageNameNotUnique,
     PageNotInBuilder,
@@ -28,7 +31,11 @@ from baserow.contrib.builder.pages.exceptions import (
     SharedPageIsReadOnly,
 )
 from baserow.contrib.builder.pages.models import Page
-from baserow.contrib.builder.pages.types import PagePathParams
+from baserow.contrib.builder.pages.types import (
+    PagePathParams,
+    PageQueryParam,
+    PageQueryParams,
+)
 from baserow.contrib.builder.types import PageDict
 from baserow.contrib.builder.workflow_actions.handler import (
     BuilderWorkflowActionHandler,
@@ -41,7 +48,7 @@ from baserow.core.utils import ChildProgressBuilder, MirrorDict, find_unused_nam
 class PageHandler:
     def get_page(self, page_id: int, base_queryset: Optional[QuerySet] = None) -> Page:
         """
-        Gets a page by ID
+        Gets a page by ID.
 
         :param page_id: The ID of the page
         :param base_queryset: Can be provided to already filter or apply performance
@@ -97,15 +104,17 @@ class PageHandler:
         name: str,
         path: str,
         path_params: PagePathParams = None,
+        query_params: PageQueryParam = None,
         shared: bool = False,
     ) -> Page:
         """
-        Creates a new page
+        Creates a new page.
 
         :param builder: The builder the page belongs to
         :param name: The name of the page
         :param path: The path of the page
         :param path_params: The params of the path provided
+        :param query_params: The query params of the page provided
         :param shared: If this is the shared page. They should be only one shared page
             per builder application.
         :return: The newly created page instance
@@ -113,6 +122,7 @@
 
         last_order = Page.get_last_order(builder)
         path_params = path_params or []
+        query_params = query_params or []
 
         self.is_page_path_valid(path, path_params, raises=True)
         self.is_page_path_unique(builder, path, raises=True)
@@ -124,6 +134,7 @@
                 order=last_order,
                 path=path,
                 path_params=path_params,
+                query_params=query_params,
                 shared=shared,
             )
         except IntegrityError as e:
@@ -137,7 +148,7 @@
 
     def delete_page(self, page: Page):
         """
-        Deletes the page provided
+        Deletes the page provided.
 
         :param page: The page that must be deleted
         """
@@ -149,7 +160,7 @@
 
     def update_page(self, page: Page, **kwargs) -> Page:
         """
-        Updates fields of a page
+        Updates fields of a page.
 
         :param page: The page that should be updated
         :param kwargs: The fields that should be updated with their corresponding value
@@ -172,6 +183,13 @@
                 ),  # We don't want to conflict with the current page
                 raises=True,
             )
+        if "query_params" in kwargs:
+            query_params = kwargs.get("query_params")
+            self.validate_query_params(
+                kwargs.get("path", page.path),
+                kwargs.get("path_params", page.path_params),
+                query_params,
+            )
 
         for key, value in kwargs.items():
             setattr(page, key, value)
@@ -215,7 +233,7 @@ class PageHandler:
         self, page: Page, progress_builder: Optional[ChildProgressBuilder] = None
     ):
         """
-        Duplicates an existing page instance
+        Duplicates an existing page instance.
 
         :param page: The page that is being duplicated
         :param progress_builder: A progress object that can be used to report progress
@@ -343,6 +361,47 @@ class PageHandler:
 
         return True
 
+    def validate_query_params(
+        self, path: str, path_params: PagePathParams, query_params: PageQueryParams
+    ) -> bool:
+        """
+        Validates the query parameters of a page.
+
+        :param path: The path of the page.
+        :param path_params: The path parameters defined for the page.
+        :param query_params: The query parameters to validate.
+        :raises InvalidQueryParamName: If a query parameter name doesn't match the
+            required format.
+        :raises DuplicatePageParams: If a query parameter is defined multiple times
+            or clashes with path parameters.
+        :return: True if validation passes.
+        """
+
+        # Extract path param names for checking duplicates
+        path_param_names = [p["name"] for p in path_params]
+
+        # Get list of query param names
+        query_param_names = [p["name"] for p in query_params]
+
+        # Check for duplicates within query params
+        seen_params = set()
+        for param_name in query_param_names:
+            # Validate query param name format using regex
+            if not QUERY_PARAM_EXACT_MATCH_REGEX.match(param_name):
+                raise InvalidQueryParamName(query_param_name=param_name)
+
+            # Check if param name already seen or conflicts with path param
+            if param_name in seen_params or param_name in path_param_names:
+                raise DuplicatePageParams(
+                    param=param_name,
+                    query_param_names=query_param_names,
+                    path_param_names=path_param_names,
+                )
+
+            seen_params.add(param_name)
+
+        return True
+
     def is_page_path_unique(
         self,
         builder: Builder,
@@ -369,7 +428,6 @@
             if raises:
                 raise PagePathNotUnique(path=path, builder_id=builder.id)
             return False
-
         return True
 
     def generalise_path(self, path: str) -> str:
@@ -445,6 +503,7 @@
             order=page.order,
             path=page.path,
             path_params=page.path_params,
+            query_params=page.query_params,
             shared=page.shared,
             elements=serialized_elements,
             data_sources=serialized_data_sources,
@@ -613,6 +672,7 @@
             order=serialized_page["order"],
             path=serialized_page["path"],
             path_params=serialized_page["path_params"],
+            query_params=serialized_page.get("query_params", []),
             shared=False,
             visibility=serialized_page.get("visibility", Page.VISIBILITY_TYPES.ALL),
             role_type=serialized_page.get("role_type", Page.ROLE_TYPES.ALLOW_ALL),

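Put together, `create_page` and `update_page` now run every query param name through the regex check and reject duplicates or clashes with path params. A hedged usage sketch of the new `validate_query_params` (the param values are illustrative, import paths follow the hunks above):

    from baserow.contrib.builder.pages.exceptions import (
        DuplicatePageParams,
        InvalidQueryParamName,
    )
    from baserow.contrib.builder.pages.handler import PageHandler

    handler = PageHandler()
    path_params = [{"name": "id", "param_type": "numeric"}]

    # Valid: a unique name that matches the query param regex.
    handler.validate_query_params(
        "/product/:id", path_params, [{"name": "sort", "param_type": "text"}]
    )

    try:
        # "id" clashes with the path param of the same name.
        handler.validate_query_params(
            "/product/:id", path_params, [{"name": "id", "param_type": "text"}]
        )
    except DuplicatePageParams:
        pass

    try:
        # ":sort" uses path param syntax, so the name check rejects it.
        handler.validate_query_params(
            "/product/:id", path_params, [{"name": ":sort", "param_type": "text"}]
        )
    except InvalidQueryParamName:
        pass
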
@@ -54,6 +54,7 @@ class Page(
     name = models.CharField(max_length=255)
     path = models.CharField(max_length=255, validators=[path_validation])
     path_params = models.JSONField(default=dict)
+    query_params = models.JSONField(default=list, blank=True, db_default=[])
 
     # Shared page is invisible to the user but contains all shared data like
     # shared data sources or shared elements. That way we keep everything working as

@@ -19,7 +19,7 @@ from baserow.contrib.builder.pages.signals import (
     page_updated,
     pages_reordered,
 )
-from baserow.contrib.builder.pages.types import PagePathParams
+from baserow.contrib.builder.pages.types import PagePathParams, PageQueryParams
 from baserow.core.handler import CoreHandler
 from baserow.core.utils import ChildProgressBuilder, extract_allowed
 
@@ -55,6 +55,7 @@ class PageService:
         name: str,
         path: str,
         path_params: PagePathParams = None,
+        query_params: PageQueryParams = None,
     ) -> Page:
         """
         Creates a new page
@@ -74,7 +75,9 @@
             context=builder,
         )
 
-        page = self.handler.create_page(builder, name, path, path_params=path_params)
+        page = self.handler.create_page(
+            builder, name, path, path_params=path_params, query_params=query_params
+        )
 
         page_created.send(self, page=page, user=user)
 
@@ -119,7 +122,16 @@
         )
 
         allowed_updates = extract_allowed(
-            kwargs, ["name", "path", "path_params", "visibility", "role_type", "roles"]
+            kwargs,
+            [
+                "name",
+                "path",
+                "path_params",
+                "visibility",
+                "role_type",
+                "roles",
+                "query_params",
+            ],
         )
 
         self.handler.update_page(page, **allowed_updates)

@@ -1,11 +1,19 @@
 from typing import List, Literal, TypedDict
 
-PAGE_PATH_PARAM_TYPE_CHOICES_LITERAL = Literal["text", "numeric"]
+PAGE_PARAM_TYPE_CHOICES_LITERAL = Literal["text", "numeric"]
 
 
 class PagePathParam(TypedDict):
     name: str
-    param_type: Literal[PAGE_PATH_PARAM_TYPE_CHOICES_LITERAL]
+    param_type: Literal[PAGE_PARAM_TYPE_CHOICES_LITERAL]
 
 
 PagePathParams = List[PagePathParam]
+
+
+class PageQueryParam(TypedDict):
+    name: str
+    param_type: Literal[PAGE_PARAM_TYPE_CHOICES_LITERAL]
+
+
+PageQueryParams = List[PageQueryParam]

@@ -4,6 +4,7 @@ from django.core.validators import URLValidator
 from baserow.contrib.builder.pages.constants import (
     PAGE_PATH_PARAM_PREFIX,
     PATH_PARAM_EXACT_MATCH_REGEX,
+    QUERY_PARAM_EXACT_MATCH_REGEX,
 )
 
 
@@ -39,3 +40,15 @@ def path_param_name_validation(value: str):
 
     if not PATH_PARAM_EXACT_MATCH_REGEX.match(full_path_param):
         raise ValidationError(f"Path param {value} contains invalid characters")
+
+
+def query_param_name_validation(value: str):
+    """
+    Verifies that the path param is semantically valid.
+
+    :param value: The path param to check
+    :raises ValidationError: If the path param is not semantically valid
+    """
+
+    if not QUERY_PARAM_EXACT_MATCH_REGEX.match(value):
+        raise ValidationError(f"Query param {value} contains invalid characters")

@@ -68,6 +68,8 @@ def load_test_data():
     text_element_type = element_type_registry.get("text")
     table_element_type = element_type_registry.get("table")
     link_element_type = element_type_registry.get("link")
+    header_element = element_type_registry.get("header")
+    column_element = element_type_registry.get("column")
 
     try:
         homepage = Page.objects.get(name="Homepage", builder=builder)
@@ -338,3 +340,23 @@ def load_test_data():
             navigation_type="page",
             navigate_to_page_id=products.id,
         )
+
+    # Add shared elements
+    if builder.shared_page.element_set.count() == 0:
+        header = ElementHandler().create_element(
+            header_element,
+            builder.shared_page,
+        )
+        column = ElementHandler().create_element(
+            column_element, builder.shared_page, parent_element_id=header.id
+        )
+        ElementHandler().create_element(
+            link_element_type,
+            builder.shared_page,
+            parent_element_id=column.id,
+            place_in_container="0",
+            value='"Home"',
+            variant="link",
+            navigation_type="page",
+            navigate_to_page_id=homepage.id,
+        )

backend/src/baserow/contrib/builder/signals.py (new file, 124 lines)

@@ -0,0 +1,124 @@
+from django.dispatch import receiver
+
+from baserow.contrib.builder.data_sources import signals as ds_signals
+from baserow.contrib.builder.elements import signals as element_signals
+from baserow.contrib.builder.handler import BuilderHandler
+from baserow.contrib.builder.models import Builder
+from baserow.contrib.builder.pages import signals as page_signals
+from baserow.contrib.builder.workflow_actions import signals as wa_signals
+from baserow.core.user_sources import signals as us_signals
+
+__all__ = [
+    "element_created",
+    "elements_created",
+    "element_deleted",
+    "element_updated",
+    "wa_created",
+    "wa_updated",
+    "wa_deleted",
+    "ds_created",
+    "ds_updated",
+    "ds_deleted",
+    "page_deleted",
+]
+
+
+# Elements
+
+
+@receiver(element_signals.element_created)
+def element_created(sender, element, user, before_id=None, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(element.page.builder)
+
+
+@receiver(element_signals.elements_created)
+def elements_created(sender, elements, page, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(page.builder)
+
+
+@receiver(element_signals.element_updated)
+def element_updated(sender, element, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(element.page.builder)
+
+
+@receiver(element_signals.element_deleted)
+def element_deleted(sender, page, element_id, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(page.builder)
+
+
+# Workflow actions
+
+
+@receiver(wa_signals.workflow_action_created)
+def wa_created(sender, workflow_action, user, before_id=None, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(
+        workflow_action.page.builder
+    )
+
+
+@receiver(wa_signals.workflow_action_updated)
+def wa_updated(sender, workflow_action, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(
+        workflow_action.page.builder
+    )
+
+
+@receiver(wa_signals.workflow_action_deleted)
+def wa_deleted(sender, workflow_action_id, page, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(page.builder)
+
+
+# Data sources
+
+
+@receiver(ds_signals.data_source_created)
+def ds_created(sender, data_source, user, before_id=None, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(
+        data_source.page.builder
+    )
+
+
+@receiver(ds_signals.data_source_updated)
+def ds_updated(sender, data_source, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(
+        data_source.page.builder
+    )
+
+
+@receiver(ds_signals.data_source_deleted)
+def ds_deleted(sender, data_source_id, page, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(page.builder)
+
+
+# Page
+
+
+@receiver(page_signals.page_deleted)
+def page_deleted(sender, builder, page_id, user, **kwargs):
+    BuilderHandler().invalidate_builder_public_properties_cache(builder)
+
+
+# User sources
+
+
+@receiver(us_signals.user_source_created)
+def us_created(sender, user_source, user, before_id=None, **kwargs):
+    if isinstance(user_source.application.specific, Builder):
+        BuilderHandler().invalidate_builder_public_properties_cache(
+            user_source.application.specific
+        )
+
+
+@receiver(us_signals.user_source_updated)
+def us_updated(sender, user_source, user, **kwargs):
+    if isinstance(user_source.application.specific, Builder):
+        BuilderHandler().invalidate_builder_public_properties_cache(
+            user_source.application.specific
+        )
+
+
+@receiver(us_signals.user_source_deleted)
+def us_deleted(sender, user_source_id, application, user, **kwargs):
+    if isinstance(application.specific, Builder):
+        BuilderHandler().invalidate_builder_public_properties_cache(
+            application.specific
+        )

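Every receiver in this new module funnels into the same cache invalidation call. A hedged sketch of exercising one of them by hand, for instance in a test, assuming `element` and `user` fixtures already exist:

    # Illustrative only: firing element_created triggers the receiver
    # above, which invalidates the builder's public properties cache.
    from unittest.mock import patch

    from baserow.contrib.builder.elements import signals as element_signals

    with patch(
        "baserow.contrib.builder.handler.BuilderHandler"
        ".invalidate_builder_public_properties_cache"
    ) as invalidate:
        element_signals.element_created.send(
            None, element=element, user=user, before_id=None
        )
        invalidate.assert_called_once_with(element.page.builder)
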
@@ -3,7 +3,9 @@ from django.db import models
 
 from baserow.contrib.builder.constants import (
     BACKGROUND_IMAGE_MODES,
+    COLOR_FIELD_MAX_LENGTH,
     WIDTHS,
+    FontWeights,
     HorizontalAlignments,
 )
 from baserow.core.fields import AutoOneToOneField
@@ -22,12 +24,25 @@ class ThemeConfigBlock(models.Model):
 
 
 class ColorThemeConfigBlock(ThemeConfigBlock):
-    primary_color = models.CharField(max_length=9, default="#5190efff")
-    secondary_color = models.CharField(max_length=9, default="#0eaa42ff")
-    border_color = models.CharField(max_length=9, default="#d7d8d9ff")
-    main_success_color = models.CharField(max_length=9, default="#12D452")
-    main_warning_color = models.CharField(max_length=9, default="#FCC74A")
-    main_error_color = models.CharField(max_length=9, default="#FF5A4A")
+    primary_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#5190efff"
+    )
+    secondary_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#0eaa42ff"
+    )
+    border_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#d7d8d9ff"
+    )
+    main_success_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#12D452"
+    )
+    main_warning_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#FCC74A"
+    )
+    main_error_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#FF5A4A"
+    )
+    custom_colors = models.JSONField(default=list, db_default=[])
 
 
 class TypographyThemeConfigBlock(ThemeConfigBlock):
@@ -36,7 +51,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     body_font_size = models.SmallIntegerField(default=14)
-    body_text_color = models.CharField(max_length=9, default="#070810ff")
+    body_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
+    body_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     body_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -47,7 +70,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_1_font_size = models.SmallIntegerField(default=24)
-    heading_1_text_color = models.CharField(max_length=9, default="#070810ff")
+    heading_1_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.BOLD,
+        db_default=FontWeights.BOLD,
+    )
+    heading_1_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     heading_1_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -58,7 +89,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_2_font_size = models.SmallIntegerField(default=20)
-    heading_2_text_color = models.CharField(max_length=9, default="#070810ff")
+    heading_2_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.SEMI_BOLD,
+        db_default=FontWeights.SEMI_BOLD,
+    )
+    heading_2_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     heading_2_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -69,7 +108,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_3_font_size = models.SmallIntegerField(default=16)
-    heading_3_text_color = models.CharField(max_length=9, default="#070810ff")
+    heading_3_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.MEDIUM,
+        db_default=FontWeights.MEDIUM,
+    )
+    heading_3_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     heading_3_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -80,7 +127,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_4_font_size = models.SmallIntegerField(default=16)
-    heading_4_text_color = models.CharField(max_length=9, default="#070810ff")
+    heading_4_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.MEDIUM,
+        db_default=FontWeights.MEDIUM,
+    )
+    heading_4_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     heading_4_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -91,7 +146,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_5_font_size = models.SmallIntegerField(default=14)
-    heading_5_text_color = models.CharField(max_length=9, default="#070810ff")
+    heading_5_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
+    heading_5_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#070810ff"
+    )
     heading_5_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -102,7 +165,15 @@ class TypographyThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     heading_6_font_size = models.SmallIntegerField(default=14)
-    heading_6_text_color = models.CharField(max_length=9, default="#202128")
+    heading_6_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
+    heading_6_text_color = models.CharField(
+        max_length=COLOR_FIELD_MAX_LENGTH, default="#202128"
+    )
     heading_6_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -116,6 +187,12 @@ class ButtonThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     button_font_size = models.SmallIntegerField(default=13)
+    button_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
     button_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
@@ -132,19 +209,19 @@ class ButtonThemeConfigBlock(ThemeConfigBlock):
         default=WIDTHS.AUTO,
     )
     button_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="primary",
         blank=True,
         help_text="The background color of buttons",
     )
     button_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#ffffffff",
         blank=True,
         help_text="The text color of buttons",
     )
     button_border_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="border",
         blank=True,
         help_text="The border color of buttons",
@@ -162,19 +239,19 @@ class ButtonThemeConfigBlock(ThemeConfigBlock):
         default=12, help_text="Button horizontal padding"
     )
     button_hover_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#96baf6ff",
         blank=True,
         help_text="The background color of buttons when hovered",
     )
     button_hover_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#ffffffff",
         blank=True,
         help_text="The text color of buttons when hovered",
     )
     button_hover_border_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="border",
         blank=True,
         help_text="The border color of buttons when hovered",
@@ -187,19 +264,25 @@ class LinkThemeConfigBlock(ThemeConfigBlock):
         default="inter",
     )
     link_font_size = models.SmallIntegerField(default=13)
+    link_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
     link_text_alignment = models.CharField(
         choices=HorizontalAlignments.choices,
         max_length=10,
         default=HorizontalAlignments.LEFT,
     )
     link_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="primary",
         blank=True,
         help_text="The text color of links",
     )
     link_hover_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#96baf6ff",
         blank=True,
         help_text="The hover color of links when hovered",
@@ -236,6 +319,16 @@ class ImageThemeConfigBlock(ThemeConfigBlock):
         ],
     )
 
+    image_border_radius = models.SmallIntegerField(
+        help_text="The border radius for this image element.",
+        validators=[
+            MinValueValidator(0, message="Value cannot be less than 0."),
+            MaxValueValidator(100, message="Value cannot be greater than 100."),
+        ],
+        default=0,
+        db_default=0,
+    )
+
     image_constraint = models.CharField(
         help_text="The image constraint to apply to this image",
         choices=IMAGE_CONSTRAINT_TYPES.choices,
@@ -250,7 +343,7 @@ class PageThemeConfigBlock(ThemeConfigBlock):
     """
 
     page_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#ffffffff",
         blank=True,
         help_text="The background color of the page",
@@ -283,7 +376,7 @@ class InputThemeConfigBlock(ThemeConfigBlock):
         help_text="The font family of the label",
     )
     label_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#070810FF",
         blank=True,
         help_text="The text color of the label",
@@ -292,6 +385,12 @@ class InputThemeConfigBlock(ThemeConfigBlock):
         default=13,
         help_text="The font size of the label",
     )
+    label_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.MEDIUM,
+        db_default=FontWeights.MEDIUM,
+    )
 
     input_font_family = models.CharField(
         max_length=250,
@@ -299,20 +398,26 @@ class InputThemeConfigBlock(ThemeConfigBlock):
         help_text="The font family of the input",
     )
     input_font_size = models.SmallIntegerField(default=13)
+    input_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.REGULAR,
+        db_default=FontWeights.REGULAR,
+    )
     input_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#070810FF",
         blank=True,
         help_text="The text color of the input",
     )
     input_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#FFFFFFFF",
         blank=True,
         help_text="The background color of the input",
     )
     input_border_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#000000FF",
         blank=True,
         help_text="The color of the input border",
@@ -338,7 +443,7 @@ class TableThemeConfigBlock(ThemeConfigBlock):
 
     # Table styles
     table_border_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
        default="#000000FF",
         blank=True,
         help_text="The color of the table border",
@@ -352,13 +457,13 @@ class TableThemeConfigBlock(ThemeConfigBlock):
 
     # Header styles
     table_header_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#edededff",
         blank=True,
         help_text="The background color of the table header cells",
     )
     table_header_text_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#000000ff",
         blank=True,
         help_text="The text color of the table header cells",
@@ -367,6 +472,12 @@ class TableThemeConfigBlock(ThemeConfigBlock):
         default=13,
         help_text="The font size of the header cells",
     )
+    table_header_font_weight = models.CharField(
+        choices=FontWeights.choices,
+        max_length=11,
+        default=FontWeights.SEMI_BOLD,
+        db_default=FontWeights.SEMI_BOLD,
+    )
     table_header_font_family = models.CharField(
         max_length=250,
         default="inter",
@@ -380,14 +491,14 @@ class TableThemeConfigBlock(ThemeConfigBlock):
 
     # Cell styles
     table_cell_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="transparent",
         blank=True,
         help_text="The background color of the table cells",
     )
 
     table_cell_alternate_background_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="transparent",
         blank=True,
         help_text="The alternate background color of the table cells",
@@ -406,7 +517,7 @@ class TableThemeConfigBlock(ThemeConfigBlock):
 
     # Separator styles
     table_vertical_separator_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#000000FF",
         blank=True,
         help_text="The color of the table vertical separator",
@@ -416,7 +527,7 @@ class TableThemeConfigBlock(ThemeConfigBlock):
     )
 
     table_horizontal_separator_color = models.CharField(
-        max_length=20,
+        max_length=COLOR_FIELD_MAX_LENGTH,
         default="#000000FF",
         blank=True,
         help_text="The color of the table horizontal separator",

@@ -3,6 +3,7 @@ from typing import Type, TypeVar
 
 from django.db.models import QuerySet
 
+from baserow.contrib.builder.models import Builder
 from baserow.core.registry import (
     CustomFieldsInstanceMixin,
     CustomFieldsRegistryMixin,
@@ -89,6 +90,21 @@ class ThemeConfigBlockType(
 
         return instance
 
+    def enhance_queryset(self, queryset: QuerySet[Builder]) -> QuerySet[Builder]:
+        """
+        Enhance the queryset to select the related theme config block model. This method
+        is used by enhance_list_builder_queryset to select all related theme config
+        block models in a single query. By default, this method selects the related
+        theme config but it can be customized by subclasses to add additional
+        select_related or prefetch_related calls.
+
+        :param queryset: The queryset that lists the builder applications.
+        :return: The same queryset with proper select_related and/or prefetch_related to
+            reduce the number of queries necessary to fetch the data.
+        """
+
+        return queryset.select_related(self.related_name_in_builder_model)
+
 
 ThemeConfigBlockTypeSubClass = TypeVar(
     "ThemeConfigBlockTypeSubClass", bound=ThemeConfigBlockType
@@ -115,9 +131,8 @@ class ThemeConfigBlockRegistry(
         :return: The enhanced queryset.
         """
 
-        for theme_config_block in self.get_all():
-            related_name = theme_config_block.related_name_in_builder_model
-            queryset = queryset.select_related(related_name)
+        for theme_config_block_type in self.get_all():
+            queryset = theme_config_block_type.enhance_queryset(queryset)
         return queryset
 

@@ -2,9 +2,11 @@ from typing import Any, Dict, Optional
 from zipfile import ZipFile
 
 from django.core.files.storage import Storage
+from django.db.models import QuerySet
 
 from rest_framework import serializers
 
+from baserow.contrib.builder.models import Builder
 from baserow.core.user_files.handler import UserFileHandler
 
 from .models import (
@@ -166,6 +168,11 @@ class PageThemeConfigBlockType(ThemeConfigBlockType):
 
         return value
 
+    def enhance_queryset(self, queryset: QuerySet[Builder]) -> QuerySet[Builder]:
+        return queryset.select_related(
+            f"{self.related_name_in_builder_model}__page_background_file"
+        )
+
 
 class InputThemeConfigBlockType(ThemeConfigBlockType):
     type = "input"

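The registry refactor above turns the per-block select_related into an overridable hook, and PageThemeConfigBlockType is the first to use it to pull in an extra related file. A hedged sketch of the same extension point in a hypothetical block type (module paths assumed from the hunks above; the registration details of a real block type are elided):

    from django.db.models import QuerySet

    from baserow.contrib.builder.models import Builder
    from baserow.contrib.builder.theme.registries import ThemeConfigBlockType

    class MyThemeConfigBlockType(ThemeConfigBlockType):
        type = "my_theme_block"  # hypothetical type name

        def enhance_queryset(self, queryset: QuerySet[Builder]) -> QuerySet[Builder]:
            # Keep the default select_related of the config block itself,
            # then chain any extra relation the serializer will touch so
            # the builder listing stays a single query.
            queryset = super().enhance_queryset(queryset)
            return queryset.select_related(
                f"{self.related_name_in_builder_model}__some_related_file"
            )
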
@@ -1,6 +1,6 @@
 from typing import List, Optional, TypedDict
 
-from baserow.contrib.builder.pages.types import PagePathParams
+from baserow.contrib.builder.pages.types import PagePathParams, PageQueryParams
 from baserow.core.integrations.types import IntegrationDictSubClass
 from baserow.core.services.types import ServiceDictSubClass
 from baserow.core.user_sources.types import UserSourceDictSubClass
@@ -33,11 +33,14 @@ class ElementDict(TypedDict):
     style_border_right_size: int
     style_padding_right: int
     style_margin_right: int
+    style_background_radius: int
+    style_border_radius: int
     style_background: str
     style_background_color: str
     style_background_file_id: str
     style_background_mode: str
     style_width: str
+    style_width_child: str
 
 
 class DataSourceDict(TypedDict):
@@ -53,6 +56,7 @@ class PageDict(TypedDict):
     order: int
     path: str
     path_params: PagePathParams
+    query_params: PageQueryParams
     elements: List[ElementDict]
     data_sources: List[DataSourceDict]
     workflow_actions: List[WorkflowAction]

@@ -1,9 +1,12 @@
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional
 from zipfile import ZipFile
 
 from django.core.files.storage import Storage
 from django.db.models import QuerySet
 
+from baserow.contrib.builder.data_providers.registries import (
+    builder_data_provider_type_registry,
+)
 from baserow.contrib.builder.data_sources.builder_dispatch_context import (
     BuilderDispatchContext,
 )
@@ -22,6 +25,7 @@ from baserow.contrib.builder.workflow_actions.registries import (
 )
 from baserow.core.exceptions import IdDoesNotExist
 from baserow.core.services.handler import ServiceHandler
+from baserow.core.services.types import DispatchResult
 from baserow.core.workflow_actions.handler import WorkflowActionHandler
 from baserow.core.workflow_actions.models import WorkflowAction
 from baserow.core.workflow_actions.registries import WorkflowActionType
@@ -171,7 +175,7 @@ class BuilderWorkflowActionHandler(WorkflowActionHandler):
         self,
         workflow_action: BuilderWorkflowServiceAction,
         dispatch_context: BuilderDispatchContext,
-    ) -> Any:
+    ) -> DispatchResult:
         """
         Dispatch the service related to the workflow_action.
 
@@ -182,6 +186,13 @@
         :return: The result of dispatching the workflow action.
         """
 
-        return ServiceHandler().dispatch_service(
+        dispatch_result = ServiceHandler().dispatch_service(
             workflow_action.service.specific, dispatch_context
         )
+
+        for data_provider in builder_data_provider_type_registry.get_all():
+            data_provider.post_dispatch(
+                dispatch_context, workflow_action, dispatch_result
+            )
+
+        return dispatch_result

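After this change, every registered builder data provider gets a chance to observe the outcome of a dispatched workflow action before the result is returned. A hedged sketch of a provider using the hook (the base class name and registration mechanics are assumed, not shown in this diff):

    # Hypothetical provider, for illustration only.
    class AuditLogDataProviderType(DataProviderType):  # assumed base class
        type = "audit_log"

        def post_dispatch(self, dispatch_context, workflow_action, dispatch_result):
            # Invoked by BuilderWorkflowActionHandler.dispatch_workflow_action
            # once the service has produced its DispatchResult.
            entry = {
                "action_id": workflow_action.id,
                "status": dispatch_result.status,
            }
            print(entry)  # stand-in for real auditing
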
@@ -57,6 +57,9 @@ class BuilderWorkflowAction(
         queryset = BuilderWorkflowAction.objects.filter(page=page, element=None)
         return cls.get_highest_order_of_queryset(queryset) + 1
 
+    class Meta:
+        ordering = ("order", "id")
+
 
 class NotificationWorkflowAction(BuilderWorkflowAction):
     title = FormulaField(default="")

@@ -40,6 +40,7 @@ from baserow.contrib.builder.workflow_actions.workflow_action_types import (
     BuilderWorkflowActionType,
 )
 from baserow.core.handler import CoreHandler
+from baserow.core.services.types import DispatchResult
 
 if TYPE_CHECKING:
     from baserow.contrib.builder.models import Builder
@@ -290,6 +291,18 @@ class BuilderWorkflowActionService:
 
         return full_order
 
+    def remove_unused_field_names(
+        self,
+        row: dict[str, Any],
+        field_names: List[str],
+    ) -> dict[str, Any]:
+        """
+        Given a row dictionary, return a version of it that only contains keys
+        existing in the field_names list.
+        """
+
+        return {key: value for key, value in row.items() if key in field_names}
+
     def dispatch_action(
         self,
         user,
@@ -320,4 +333,16 @@
             context=workflow_action,
         )
 
-        return self.handler.dispatch_workflow_action(workflow_action, dispatch_context)
+        result = self.handler.dispatch_workflow_action(
+            workflow_action, dispatch_context
+        )
+
+        # Remove unfiltered fields
+        field_names = dispatch_context.public_allowed_properties.get(
+            "external", {}
+        ).get(workflow_action.service.id, [])
+
+        return DispatchResult(
+            data=self.remove_unused_field_names(result.data, field_names),
+            status=result.status,
+        )

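The filtering step is a plain key whitelist: only entries whose key appears in the allowed field names survive in the returned data. In isolation (values chosen for illustration):

    row = {"id": 1, "field_1": "visible", "field_2": "hidden"}
    field_names = ["id", "field_1"]

    filtered = {key: value for key, value in row.items() if key in field_names}
    assert filtered == {"id": 1, "field_1": "visible"}
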
@@ -135,6 +135,12 @@ class OpenPageWorkflowActionType(BuilderWorkflowActionType):
                 workflow_action.page_parameters[index]["value"] = new_formula
                 yield workflow_action
 
+        for index, query_parameter in enumerate(workflow_action.query_parameters or []):
+            new_formula = yield query_parameter.get("value")
+            if new_formula is not None:
+                workflow_action.query_parameters[index]["value"] = new_formula
+                yield workflow_action
+
     def deserialize_property(
         self,
         prop_name,
@@ -227,6 +233,7 @@ class BuilderWorkflowServiceActionType(BuilderWorkflowActionType):
             help_text="The service which this workflow action is associated with.",
         )
     }
+    is_server_workflow = True
     serializer_field_overrides = {
         "service": PolymorphicServiceSerializer(
             help_text="The service which this workflow action is associated with."

backend/src/baserow/contrib/dashboard/actions.py (new file)

@@ -0,0 +1,3 @@
+from django.utils.translation import gettext_lazy as _
+
+DASHBOARD_ACTION_CONTEXT = _('in dashboard "%(dashboard_name)s" (%(dashboard_id)s).')
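DASHBOARD_ACTION_CONTEXT is a lazily translated %-style template that the action descriptions below interpolate with each action's recorded params. A plain-Python sketch of the interpolation, with the lazy gettext wrapper replaced by an ordinary string:

# Plain-string stand-in for the gettext_lazy template above.
DASHBOARD_ACTION_CONTEXT = 'in dashboard "%(dashboard_name)s" (%(dashboard_id)s).'

params = {"dashboard_name": "Sales", "dashboard_id": 7}
print(DASHBOARD_ACTION_CONTEXT % params)  # in dashboard "Sales" (7).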
@@ -53,9 +53,9 @@ class DashboardDataSourceSerializer(ServiceSerializer):
     def get_dashboard_id(self, instance):
         return self.context["data_source"].dashboard_id

-    @extend_schema_field(OpenApiTypes.FLOAT)
+    @extend_schema_field(OpenApiTypes.STR)
     def get_order(self, instance):
-        return self.context["data_source"].order
+        return str(self.context["data_source"].order)

     @extend_schema_field(OpenApiTypes.OBJECT)
     def get_schema(self, instance):
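Returning the order as a string rather than a float presumably protects the fractional ordering value from float rounding; a Decimal-backed order can carry more digits than a double can represent. A quick illustration of the difference:

from decimal import Decimal

order = Decimal("1.00000000000000000001")
print(float(order))  # 1.0, the fractional part is lost
print(str(order))    # '1.00000000000000000001', preserved for the client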
@@ -7,13 +7,20 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from baserow.api.decorators import map_exceptions, validate_data_custom_fields
-from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema
+from baserow.api.schemas import (
+    CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+    CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
+    get_error_schema,
+)
 from baserow.api.services.errors import ERROR_SERVICE_INVALID_TYPE
 from baserow.api.utils import (
     CustomFieldRegistryMappingSerializer,
     DiscriminatorCustomFieldsMappingSerializer,
 )
 from baserow.contrib.dashboard.api.errors import ERROR_DASHBOARD_DOES_NOT_EXIST
+from baserow.contrib.dashboard.data_sources.actions import (
+    UpdateDashboardDataSourceActionType,
+)
 from baserow.contrib.dashboard.data_sources.dispatch_context import (
     DashboardDispatchContext,
 )

@@ -69,6 +76,7 @@ class DashboardDataSourcesView(APIView):
             200: DiscriminatorCustomFieldsMappingSerializer(
                 service_type_registry, DashboardDataSourceSerializer, many=True
             ),
             401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
+            404: get_error_schema(["ERROR_DASHBOARD_DOES_NOT_EXIST"]),
         },
     )

@@ -110,6 +118,7 @@ class DashboardDataSourceView(APIView):
                 description="The id of the dashboard data source.",
             ),
             CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+            CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
         ],
         tags=["Dashboard data sources"],
         operation_id="update_dashboard_data_source",

@@ -130,6 +139,7 @@ class DashboardDataSourceView(APIView):
                     "ERROR_DASHBOARD_DATA_SOURCE_CANNOT_USE_SERVICE_TYPE",
                 ]
             ),
+            401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
             404: get_error_schema(
                 [
                     "ERROR_DASHBOARD_DATA_SOURCE_DOES_NOT_EXIST",

@@ -176,8 +186,8 @@ class DashboardDataSourceView(APIView):
             return_validated=True,
         )

-        data_source_updated = DashboardDataSourceService().update_data_source(
-            request.user, data_source_id, service_type=service_type, **data
+        data_source_updated = UpdateDashboardDataSourceActionType.do(
+            request.user, data_source_id, service_type, data
         )
         serializer = service_type_registry.get_serializer(
             data_source_updated.service,

@@ -199,7 +209,6 @@ class DispatchDashboardDataSourceView(APIView):
                 description="The id of the data source you want to call the dispatch "
                 "for",
             ),
-            CLIENT_SESSION_ID_SCHEMA_PARAMETER,
         ],
         tags=["Dashboard data sources"],
         operation_id="dispatch_dashboard_data_source",

@@ -220,6 +229,7 @@ class DispatchDashboardDataSourceView(APIView):
                     "ERROR_DASHBOARD_DATA_SOURCE_IMPROPERLY_CONFIGURED",
                 ]
             ),
+            401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
         },
     )
     @transaction.atomic
@@ -76,5 +76,6 @@ class UpdateWidgetSerializer(serializers.ModelSerializer):
             "description",
         )
         extra_kwargs = {
             "title": {"required": False, "allow_blank": False},
+            "description": {"required": False, "allow_blank": True},
         }
@@ -9,7 +9,11 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from baserow.api.decorators import map_exceptions, validate_body_custom_fields
-from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema
+from baserow.api.schemas import (
+    CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+    CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
+    get_error_schema,
+)
 from baserow.api.utils import (
     CustomFieldRegistryMappingSerializer,
     DiscriminatorCustomFieldsMappingSerializer,

@@ -17,6 +21,11 @@ from baserow.api.utils import (
 )
 from baserow.contrib.dashboard.api.errors import ERROR_DASHBOARD_DOES_NOT_EXIST
 from baserow.contrib.dashboard.exceptions import DashboardDoesNotExist
+from baserow.contrib.dashboard.widgets.actions import (
+    CreateWidgetActionType,
+    DeleteWidgetActionType,
+    UpdateWidgetActionType,
+)
 from baserow.contrib.dashboard.widgets.exceptions import (
     WidgetDoesNotExist,
     WidgetTypeDoesNotExist,

@@ -55,6 +64,7 @@ class WidgetsView(APIView):
             200: DiscriminatorCustomFieldsMappingSerializer(
                 widget_type_registry, WidgetSerializer, many=True
             ),
             401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
+            404: get_error_schema(["ERROR_DASHBOARD_DOES_NOT_EXIST"]),
         },
     )

@@ -86,6 +96,7 @@ class WidgetsView(APIView):
                 "provided value.",
             ),
             CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+            CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
         ],
         tags=["Dashboard widgets"],
         operation_id="create_dashboard_widget",

@@ -99,11 +110,13 @@ class WidgetsView(APIView):
             200: DiscriminatorCustomFieldsMappingSerializer(
                 widget_type_registry, WidgetSerializer
             ),
-            400: get_error_schema(["ERROR_REQUEST_BODY_VALIDATION"]),
+            400: get_error_schema(
+                ["ERROR_REQUEST_BODY_VALIDATION", "ERROR_WIDGET_TYPE_DOES_NOT_EXIST"]
+            ),
             401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
             404: get_error_schema(
                 [
                     "ERROR_DASHBOARD_DOES_NOT_EXIST",
                     "ERROR_WIDGET_TYPE_DOES_NOT_EXIST",
                 ]
             ),
         },

@@ -122,8 +135,8 @@ class WidgetsView(APIView):
         """Creates a new widget."""

         widget_type = data.pop("type")
-        widget = WidgetService().create_widget(
-            request.user, widget_type, dashboard_id, **data
+        widget = CreateWidgetActionType.do(
+            request.user, dashboard_id, widget_type, data
         )
         serializer = widget_type_registry.get_serializer(widget, WidgetSerializer)
         return Response(serializer.data)

@@ -139,6 +152,7 @@ class WidgetView(APIView):
                 description="The id of the widget",
             ),
             CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+            CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
         ],
         tags=["Dashboard widgets"],
         operation_id="update_dashboard_widget",

@@ -157,6 +171,7 @@ class WidgetView(APIView):
                     "ERROR_REQUEST_BODY_VALIDATION",
                 ]
             ),
+            401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
             404: get_error_schema(
                 [
                     "ERROR_WIDGET_DOES_NOT_EXIST",

@@ -185,7 +200,9 @@ class WidgetView(APIView):
             partial=True,
             return_validated=True,
         )
-        updated_widget = WidgetService().update_widget(request.user, widget_id, **data)
+        updated_widget = UpdateWidgetActionType.do(
+            request.user, widget_id, widget_type, data
+        )
         serializer = widget_type_registry.get_serializer(
             updated_widget, WidgetSerializer
         )

@@ -200,12 +217,14 @@ class WidgetView(APIView):
                 description="The id of the widget",
             ),
             CLIENT_SESSION_ID_SCHEMA_PARAMETER,
+            CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
         ],
         tags=["Dashboard widgets"],
         operation_id="delete_dashboard_widget",
         description="Deletes the widget related to the given id.",
         responses={
             204: None,
+            401: get_error_schema(["ERROR_PERMISSION_DENIED"]),
             404: get_error_schema(["ERROR_WIDGET_DOES_NOT_EXIST"]),
         },
     )

@@ -220,5 +239,5 @@ class WidgetView(APIView):
         Deletes a widget.
         """

-        WidgetService().delete_widget(request.user, widget_id)
+        DeleteWidgetActionType.do(request.user, widget_id)
         return Response(status=204)
@@ -17,6 +17,7 @@ from baserow.core.integrations.registries import integration_type_registry
 from baserow.core.models import Application, Workspace
 from baserow.core.registries import ApplicationType, ImportExportConfig
 from baserow.core.storage import ExportZipFile
+from baserow.core.trash.handler import TrashHandler
 from baserow.core.utils import ChildProgressBuilder


@@ -46,6 +47,17 @@ class DashboardApplicationType(ApplicationType):
             authorized_user=user,
         )

+    def pre_delete(self, dashboard):
+        """
+        When a dashboard application is being deleted, delete
+        all widgets first.
+        """
+
+        widgets = dashboard.widget_set(manager="objects_and_trash").all()
+
+        for widget in widgets:
+            TrashHandler.permanently_delete(widget)
+
     def export_serialized(
         self,
         dashboard: Dashboard,
@@ -7,6 +7,7 @@ class DashboardConfig(AppConfig):
     name = "baserow.contrib.dashboard"

     def ready(self):
+        from baserow.core.action.registries import action_type_registry
         from baserow.core.registries import (
             application_type_registry,
             object_scope_type_registry,

@@ -88,7 +89,6 @@ class DashboardConfig(AppConfig):
         from baserow.core.registries import permission_manager_type_registry

         from .permission_manager import AllowIfTemplatePermissionManagerType
-        from .ws.receivers import widget_created  # noqa: F401

         prev_manager = permission_manager_type_registry.get(
             AllowIfTemplatePermissionManagerType.type

@@ -99,3 +99,24 @@ class DashboardConfig(AppConfig):
         permission_manager_type_registry.register(
             AllowIfTemplatePermissionManagerType(prev_manager)
         )
+
+        from baserow.contrib.dashboard.data_sources.actions import (
+            UpdateDashboardDataSourceActionType,
+        )
+        from baserow.contrib.dashboard.widgets.actions import (
+            CreateWidgetActionType,
+            DeleteWidgetActionType,
+            UpdateWidgetActionType,
+        )
+
+        from .ws.receivers import (  # noqa: F401
+            dashboard_data_source_updated,
+            widget_created,
+            widget_deleted,
+            widget_updated,
+        )
+
+        action_type_registry.register(CreateWidgetActionType())
+        action_type_registry.register(UpdateWidgetActionType())
+        action_type_registry.register(DeleteWidgetActionType())
+        action_type_registry.register(UpdateDashboardDataSourceActionType())
backend/src/baserow/contrib/dashboard/data_sources/actions.py (new file)

@@ -0,0 +1,100 @@
+from dataclasses import dataclass
+
+from django.contrib.auth.models import AbstractUser
+from django.utils.translation import gettext_lazy as _
+
+from baserow.contrib.dashboard.actions import DASHBOARD_ACTION_CONTEXT
+from baserow.core.action.models import Action
+from baserow.core.action.registries import ActionTypeDescription, UndoableActionType
+from baserow.core.action.scopes import ApplicationActionScopeType
+from baserow.core.services.registries import service_type_registry
+
+from .models import DashboardDataSource
+from .service import DashboardDataSourceService
+
+
+class UpdateDashboardDataSourceActionType(UndoableActionType):
+    type = "update_dashboard_data_source"
+    description = ActionTypeDescription(
+        _("Update dashboard data source"),
+        _('Data source "%(data_source_name)s" (%(data_source_id)s) updated'),
+        DASHBOARD_ACTION_CONTEXT,
+    )
+    analytics_params = ["dashboard_id", "data_source_id"]
+
+    @dataclass
+    class Params:
+        dashboard_id: int
+        dashboard_name: str
+        data_source_id: int
+        data_source_name: str
+        service_type: str
+        data_source_original_params: dict[str, any]
+        data_source_new_params: dict[str, any]
+
+    @classmethod
+    def do(
+        cls,
+        user: AbstractUser,
+        data_source_id: int,
+        service_type,
+        new_data: dict,
+    ) -> DashboardDataSource:
+        updated_data_source = DashboardDataSourceService().update_data_source(
+            user, data_source_id, service_type=service_type, **new_data
+        )
+
+        # For now remove information about integrations as they cannot
+        # change and would be rejected later on undo/redo calls
+        updated_data_source.original_values.pop("integration_id", None)
+        updated_data_source.new_values.pop("integration_id", None)
+
+        cls.register_action(
+            user=user,
+            params=cls.Params(
+                updated_data_source.data_source.dashboard.id,
+                updated_data_source.data_source.dashboard.name,
+                updated_data_source.data_source.id,
+                updated_data_source.data_source.name,
+                service_type.type,
+                updated_data_source.original_values,
+                updated_data_source.new_values,
+            ),
+            scope=cls.scope(updated_data_source.data_source.dashboard.id),
+            workspace=updated_data_source.data_source.dashboard.workspace,
+        )
+        return updated_data_source.data_source
+
+    @classmethod
+    def scope(cls, dashboard_id):
+        return ApplicationActionScopeType.value(dashboard_id)
+
+    @classmethod
+    def undo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_undo: Action,
+    ):
+        service_type = service_type_registry.get(params.service_type)
+        DashboardDataSourceService().update_data_source(
+            user,
+            params.data_source_id,
+            service_type=service_type,
+            **params.data_source_original_params,
+        )
+
+    @classmethod
+    def redo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_redo: Action,
+    ):
+        service_type = service_type_registry.get(params.service_type)
+        DashboardDataSourceService().update_data_source(
+            user,
+            params.data_source_id,
+            service_type=service_type,
+            **params.data_source_new_params,
+        )
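UpdateDashboardDataSourceActionType records two snapshots in Params and replays them through the same update call: the original values on undo, the new values on redo. A stripped-down sketch of that snapshot-and-replay pattern, with a plain dict standing in for the service layer:

from dataclasses import dataclass


@dataclass
class UpdateAction:
    original_params: dict
    new_params: dict


state = {"name": "old", "filter": "a"}
history: list = []


def update(new_params: dict) -> None:
    # Snapshot the current values before mutating, like register_action above.
    history.append(UpdateAction(original_params=dict(state), new_params=new_params))
    state.update(new_params)


def undo() -> None:
    action = history.pop()
    state.clear()
    state.update(action.original_params)


update({"name": "new"})
undo()
print(state)  # {'name': 'old', 'filter': 'a'}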
@@ -21,7 +21,11 @@ from baserow.core.services.registries import ServiceType, service_type_registry
 from baserow.core.storage import ExportZipFile
 from baserow.core.utils import find_unused_name

-from .types import DashboardDataSourceDict, DashboardDataSourceForUpdate
+from .types import (
+    DashboardDataSourceDict,
+    DashboardDataSourceForUpdate,
+    UpdatedDashboardDataSource,
+)


 class DashboardDataSourceHandler:

@@ -198,7 +202,7 @@ class DashboardDataSourceHandler:
         service_type: ServiceType,
         name: str | None = None,
         **kwargs,
-    ) -> DashboardDataSource:
+    ) -> UpdatedDashboardDataSource:
         """
         Updates the data source and the related service with values.

@@ -214,6 +218,7 @@ class DashboardDataSourceHandler:
             data_source.service.specific
         )
         original_service = data_source.service
+        updated_service = None
         if service_type != original_service_type:
             # If the service type is not the same let's create
             # a new service instead of updating the existing one

@@ -238,9 +243,10 @@ class DashboardDataSourceHandler:
             service_to_update = self.service_handler.get_service_for_update(
                 data_source.service.id
             )
-            data_source.service = self.service_handler.update_service(
+            updated_service = self.service_handler.update_service(
                 service_type, service_to_update, **kwargs
             )
+            data_source.service = updated_service.service

         # Update data source attributes
         if name is not None:

@@ -252,7 +258,11 @@ class DashboardDataSourceHandler:
         if original_service.id != data_source.service.id:
             self.service_handler.delete_service(service_type, original_service)

-        return data_source
+        return UpdatedDashboardDataSource(
+            data_source,
+            updated_service.original_service_values if updated_service else {},
+            updated_service.new_service_values if updated_service else {},
+        )

     def delete_data_source(self, data_source: DashboardDataSource):
         """

@@ -297,7 +307,7 @@ class DashboardDataSourceHandler:
             data_source.service.specific, dispatch_context
         )

-        return service_dispatch
+        return service_dispatch.data

     def export_data_source(
         self,
@@ -32,6 +32,7 @@ from .signals import (
     dashboard_data_source_deleted,
     dashboard_data_source_updated,
 )
+from .types import UpdatedDashboardDataSource


 class DashboardDataSourceService:

@@ -164,7 +165,7 @@ class DashboardDataSourceService:
         data_source_id: int,
         service_type: ServiceType,
         **kwargs,
-    ) -> DashboardDataSource:
+    ) -> UpdatedDashboardDataSource:
         """
         Updates a data source if the user has sufficient permissions.
         Will also check if the values are allowed to be set on the

@@ -209,13 +210,15 @@ class DashboardDataSourceService:

         prepared_values = service_type.prepare_values(kwargs, user, instance=service)

-        data_source = self.handler.update_data_source(
+        updated_data_source = self.handler.update_data_source(
             data_source, service_type=service_type, **prepared_values
         )

-        dashboard_data_source_updated.send(self, user=user, data_source=data_source)
+        dashboard_data_source_updated.send(
+            self, user=user, data_source=updated_data_source.data_source
+        )

-        return data_source
+        return updated_data_source

     def delete_data_source(self, user: AbstractUser, data_source_id: int):
         """
@@ -1,3 +1,4 @@
+from dataclasses import dataclass
 from typing import NewType, TypedDict

 from baserow.core.services.types import ServiceDictSubClass

@@ -14,3 +15,10 @@ class DashboardDataSourceDict(TypedDict):
     name: str
     order: str
     service: ServiceDictSubClass
+
+
+@dataclass
+class UpdatedDashboardDataSource:
+    data_source: DashboardDataSource
+    original_values: dict[str, any]
+    new_values: dict[str, any]
backend/src/baserow/contrib/dashboard/widgets/actions.py (new file)

@@ -0,0 +1,206 @@
+from dataclasses import dataclass
+
+from django.contrib.auth.models import AbstractUser
+from django.utils.translation import gettext_lazy as _
+
+from baserow.contrib.dashboard.actions import DASHBOARD_ACTION_CONTEXT
+from baserow.core.action.models import Action
+from baserow.core.action.registries import ActionTypeDescription, UndoableActionType
+from baserow.core.action.scopes import ApplicationActionScopeType
+from baserow.core.trash.handler import TrashHandler
+
+from .models import Widget
+from .service import WidgetService
+from .trash_types import WidgetTrashableItemType
+
+
+class CreateWidgetActionType(UndoableActionType):
+    type = "create_widget"
+    description = ActionTypeDescription(
+        _("Create widget"),
+        _('Widget "%(widget_title)s" (%(widget_id)s) created'),
+        DASHBOARD_ACTION_CONTEXT,
+    )
+    analytics_params = ["dashboard_id", "widget_id", "widget_type"]
+
+    @dataclass
+    class Params:
+        dashboard_id: int
+        dashboard_name: str
+        widget_id: int
+        widget_title: str
+        widget_type: str
+
+    @classmethod
+    def do(
+        cls, user: AbstractUser, dashboard_id: int, widget_type: str, data: dict
+    ) -> Widget:
+        widget = WidgetService().create_widget(user, widget_type, dashboard_id, **data)
+        cls.register_action(
+            user=user,
+            params=cls.Params(
+                widget.dashboard.id,
+                widget.dashboard.name,
+                widget.id,
+                widget.title,
+                widget_type,
+            ),
+            scope=cls.scope(widget.dashboard.id),
+            workspace=widget.dashboard.workspace,
+        )
+        return widget
+
+    @classmethod
+    def scope(cls, dashboard_id):
+        return ApplicationActionScopeType.value(dashboard_id)
+
+    @classmethod
+    def undo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_undo: Action,
+    ):
+        WidgetService().delete_widget(user, params.widget_id)
+
+    @classmethod
+    def redo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_redo: Action,
+    ):
+        TrashHandler.restore_item(
+            user,
+            WidgetTrashableItemType.type,
+            params.widget_id,
+        )
+
+
+class UpdateWidgetActionType(UndoableActionType):
+    type = "update_widget"
+    description = ActionTypeDescription(
+        _("Update widget"),
+        _('Widget "%(widget_title)s" (%(widget_id)s) updated'),
+        DASHBOARD_ACTION_CONTEXT,
+    )
+    analytics_params = ["dashboard_id", "widget_id"]
+
+    @dataclass
+    class Params:
+        dashboard_id: int
+        dashboard_name: str
+        widget_id: int
+        widget_title: str
+        widget_type: str
+        widget_original_params: dict[str, any]
+        widget_new_params: dict[str, any]
+
+    @classmethod
+    def do(
+        cls,
+        user: AbstractUser,
+        widget_id: int,
+        widget_type: str,
+        new_data: dict,
+    ) -> Widget:
+        updated_widget = WidgetService().update_widget(user, widget_id, **new_data)
+        cls.register_action(
+            user=user,
+            params=cls.Params(
+                updated_widget.widget.dashboard.id,
+                updated_widget.widget.dashboard.name,
+                updated_widget.widget.id,
+                updated_widget.widget.title,
+                widget_type,
+                updated_widget.original_values,
+                updated_widget.new_values,
+            ),
+            scope=cls.scope(updated_widget.widget.dashboard.id),
+            workspace=updated_widget.widget.dashboard.workspace,
+        )
+        return updated_widget.widget
+
+    @classmethod
+    def scope(cls, dashboard_id):
+        return ApplicationActionScopeType.value(dashboard_id)
+
+    @classmethod
+    def undo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_undo: Action,
+    ):
+        WidgetService().update_widget(
+            user, params.widget_id, **params.widget_original_params
+        )
+
+    @classmethod
+    def redo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_redo: Action,
+    ):
+        WidgetService().update_widget(
+            user, params.widget_id, **params.widget_new_params
+        )
+
+
+class DeleteWidgetActionType(UndoableActionType):
+    type = "delete_widget"
+    description = ActionTypeDescription(
+        _("Delete widget"),
+        _('Widget "%(widget_title)s" (%(widget_id)s) deleted'),
+        DASHBOARD_ACTION_CONTEXT,
+    )
+    analytics_params = ["dashboard_id", "widget_id"]
+
+    @dataclass
+    class Params:
+        dashboard_id: int
+        dashboard_name: str
+        widget_id: int
+        widget_title: str
+
+    @classmethod
+    def do(cls, user: AbstractUser, widget_id: int) -> None:
+        widget = WidgetService().delete_widget(user, widget_id)
+        cls.register_action(
+            user=user,
+            params=cls.Params(
+                widget.dashboard.id,
+                widget.dashboard.name,
+                widget.id,
+                widget.title,
+            ),
+            scope=cls.scope(widget.dashboard.id),
+            workspace=widget.dashboard.workspace,
+        )
+
+    @classmethod
+    def scope(cls, dashboard_id):
+        return ApplicationActionScopeType.value(dashboard_id)
+
+    @classmethod
+    def undo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_undo: Action,
+    ):
+        TrashHandler.restore_item(
+            user,
+            WidgetTrashableItemType.type,
+            params.widget_id,
+        )
+
+    @classmethod
+    def redo(
+        cls,
+        user: AbstractUser,
+        params: Params,
+        action_to_redo: Action,
+    ):
+        WidgetService().delete_widget(user, params.widget_id)
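Create and delete are mirror images in these action types: undoing a create trashes the widget, and redoing restores it from the trash, so widget data is never rebuilt from scratch. A toy trash-based undo/redo sketch; all names here are illustrative:

class Trash:
    def __init__(self):
        self._items = {}

    def trash(self, item_id, item):
        self._items[item_id] = item

    def restore(self, item_id):
        return self._items.pop(item_id)


widgets = {}
trash = Trash()


def create_widget(widget_id, title):
    widgets[widget_id] = {"title": title}


def undo_create(widget_id):
    # Mirrors CreateWidgetActionType.undo: move the widget to the trash.
    trash.trash(widget_id, widgets.pop(widget_id))


def redo_create(widget_id):
    # Mirrors CreateWidgetActionType.redo: restore it from the trash.
    widgets[widget_id] = trash.restore(widget_id)


create_widget(1, "Revenue")
undo_create(1)
redo_create(1)
print(widgets)  # {1: {'title': 'Revenue'}}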
@@ -15,7 +15,7 @@ from baserow.core.utils import extract_allowed

 from .exceptions import WidgetDoesNotExist
 from .models import Widget
-from .types import WidgetForUpdate
+from .types import UpdatedWidget, WidgetForUpdate


 class WidgetHandler:

@@ -128,7 +128,7 @@ class WidgetHandler:

         return widget

-    def update_widget(self, widget: WidgetForUpdate, **kwargs) -> Widget:
+    def update_widget(self, widget: WidgetForUpdate, **kwargs) -> UpdatedWidget:
         """
         Updates a widget with values if the values are allowed
         to be set on the widget.

@@ -138,18 +138,21 @@ class WidgetHandler:
         :return: The updated widget.
         """

-        allowed_updates = extract_allowed(kwargs, widget.get_type().allowed_fields)
+        allowed_values = extract_allowed(kwargs, widget.get_type().allowed_fields)

-        allowed_updates = widget.get_type().prepare_value_for_db(
-            allowed_updates, instance=widget
+        original_widget_values = widget.get_type().export_prepared_values(
+            instance=widget
         )

-        for key, value in allowed_updates.items():
+        for key, value in allowed_values.items():
             setattr(widget, key, value)

         widget.full_clean()
         widget.save()
-        return widget
+
+        new_widget_values = widget.get_type().export_prepared_values(instance=widget)
+
+        return UpdatedWidget(widget, original_widget_values, new_widget_values)

     def delete_widget(self, widget: Widget):
         """
@@ -1,6 +1,7 @@
 from abc import ABC
 from decimal import Decimal

+from baserow.contrib.dashboard.models import Dashboard
 from baserow.contrib.dashboard.types import WidgetDict
 from baserow.core.registry import (
     CustomFieldsInstanceMixin,

@@ -32,6 +33,17 @@ class WidgetType(
     id_mapping_name = DASHBOARD_WIDGETS
     allowed_fields = ["title", "description"]

+    def before_create(self, dashboard: Dashboard):
+        """
+        This function allows you to perform checks and operations
+        before a widget is created.
+
+        :param dashboard: The dashboard where the widget should be
+            created.
+        """
+
+        pass
+
     def prepare_value_for_db(self, values: dict, instance: Widget | None = None):
         """
         This function allows you to hook into the moment a widget is created or

@@ -44,6 +56,19 @@ class WidgetType(

         return values

+    def export_prepared_values(self, instance: Widget):
+        """
+        Returns a serializable dict of prepared values for the widget attributes.
+        It is called by the undo/redo ActionHandler to store the values in a way
+        that can be restored later.
+
+        :param instance: The widget instance to export values for.
+        :return: A dict of prepared values.
+        """
+
+        values = {key: getattr(instance, key) for key in self.allowed_fields}
+        return values
+
     def after_delete(self, instance: Widget):
         """
         This function allows you to hook into the moment after a widget is
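export_prepared_values snapshots only the attributes listed in allowed_fields, which keeps the undo payload small and serializable. A minimal standalone sketch of the same getattr-based snapshot:

class Widget:
    allowed_fields = ["title", "description"]

    def __init__(self, title, description, secret):
        self.title = title
        self.description = description
        self.secret = secret  # not in allowed_fields, so never snapshotted


def export_prepared_values(instance):
    return {key: getattr(instance, key) for key in instance.allowed_fields}


w = Widget("Revenue", "Monthly totals", "internal")
print(export_prepared_values(w))  # {'title': 'Revenue', 'description': 'Monthly totals'}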
@@ -15,6 +15,8 @@ from baserow.core.trash.handler import TrashHandler

 from .handler import WidgetHandler
 from .models import Widget
+from .signals import widget_created, widget_deleted, widget_updated
+from .types import UpdatedWidget


 class WidgetService:

@@ -113,6 +115,8 @@ class WidgetService:

         widget_type_from_registry = widget_type_registry.get(widget_type)

+        widget_type_from_registry.before_create(dashboard)
+
         new_widget = self.handler.create_widget(
             widget_type_from_registry,
             dashboard,

@@ -120,9 +124,13 @@ class WidgetService:
             **kwargs,
         )

+        widget_created.send(self, user=user, widget=new_widget)
+
         return new_widget

-    def update_widget(self, user: AbstractUser, widget_id: int, **kwargs) -> Widget:
+    def update_widget(
+        self, user: AbstractUser, widget_id: int, **kwargs
+    ) -> UpdatedWidget:
         """
         Updates a widget given the user permissions.

@@ -147,9 +155,10 @@ class WidgetService:
         )

         updated_widget = self.handler.update_widget(widget, **kwargs)
+        widget_updated.send(self, user=user, widget=updated_widget.widget)
         return updated_widget

-    def delete_widget(self, user: AbstractUser, widget_id: int):
+    def delete_widget(self, user: AbstractUser, widget_id: int) -> Widget:
         """
         Deletes the widget based on the provided widget id if the
         user has correct permissions to do so.

@@ -172,3 +181,7 @@ class WidgetService:
         )

         TrashHandler.trash(user, widget.dashboard.workspace, widget.dashboard, widget)
+
+        widget_deleted.send(self, user=user, widget=widget)
+
+        return widget
@@ -1,3 +1,5 @@
 from django.dispatch import Signal

 widget_created = Signal()
+widget_updated = Signal()
+widget_deleted = Signal()
@@ -1,5 +1,13 @@
+from dataclasses import dataclass
 from typing import NewType

 from .models import Widget

 WidgetForUpdate = NewType("WidgetForUpdate", Widget)
+
+
+@dataclass
+class UpdatedWidget:
+    widget: Widget
+    original_values: dict[str, any]
+    new_values: dict[str, any]
@@ -1,9 +1,14 @@
 from django.db import transaction
 from django.dispatch import receiver

+from baserow.contrib.dashboard.api.data_sources.serializers import (
+    DashboardDataSourceSerializer,
+)
 from baserow.contrib.dashboard.api.widgets.serializers import WidgetSerializer
+from baserow.contrib.dashboard.data_sources import signals as data_source_signals
 from baserow.contrib.dashboard.widgets import signals as widget_signals
 from baserow.contrib.dashboard.widgets.registries import widget_type_registry
+from baserow.core.services.registries import service_type_registry
 from baserow.ws.registries import page_registry

@@ -11,6 +16,7 @@ from baserow.ws.registries import page_registry
 def widget_created(
     sender,
     widget,
+    user=None,
     **kwargs,
 ):
     def send_ws_message():

@@ -23,6 +29,87 @@ def widget_created(
             "dashboard_id": widget.dashboard.id,
             "widget": widget_serializer.data,
         }
-        page_type.broadcast(payload, dashboard_id=widget.dashboard.id)
+        page_type.broadcast(
+            payload,
+            dashboard_id=widget.dashboard.id,
+            ignore_web_socket_id=getattr(user, "web_socket_id", None)
+            if user is not None
+            else None,
+        )

     transaction.on_commit(send_ws_message)
+
+
+@receiver(widget_signals.widget_updated)
+def widget_updated(
+    sender,
+    widget,
+    user,
+    **kwargs,
+):
+    def send_ws_message():
+        page_type = page_registry.get("dashboard")
+        widget_serializer = widget_type_registry.get_serializer(
+            widget, WidgetSerializer
+        )
+        payload = {
+            "type": "widget_updated",
+            "dashboard_id": widget.dashboard.id,
+            "widget": widget_serializer.data,
+        }
+        page_type.broadcast(
+            payload,
+            dashboard_id=widget.dashboard.id,
+            ignore_web_socket_id=getattr(user, "web_socket_id", None),
+        )
+
+    transaction.on_commit(send_ws_message)
+
+
+@receiver(widget_signals.widget_deleted)
+def widget_deleted(
+    sender,
+    user,
+    widget,
+    **kwargs,
+):
+    def send_ws_message():
+        page_type = page_registry.get("dashboard")
+        widget_serializer = widget_type_registry.get_serializer(
+            widget, WidgetSerializer
+        )
+        payload = {
+            "type": "widget_deleted",
+            "dashboard_id": widget.dashboard.id,
+            "widget": widget_serializer.data,
+        }
+        page_type.broadcast(
+            payload,
+            dashboard_id=widget.dashboard.id,
+            ignore_web_socket_id=getattr(user, "web_socket_id", None),
+        )
+
+    transaction.on_commit(send_ws_message)
+
+
+@receiver(data_source_signals.dashboard_data_source_updated)
+def dashboard_data_source_updated(sender, user, data_source, **kwargs):
+    def send_ws_message():
+        page_type = page_registry.get("dashboard")
+        data_source_serializer = service_type_registry.get_serializer(
+            data_source.service,
+            DashboardDataSourceSerializer,
+            context={"data_source": data_source},
+        )
+        payload = {
+            "type": "data_source_updated",
+            "dashboard_id": data_source.dashboard.id,
+            "data_source": data_source_serializer.data,
+        }
+        page_type.broadcast(
+            payload,
+            dashboard_id=data_source.dashboard.id,
+            ignore_web_socket_id=getattr(user, "web_socket_id", None),
+        )
+
+    transaction.on_commit(send_ws_message)
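Passing ignore_web_socket_id prevents the originating browser connection from receiving an echo of its own change, while every other subscriber still gets the broadcast. A toy broadcast loop showing the same skip rule; the function and socket ids below are invented for illustration:

def broadcast(payload, subscribers, ignore_web_socket_id=None):
    delivered = []
    for socket_id in subscribers:
        # Skip the connection that triggered the change, like the receivers above.
        if socket_id == ignore_web_socket_id:
            continue
        delivered.append((socket_id, payload))
    return delivered


subscribers = ["ws-1", "ws-2", "ws-3"]
print(broadcast({"type": "widget_updated"}, subscribers, ignore_web_socket_id="ws-2"))
# [('ws-1', {'type': 'widget_updated'}), ('ws-3', {'type': 'widget_updated'})]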
@@ -13,6 +13,7 @@ from baserow.core.action.scopes import (
 from baserow.core.models import Workspace
 from baserow.core.utils import ChildProgressBuilder

+from .config import AirtableImportConfig
 from .handler import AirtableHandler


@@ -38,6 +39,7 @@ class ImportDatabaseFromAirtableActionType(ActionType):
         installed_application_name: str
         workspace_id: int
         workspace_name: str
+        skip_files: bool

     @classmethod
     def do(

@@ -45,6 +47,7 @@ class ImportDatabaseFromAirtableActionType(ActionType):
         user: AbstractUser,
         workspace: Workspace,
         airtable_share_id: str,
+        skip_files: bool,
         progress_builder: Optional[ChildProgressBuilder] = None,
         **kwargs,
     ) -> Database:

@@ -56,8 +59,14 @@ class ImportDatabaseFromAirtableActionType(ActionType):
         information.
         """

+        config = AirtableImportConfig(skip_files=skip_files)
+
         database = AirtableHandler.import_from_airtable_to_workspace(
-            workspace, airtable_share_id, progress_builder=progress_builder, **kwargs
+            workspace,
+            airtable_share_id,
+            progress_builder=progress_builder,
+            config=config,
+            **kwargs,
         )

         params = cls.Params(

@@ -66,6 +75,7 @@ class ImportDatabaseFromAirtableActionType(ActionType):
             database.name,
             workspace.id,
             workspace.name,
+            skip_files,
         )
         cls.register_action(user, params, cls.scope(workspace.id), workspace)
@ -1,12 +1,9 @@
|
|||
import traceback
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from decimal import Decimal, InvalidOperation
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
|
||||
from baserow.contrib.database.fields.models import (
|
||||
NUMBER_MAX_DECIMAL_PLACES,
|
||||
|
@ -30,14 +27,26 @@ from baserow.contrib.database.fields.models import (
|
|||
)
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
|
||||
from .config import AirtableImportConfig
|
||||
from .constants import AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING
|
||||
from .helpers import import_airtable_date_type_options, set_select_options_on_field
|
||||
from .import_report import (
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
SCOPE_CELL,
|
||||
SCOPE_FIELD,
|
||||
AirtableImportReport,
|
||||
)
|
||||
from .registry import AirtableColumnType
|
||||
from .utils import get_airtable_row_primary_value
|
||||
|
||||
|
||||
class TextAirtableColumnType(AirtableColumnType):
|
||||
type = "text"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
validator_name = raw_airtable_column.get("typeOptions", {}).get("validatorName")
|
||||
if validator_name == "url":
|
||||
return URLField()
|
||||
|
@ -49,16 +58,30 @@ class TextAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
if isinstance(baserow_field, (EmailField, URLField)):
|
||||
try:
|
||||
field_type = field_type_registry.get_by_model(baserow_field)
|
||||
field_type.validator(value)
|
||||
except ValidationError:
|
||||
row_name = get_airtable_row_primary_value(
|
||||
raw_airtable_table, raw_airtable_row
|
||||
)
|
||||
import_report.add_failed(
|
||||
f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_CELL,
|
||||
raw_airtable_table["name"],
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
f'Cell value "{value}" was left empty because it didn\'t pass the email or URL validation.',
|
||||
)
|
||||
return ""
|
||||
|
||||
return value
|
||||
|
@ -67,23 +90,31 @@ class TextAirtableColumnType(AirtableColumnType):
|
|||
class MultilineTextAirtableColumnType(AirtableColumnType):
|
||||
type = "multilineText"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return LongTextField()
|
||||
|
||||
|
||||
class RichTextTextAirtableColumnType(AirtableColumnType):
|
||||
type = "richText"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return LongTextField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
# We don't support rich text formatting yet, so this converts the value to
|
||||
# plain text.
|
||||
|
@ -121,58 +152,119 @@ class RichTextTextAirtableColumnType(AirtableColumnType):
|
|||
class NumberAirtableColumnType(AirtableColumnType):
|
||||
type = "number"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
decimal_places = 0
|
||||
options_format = type_options.get("format", "")
|
||||
suffix = ""
|
||||
|
||||
if type_options.get("format", "integer") == "decimal":
|
||||
# Minimum of 1 and maximum of 5 decimal places.
|
||||
decimal_places = min(
|
||||
max(1, type_options.get("precision", 1)), NUMBER_MAX_DECIMAL_PLACES
|
||||
if "percent" in options_format:
|
||||
suffix = "%"
|
||||
|
||||
decimal_places = min(
|
||||
max(0, type_options.get("precision", 0)), NUMBER_MAX_DECIMAL_PLACES
|
||||
)
|
||||
prefix = type_options.get("symbol", "")
|
||||
separator_format = type_options.get("separatorFormat", "")
|
||||
number_separator = AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING.get(
|
||||
separator_format, ""
|
||||
)
|
||||
|
||||
if separator_format != "" and number_separator == "":
|
||||
import_report.add_failed(
|
||||
f"Number field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
f"The field was imported, but the separator format "
|
||||
f"{separator_format} was dropped because it doesn't exist in Baserow.",
|
||||
)
|
||||
|
||||
return NumberField(
|
||||
number_decimal_places=decimal_places,
|
||||
number_negative=type_options.get("negative", True),
|
||||
number_prefix=prefix,
|
||||
number_suffix=suffix,
|
||||
number_separator=number_separator,
|
||||
)
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
if value is not None:
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
value = Decimal(value)
|
||||
except InvalidOperation:
|
||||
# If the value can't be parsed as decimal, then it might be corrupt, so we
|
||||
# need to inform the user and skip the import.
|
||||
row_name = get_airtable_row_primary_value(
|
||||
raw_airtable_table, raw_airtable_row
|
||||
)
|
||||
import_report.add_failed(
|
||||
f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_CELL,
|
||||
raw_airtable_table["name"],
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
f"Cell value was left empty because the numeric value {value} "
|
||||
f'could not be parsed"',
|
||||
)
|
||||
return None
|
||||
|
||||
if value is not None and not baserow_field.number_negative and value < 0:
|
||||
value = None
|
||||
# Airtable stores 10% as 0.1, so we would need to multiply it by 100 so get the
|
||||
# correct value in Baserow.
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
options_format = type_options.get("format", "")
|
||||
if "percent" in options_format:
|
||||
value = value * 100
|
||||
|
||||
return None if value is None else str(value)
|
||||
if not baserow_field.number_negative and value < 0:
|
||||
return None
|
||||
|
||||
return str(value)
|
||||
|
||||
|
||||
class RatingAirtableColumnType(AirtableColumnType):
|
||||
type = "rating"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, values):
|
||||
return RatingField(max_value=values.get("typeOptions", {}).get("max", 5))
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return RatingField(
|
||||
max_value=raw_airtable_column.get("typeOptions", {}).get("max", 5)
|
||||
)
|
||||
|
||||
|
||||
class CheckboxAirtableColumnType(AirtableColumnType):
|
||||
type = "checkbox"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return BooleanField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
return "true" if value else "false"
|
||||
|
||||
|
@ -180,7 +272,9 @@ class CheckboxAirtableColumnType(AirtableColumnType):
|
|||
class DateAirtableColumnType(AirtableColumnType):
|
||||
type = "date"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
# Check if a timezone is provided in the type options, if so, we might want
|
||||
# to use that timezone for the conversion later on.
|
||||
|
@ -189,6 +283,13 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
|
||||
# date_force_timezone=None it the equivalent of airtable_timezone="client".
|
||||
if airtable_timezone == "client":
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
"The date field was imported, but the client timezone setting was dropped.",
|
||||
)
|
||||
airtable_timezone = None
|
||||
|
||||
return DateField(
|
||||
|
@ -200,10 +301,14 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
if value is None:
|
||||
return value
|
||||
|
@ -212,10 +317,17 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
value = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ").replace(
|
||||
tzinfo=timezone.utc
|
||||
)
|
||||
except ValueError:
|
||||
tb = traceback.format_exc()
|
||||
print(f"Importing Airtable datetime cell failed because of: \n{tb}")
|
||||
logger.error(f"Importing Airtable datetime cell failed because of: \n{tb}")
|
||||
except ValueError as e:
|
||||
row_name = get_airtable_row_primary_value(
|
||||
raw_airtable_table, raw_airtable_row
|
||||
)
|
||||
import_report.add_failed(
|
||||
f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_CELL,
|
||||
raw_airtable_table["name"],
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
f'Cell value was left empty because it didn\'t pass the datetime validation with error: "{str(e)}"',
|
||||
)
|
||||
return None
|
||||
|
||||
if baserow_field.date_include_time:
|
||||
|
@ -235,25 +347,39 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
class FormulaAirtableColumnType(AirtableColumnType):
|
||||
type = "formula"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
display_type = type_options.get("displayType", "")
|
||||
airtable_timezone = type_options.get("timeZone", None)
|
||||
date_show_tzinfo = type_options.get("shouldDisplayTimeZone", False)
|
||||
|
||||
is_last_modified = display_type == "lastModifiedTime"
|
||||
is_created = display_type == "createdTime"
|
||||
|
||||
if is_last_modified or is_created and airtable_timezone == "client":
|
||||
import_report.add_failed(
|
||||
raw_airtable_column["name"],
|
||||
SCOPE_FIELD,
|
||||
raw_airtable_table.get("name", ""),
|
||||
ERROR_TYPE_UNSUPPORTED_FEATURE,
|
||||
"The field was imported, but the client timezone setting was dropped.",
|
||||
)
|
||||
|
||||
# date_force_timezone=None it the equivalent of airtable_timezone="client".
|
||||
if airtable_timezone == "client":
|
||||
airtable_timezone = None
|
||||
|
||||
# The formula conversion isn't support yet, but because the Created on and
|
||||
# Last modified fields work as a formula, we can convert those.
|
||||
if display_type == "lastModifiedTime":
|
||||
if is_last_modified:
|
||||
return LastModifiedField(
|
||||
date_show_tzinfo=date_show_tzinfo,
|
||||
date_force_timezone=airtable_timezone,
|
||||
**import_airtable_date_type_options(type_options),
|
||||
)
|
||||
elif display_type == "createdTime":
|
||||
elif is_created:
|
||||
return CreatedOnField(
|
||||
date_show_tzinfo=date_show_tzinfo,
|
||||
date_force_timezone=airtable_timezone,
|
||||
|
@ -263,10 +389,14 @@ class FormulaAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
if isinstance(baserow_field, CreatedOnField):
|
||||
# If `None`, the value will automatically be populated from the
|
||||
|
@ -286,7 +416,9 @@ class FormulaAirtableColumnType(AirtableColumnType):
|
|||
class ForeignKeyAirtableColumnType(AirtableColumnType):
|
||||
type = "foreignKey"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
foreign_table_id = type_options.get("foreignTableId")
|
||||
|
||||
|
@ -298,38 +430,72 @@ class ForeignKeyAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
foreign_table_id = raw_airtable_column["typeOptions"]["foreignTableId"]
|
||||
|
||||
# Airtable doesn't always provide an object with a `foreignRowId`. This can
|
||||
# happen with a synced table for example. Because we don't have access to the
|
||||
# source in that case, we need to skip them.
|
||||
return [
|
||||
row_id_mapping[foreign_table_id][v["foreignRowId"]]
|
||||
for v in value
|
||||
if "foreignRowId" in v
|
||||
]
|
||||
foreign_row_ids = [v["foreignRowId"] for v in value if "foreignRowId" in v]
|
||||
|
||||
value = []
|
||||
for foreign_row_id in foreign_row_ids:
|
||||
try:
|
||||
value.append(row_id_mapping[foreign_table_id][foreign_row_id])
|
||||
except KeyError:
|
||||
# If a key error is raised, then we don't have the foreign row id in
|
||||
# the mapping. This can happen if the data integrity is compromised in
|
||||
# the Airtable base. We don't want to fail the import, so we're
|
||||
# reporting instead.
|
||||
row_name = get_airtable_row_primary_value(
|
||||
raw_airtable_table, raw_airtable_row
|
||||
)
|
||||
import_report.add_failed(
|
||||
f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_CELL,
|
||||
raw_airtable_table["name"],
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
f'Foreign row id "{foreign_row_id}" was not added as relationship in the cell value was because it was not found in the mapping.',
|
||||
)
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class MultipleAttachmentAirtableColumnType(AirtableColumnType):
|
||||
type = "multipleAttachment"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return FileField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
new_value = []
|
||||
|
||||
# Skip adding the files to the `files_to_download` object and to the value,
|
||||
# so that they're completely ignored if desired.
|
||||
if config.skip_files:
|
||||
return new_value
|
||||
|
||||
for file in value:
|
||||
file_name = "_".join(file["url"].split("/")[-3:])
|
||||
files_to_download[file_name] = file["url"]
|
||||
|
@ -350,15 +516,21 @@ class SelectAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping: Dict[str, Dict[str, int]],
|
||||
table: dict,
|
||||
raw_airtable_row: dict,
|
||||
raw_airtable_column: dict,
|
||||
baserow_field: Field,
|
||||
value: Any,
|
||||
files_to_download: Dict[str, str],
|
||||
config: AirtableImportConfig,
|
||||
import_report: AirtableImportReport,
|
||||
):
|
||||
# use field id and option id for uniqueness
|
||||
return f"{raw_airtable_column.get('id')}_{value}"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
field = SingleSelectField()
|
||||
field = set_select_options_on_field(
|
||||
field,
|
||||
|
@ -374,16 +546,22 @@ class MultiSelectAirtableColumnType(AirtableColumnType):
|
|||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping: Dict[str, Dict[str, int]],
|
||||
table: dict,
|
||||
raw_airtable_row: dict,
|
||||
raw_airtable_column: dict,
|
||||
baserow_field: Field,
|
||||
value: Any,
|
||||
files_to_download: Dict[str, str],
|
||||
config: AirtableImportConfig,
|
||||
import_report: AirtableImportReport,
|
||||
):
|
||||
# use field id and option id for uniqueness
|
||||
column_id = raw_airtable_column.get("id")
|
||||
return [f"{column_id}_{val}" for val in value]
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
field = MultipleSelectField()
|
||||
field = set_select_options_on_field(
|
||||
field,
|
||||
|
@ -396,38 +574,60 @@ class MultiSelectAirtableColumnType(AirtableColumnType):
|
|||
class PhoneAirtableColumnType(AirtableColumnType):
|
||||
type = "phone"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
def to_baserow_field(
|
||||
self, raw_airtable_table, raw_airtable_column, config, import_report
|
||||
):
|
||||
return PhoneNumberField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
row_id_mapping,
|
||||
raw_airtable_table,
|
||||
raw_airtable_row,
|
||||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
files_to_download,
|
||||
config,
|
||||
import_report,
|
||||
):
|
||||
try:
|
||||
field_type = field_type_registry.get_by_model(baserow_field)
|
||||
field_type.validator(value)
|
||||
return value
|
||||
except ValidationError:
|
||||
row_name = get_airtable_row_primary_value(
|
||||
raw_airtable_table, raw_airtable_row
|
||||
)
|
||||
import_report.add_failed(
|
||||
f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"",
|
||||
SCOPE_CELL,
|
||||
raw_airtable_table["name"],
|
||||
ERROR_TYPE_DATA_TYPE_MISMATCH,
|
||||
f'Cell value "{value}" was left empty because it didn\'t pass the phone number validation.',
|
||||
)
|
||||
return ""
|
||||
|

class CountAirtableColumnType(AirtableColumnType):
    type = "count"

    def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
    def to_baserow_field(
        self, raw_airtable_table, raw_airtable_column, config, import_report
    ):
        type_options = raw_airtable_column.get("typeOptions", {})
        return CountField(through_field_id=type_options.get("relationColumnId"))

    def to_baserow_export_serialized_value(
        self,
        row_id_mapping,
        raw_airtable_table,
        raw_airtable_row,
        raw_airtable_column,
        baserow_field,
        value,
        files_to_download,
        config,
        import_report,
    ):
        return None

10  backend/src/baserow/contrib/database/airtable/config.py  (new file)
@@ -0,0 +1,10 @@
import dataclasses


@dataclasses.dataclass
class AirtableImportConfig:
    skip_files: bool = False
    """
    Indicates whether the user files should not be downloaded and imported.
    Skipping them can significantly speed up the import.
    """
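A minimal sketch of how this flag is meant to be used from the import entry point shown further down in handler.py (the `import_from_airtable_to_workspace` name, user, workspace, and share id are assumptions for illustration):

from baserow.contrib.database.airtable.config import AirtableImportConfig

# Skip downloading user files to speed up the import considerably.
config = AirtableImportConfig(skip_files=True)
# database = AirtableHandler.import_from_airtable_to_workspace(
#     user, workspace, "shrXxxxxxxxxxxxxx", config=config
# )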

@@ -13,3 +13,9 @@ AIRTABLE_BASEROW_COLOR_MAPPING = {
    "purple": "dark-blue",
    "gray": "light-gray",
}
AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING = {
    "commaPeriod": "COMMA_PERIOD",
    "periodComma": "PERIOD_COMMA",
    "spaceComma": "SPACE_COMMA",
    "spacePeriod": "SPACE_PERIOD",
}
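A sketch of how this mapping is presumably consumed when converting a number column's type options (the `typeOptions` payload and the empty-string fallback are assumptions):

type_options = {"separatorFormat": "commaPeriod"}
number_separator = AIRTABLE_NUMBER_FIELD_SEPARATOR_FORMAT_MAPPING.get(
    type_options.get("separatorFormat"), ""
)
assert number_separator == "COMMA_PERIOD"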

@@ -4,3 +4,7 @@ class AirtableBaseNotPublic(Exception):

class AirtableShareIsNotABase(Exception):
    """Raised when shared Airtable link is not a base."""


class AirtableImportNotRespectingConfig(Exception):
    """Raised when the Airtable import is not respecting the `AirtableImportConfig`."""
@@ -34,7 +34,20 @@ from baserow.core.models import Workspace
from baserow.core.registries import ImportExportConfig
from baserow.core.utils import ChildProgressBuilder, remove_invalid_surrogate_characters

from .exceptions import AirtableBaseNotPublic, AirtableShareIsNotABase
from .config import AirtableImportConfig
from .exceptions import (
    AirtableBaseNotPublic,
    AirtableImportNotRespectingConfig,
    AirtableShareIsNotABase,
)
from .import_report import (
    ERROR_TYPE_UNSUPPORTED_FEATURE,
    SCOPE_AUTOMATIONS,
    SCOPE_FIELD,
    SCOPE_INTERFACES,
    SCOPE_VIEW,
    AirtableImportReport,
)

User = get_user_model()
@@ -193,6 +206,8 @@ class AirtableHandler:
    def to_baserow_field(
        table: dict,
        column: dict,
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ) -> Union[Tuple[None, None, None], Tuple[Field, FieldType, AirtableColumnType]]:
        """
        Converts the provided Airtable column dict to the right Baserow field object.

@@ -201,6 +216,9 @@ class AirtableHandler:
            field is the primary field.
        :param column: The Airtable column dict. These values will be converted to
            Baserow format.
        :param config: Additional configuration related to the import.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :return: The converted Baserow field, field type and the Airtable column type.
        """
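A sketch of the three-tuple contract documented above; the `table` and `column` dicts are trimmed, hypothetical Airtable schema payloads:

table = {"id": "tblXXXXXXXXXXXXXX", "name": "Projects"}
column = {"id": "fldXXXXXXXXXXXXXX", "name": "Notes", "type": "multilineText"}
(
    baserow_field,
    baserow_field_type,
    airtable_column_type,
) = AirtableHandler.to_baserow_field(
    table, column, AirtableImportConfig(), AirtableImportReport()
)
if baserow_field is None:
    # (None, None, None) means no registered Airtable column type could
    # convert the column, so the caller reports it as failed and skips it.
    pass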
@@ -208,7 +226,7 @@ class AirtableHandler:
            baserow_field,
            airtable_column_type,
        ) = airtable_column_type_registry.from_airtable_column_to_serialized(
            table, column
            table, column, config, import_report
        )

        if baserow_field is None:
@@ -238,16 +256,20 @@ class AirtableHandler:

    @staticmethod
    def to_baserow_row_export(
        table: dict,
        row_id_mapping: Dict[str, Dict[str, int]],
        column_mapping: Dict[str, dict],
        row: dict,
        index: int,
        files_to_download: Dict[str, str],
        config: AirtableImportConfig,
        import_report: AirtableImportReport,
    ) -> dict:
        """
        Converts the provided Airtable record to a Baserow row by looping over the field
        types and executing the `from_airtable_column_value_to_serialized` method.

        :param table: The Airtable table dict.
        :param row_id_mapping: A mapping with the table id as key, where the value is
            another mapping from Airtable row id to Baserow row id.
        :param column_mapping: A mapping where the Airtable column id is the value and

@@ -258,6 +280,9 @@ class AirtableHandler:
        :param files_to_download: A dict that contains all the user file URLs that must
            be downloaded. The key is the file name and the value the URL. Additional
            files can be added to this dict.
        :param config: Additional configuration related to the import.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :return: The converted row in Baserow export format.
        """
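The shape the `row_id_mapping` parameter above describes, with hypothetical Airtable identifiers:

row_id_mapping = {
    "tblXXXXXXXXXXXXXX": {  # Airtable table id
        "recAxxxxxxxxxxxxx": 1,  # Airtable row id -> Baserow row id
        "recBxxxxxxxxxxxxx": 2,
    },
}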
@@ -289,10 +314,14 @@ class AirtableHandler:
                "airtable_column_type"
            ].to_baserow_export_serialized_value(
                row_id_mapping,
                table,
                row,
                mapping_values["raw_airtable_column"],
                mapping_values["baserow_field"],
                column_value,
                files_to_download,
                config,
                import_report,
            )
            exported_row[f"field_{column_id}"] = baserow_serialized_value
@@ -301,6 +330,7 @@ class AirtableHandler:
    @staticmethod
    def download_files_as_zip(
        files_to_download: Dict[str, str],
        config: AirtableImportConfig,
        progress_builder: Optional[ChildProgressBuilder] = None,
        files_buffer: Union[None, IOBase] = None,
    ) -> BytesIO:

@@ -311,6 +341,7 @@ class AirtableHandler:
        :param files_to_download: A dict that contains all the user file URLs that must
            be downloaded. The key is the file name and the value the URL. Additional
            files can be added to this dict.
        :param config: Additional configuration related to the import.
        :param progress_builder: If provided will be used to build a child progress bar
            and report on this method's progress to the parent of the progress_builder.
        :param files_buffer: Optionally a file buffer can be provided to store the
@@ -325,6 +356,17 @@ class AirtableHandler:
            progress_builder, child_total=len(files_to_download.keys())
        )

        # Prevent downloading any file if desired. This can cause the import to fail,
        # but that's intentional because that way it can easily be discovered that the
        # `config.skip_files` is respected.
        if config.skip_files:
            if len(files_to_download.keys()) > 0:
                raise AirtableImportNotRespectingConfig(
                    "Files have been added to the `files_to_download`, but "
                    "`config.skip_files` is True. This is probably a mistake in the "
                    "code, accidentally adding files to the `files_to_download`."
                )

        with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
            for index, (file_name, url) in enumerate(files_to_download.items()):
                response = requests.get(url, headers=BASE_HEADERS)  # nosec B113
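A sketch of how the guard above surfaces a misbehaving column type (the file name and URL are placeholders):

try:
    AirtableHandler.download_files_as_zip(
        {"photo.jpg": "https://dl.airtable.com/example"},  # placeholder entry
        AirtableImportConfig(skip_files=True),
    )
except AirtableImportNotRespectingConfig:
    # A column type queued a download even though the import was configured
    # to skip files; failing loudly makes the violation easy to spot.
    pass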
@@ -339,6 +381,7 @@ class AirtableHandler:
        init_data: dict,
        schema: dict,
        tables: list,
        config: AirtableImportConfig,
        progress_builder: Optional[ChildProgressBuilder] = None,
        download_files_buffer: Union[None, IOBase] = None,
    ) -> Tuple[dict, IOBase]:

@@ -353,6 +396,9 @@ class AirtableHandler:
            shared base.
        :param schema: An object containing the schema of the Airtable base.
        :param tables: a list containing the table data.
        :param config: Additional configuration related to the import.
        :param import_report: Used to collect what wasn't imported to report to the
            user.
        :param progress_builder: If provided will be used to build a child progress bar
            and report on this method's progress to the parent of the progress_builder.
        :param download_files_buffer: Optionally a file buffer can be provided to store

@@ -361,6 +407,11 @@ class AirtableHandler:
            containing the user files.
        """

        # This instance allows collecting what we weren't able to import, like
        # incompatible fields, filters, etc. This will later be used to create a table
        # with an overview of what wasn't imported.
        import_report = AirtableImportReport()

        progress = ChildProgressBuilder.build(progress_builder, child_total=1000)
        converting_progress = progress.create_child(
            represents_progress=500,
@@ -413,12 +464,19 @@ class AirtableHandler:
                baserow_field,
                baserow_field_type,
                airtable_column_type,
            ) = cls.to_baserow_field(table, column)
            ) = cls.to_baserow_field(table, column, config, import_report)
            converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)

            # None means that none of the field types know how to parse this field,
            # so we must ignore it.
            if baserow_field is None:
                import_report.add_failed(
                    column["name"],
                    SCOPE_FIELD,
                    table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f"""Field "{column['name']}" with field type {column["type"]} was not imported because it is not supported.""",
                )
                continue

            # Construct a mapping where the Airtable column id is the key and the
@@ -456,7 +514,9 @@ class AirtableHandler:
                    baserow_field,
                    baserow_field_type,
                    airtable_column_type,
                ) = cls.to_baserow_field(table, airtable_column)
                ) = cls.to_baserow_field(
                    table, airtable_column, config, import_report
                )
                baserow_field.primary = True
                field_mapping["primary_id"] = {
                    "baserow_field": baserow_field,
@@ -480,11 +540,14 @@ class AirtableHandler:
            for row_index, row in enumerate(tables[table["id"]]["rows"]):
                exported_rows.append(
                    cls.to_baserow_row_export(
                        table,
                        row_id_mapping,
                        field_mapping,
                        row,
                        row_index,
                        files_to_download_for_table,
                        config,
                        import_report,
                    )
                )
                converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
@@ -501,6 +564,18 @@ class AirtableHandler:
            empty_serialized_grid_view["id"] = view_id
            exported_views = [empty_serialized_grid_view]

            # Loop over all views to add them as failed to the import report,
            # because the views are not yet supported.
            for view in table["views"]:
                import_report.add_failed(
                    view["name"],
                    SCOPE_VIEW,
                    table["name"],
                    ERROR_TYPE_UNSUPPORTED_FEATURE,
                    f"View \"{view['name']}\" was not imported because views are not "
                    f"yet supported during import.",
                )

            exported_table = DatabaseExportSerializedStructure.table(
                id=table["id"],
                name=table["name"],
@@ -522,6 +597,29 @@ class AirtableHandler:
                    url = signed_user_content_urls[url]
                files_to_download[file_name] = url

        # Just to be really clear that the automations and interfaces are not included.
        import_report.add_failed(
            "All automations",
            SCOPE_AUTOMATIONS,
            "",
            ERROR_TYPE_UNSUPPORTED_FEATURE,
            "Baserow doesn't support automations.",
        )
        import_report.add_failed(
            "All interfaces",
            SCOPE_INTERFACES,
            "",
            ERROR_TYPE_UNSUPPORTED_FEATURE,
            "Baserow doesn't support interfaces.",
        )

        # Convert the import report to the serialized export format of a Baserow table,
        # so that a new table is created with the import report result for the user to
        # see.
        exported_tables.append(
            import_report.get_baserow_export_table(len(schema["tableSchemas"]) + 1)
        )

        exported_database = CoreExportSerializedStructure.application(
            id=1,
            name=init_data["rawApplications"][init_data["sharedApplicationId"]]["name"],
@@ -538,6 +636,7 @@ class AirtableHandler:
        # done last.
        user_files_zip = cls.download_files_as_zip(
            files_to_download,
            config,
            progress.create_child_builder(represents_progress=500),
            download_files_buffer,
        )
@@ -552,6 +651,7 @@ class AirtableHandler:
        storage: Optional[Storage] = None,
        progress_builder: Optional[ChildProgressBuilder] = None,
        download_files_buffer: Union[None, IOBase] = None,
        config: Optional[AirtableImportConfig] = None,
    ) -> Database:
        """
        Downloads all the data of the provided publicly shared Airtable base, converts

@@ -565,9 +665,13 @@ class AirtableHandler:
            and report on this method's progress to the parent of the progress_builder.
        :param download_files_buffer: Optionally a file buffer can be provided to store
            the downloaded files in. They will be stored in memory if not provided.
        :param config: Additional configuration related to the import.
        :return: The imported database application representing the Airtable base.
        """

        if config is None:
            config = AirtableImportConfig()

        progress = ChildProgressBuilder.build(progress_builder, child_total=1000)

        # Execute the initial request to obtain the initial data that's needed to

@@ -623,6 +727,7 @@ class AirtableHandler:
            init_data,
            schema,
            tables,
            config,
            progress.create_child_builder(represents_progress=300),
            download_files_buffer,
        )

145  backend/src/baserow/contrib/database/airtable/import_report.py  (new file)
@@ -0,0 +1,145 @@
import dataclasses
import random

from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
from baserow.contrib.database.fields.models import (
    LongTextField,
    SelectOption,
    SingleSelectField,
    TextField,
)
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.views.registries import view_type_registry
from baserow.core.constants import BASEROW_COLORS

SCOPE_FIELD = SelectOption(id="scope_field", value="Field", color="light-blue", order=1)
SCOPE_CELL = SelectOption(id="scope_cell", value="Cell", color="light-green", order=2)
SCOPE_VIEW = SelectOption(id="scope_view", value="View", color="light-cyan", order=3)
SCOPE_AUTOMATIONS = SelectOption(
    id="scope_automations", value="Automations", color="light-orange", order=4
)
SCOPE_INTERFACES = SelectOption(
    id="scope_interfaces", value="Interfaces", color="light-yellow", order=5
)
ALL_SCOPES = [SCOPE_FIELD, SCOPE_CELL, SCOPE_VIEW, SCOPE_AUTOMATIONS, SCOPE_INTERFACES]

ERROR_TYPE_UNSUPPORTED_FEATURE = SelectOption(
    id="error_type_unsupported_feature",
    value="Unsupported feature",
    color="yellow",
    order=1,
)
ERROR_TYPE_DATA_TYPE_MISMATCH = SelectOption(
    id="error_type_data_type_mismatch", value="Data type mismatch", color="red", order=2
)
ERROR_TYPE_OTHER = SelectOption(
    id="error_type_other", value="Other", color="brown", order=3
)
ALL_ERROR_TYPES = [
    ERROR_TYPE_UNSUPPORTED_FEATURE,
    ERROR_TYPE_DATA_TYPE_MISMATCH,
    ERROR_TYPE_OTHER,
]


@dataclasses.dataclass
class ImportReportFailedItem:
    object_name: str
    scope: str
    table: str
    error_type: str
    message: str


class AirtableImportReport:
    def __init__(self):
        self.items = []

    def add_failed(self, object_name, scope, table, error_type, message):
        self.items.append(
            ImportReportFailedItem(object_name, scope, table, error_type, message)
        )

    def get_baserow_export_table(self, order: int) -> dict:
        # Create an empty grid view because the importing of views doesn't work
        # yet. It's a bit quick and dirty, but it will be replaced soon.
        grid_view = GridView(pk=0, id=None, name="Grid", order=1)
        grid_view.get_field_options = lambda *args, **kwargs: []
        grid_view_type = view_type_registry.get_by_model(grid_view)
        empty_serialized_grid_view = grid_view_type.export_serialized(
            grid_view, None, None, None
        )
        empty_serialized_grid_view["id"] = 0
        exported_views = [empty_serialized_grid_view]

        unique_table_names = {item.table for item in self.items if item.table}
        unique_table_select_options = {
            name: SelectOption(
                id=f"table_{name}",
                value=name,
                color=random.choice(BASEROW_COLORS),  # nosec
                order=index + 1,
            )
            for index, name in enumerate(unique_table_names)
        }

        object_name_field = TextField(
            id="object_name",
            name="Object name",
            order=0,
            primary=True,
        )
        scope_field = SingleSelectField(id="scope", pk="scope", name="Scope", order=1)
        scope_field._prefetched_objects_cache = {"select_options": ALL_SCOPES}
        table_field = SingleSelectField(
            id="table", pk="error_type", name="Table", order=2
        )
        table_field._prefetched_objects_cache = {
            "select_options": unique_table_select_options.values()
        }
        error_field_type = SingleSelectField(
            id="error_type", pk="error_type", name="Error type", order=3
        )
        error_field_type._prefetched_objects_cache = {"select_options": ALL_ERROR_TYPES}
        message_field = LongTextField(id="message", name="Message", order=4)

        fields = [
            object_name_field,
            scope_field,
            table_field,
            error_field_type,
            message_field,
        ]
        exported_fields = [
            field_type_registry.get_by_model(field).export_serialized(field)
            for field in fields
        ]

        exported_rows = []
        for index, item in enumerate(self.items):
            table_select_option = unique_table_select_options.get(item.table, None)
            row = DatabaseExportSerializedStructure.row(
                id=index + 1,
                order=f"{index + 1}.00000000000000000000",
                created_on=None,
                updated_on=None,
            )
            row["field_object_name"] = item.object_name
            row["field_scope"] = item.scope.id
            row["field_table"] = table_select_option.id if table_select_option else None
            row["field_error_type"] = item.error_type.id
            row["field_message"] = item.message
            exported_rows.append(row)

        exported_table = DatabaseExportSerializedStructure.table(
            id="report",
            name="Airtable import report",
            order=order,
            fields=exported_fields,
            views=exported_views,
            rows=exported_rows,
            data_sync=None,
        )

        return exported_table
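A minimal sketch of the report lifecycle that handler.py follows above: collect failures during conversion, then serialize them as one extra table appended to the export (the object and table names are illustrative):

report = AirtableImportReport()
report.add_failed(
    "My formula field",
    SCOPE_FIELD,
    "Projects",
    ERROR_TYPE_UNSUPPORTED_FEATURE,
    "Formula fields are not supported yet.",
)
exported_table = report.get_baserow_export_table(order=5)
assert exported_table["name"] == "Airtable import report"
assert exported_table["rows"][0]["field_object_name"] == "My formula field"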
@@ -47,6 +47,7 @@ class AirtableImportJobType(JobType):
        "workspace_id",
        "database_id",
        "airtable_share_url",
        "skip_files",
    ]

    request_serializer_field_overrides = {

@@ -59,12 +60,17 @@ class AirtableImportJobType(JobType):
            help_text="The publicly shared URL of the Airtable base (e.g. "
            "https://airtable.com/shrxxxxxxxxxxxxxx)",
        ),
        "skip_files": serializers.BooleanField(
            default=False,
            help_text="If true, then the files are not downloaded and imported.",
        ),
    }

    serializer_field_names = [
        "workspace_id",
        "database",
        "airtable_share_id",
        "skip_files",
    ]

    serializer_field_overrides = {

@@ -76,6 +82,10 @@ class AirtableImportJobType(JobType):
            help_text="Public ID of the shared Airtable base that must be imported.",
        ),
        "database": PolymorphicApplicationResponseSerializer(),
        "skip_files": serializers.BooleanField(
            default=False,
            help_text="If true, then the files are not downloaded and imported.",
        ),
    }

    def prepare_values(self, values, user):

@@ -92,6 +102,7 @@ class AirtableImportJobType(JobType):
        return {
            "airtable_share_id": airtable_share_id,
            "workspace": workspace,
            "skip_files": values.get("skip_files", False),
        }

    def run(self, job, progress):

@@ -101,6 +112,7 @@ class AirtableImportJobType(JobType):
            job.user,
            job.workspace,
            job.airtable_share_id,
            job.skip_files,
            progress_builder=progress.create_child_builder(
                represents_progress=progress.total
            ),
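With the serializer changes above, the flag can be supplied when creating the import job. A sketch against Baserow's generic job-creation endpoint, assuming this job type is registered under the name "airtable" (host, token, and ids are illustrative):

import requests

response = requests.post(
    "https://baserow.example.com/api/jobs/",
    headers={"Authorization": "JWT <token>"},
    json={
        "type": "airtable",  # assumed registered job type name
        "workspace_id": 42,
        "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
        "skip_files": True,  # files are not downloaded and imported
    },
)
response.raise_for_status()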
Some files were not shown because too many files have changed in this diff.