Mirror of https://gitlab.com/bramw/baserow.git, synced 2025-04-07 14:25:37 +00:00

Merge branch '1689-use-psycopg3' into 'develop'

Resolve "Use psycopg3"

Closes #1689

See merge request baserow/baserow!3040

Commit 774cd7efb1: 41 changed files with 665 additions and 520 deletions

Changed paths:
backend
.flake8
docker
flake8_plugins
requirements
src/baserow
cachalot_patch.py
config/settings
contrib/database
data_sync
db
fields
formula/expression_generator
migrations
0103_fix_datetimes_timezones.py
0128_remove_duplicate_viewfieldoptions.py
0178_remove_singleselect_missing_options.py
search
table
views
core
test_utils
tests
plugin-boilerplate/{{ cookiecutter.project_slug }}/plugins/{{ cookiecutter.project_module }}/backend/requirements
premium/backend/src/baserow_premium/migrations
backend/.flake8

@@ -7,6 +7,7 @@ per-file-ignores =
     ../enterprise/backend/tests/*: F841
     src/baserow/contrib/database/migrations/*: X1
     src/baserow/core/migrations/*: X1
+    src/baserow/core/psycopg.py: BRP001
 exclude =
     .git,
     __pycache__,
@@ -16,4 +17,5 @@ exclude =
 [flake8:local-plugins]
 extension =
     X1 = flake8_baserow:DocstringPlugin
+    BRP001 = flake8_baserow:BaserowPsycopgChecker
 paths = ./flake8_plugins
@@ -55,7 +55,10 @@ DATABASE_PASSWORD=$DATABASE_PASSWORD \
 DATABASE_OPTIONS=$DATABASE_OPTIONS \
 python3 << END
 import sys
-import psycopg2
+try:
+    import psycopg
+except ImportError:
+    import psycopg2 as psycopg
 import json
 import os
 DATABASE_NAME=os.getenv('DATABASE_NAME')
@@ -66,7 +69,7 @@ DATABASE_PASSWORD=os.getenv('DATABASE_PASSWORD')
 DATABASE_OPTIONS=os.getenv('DATABASE_OPTIONS')
 try:
     options = json.loads(DATABASE_OPTIONS or "{}")
-    psycopg2.connect(
+    psycopg.connect(
         dbname=DATABASE_NAME,
         user=DATABASE_USER,
         password=DATABASE_PASSWORD,
@@ -80,7 +83,7 @@ except Exception as e:
     print(e)
     print("Trying again without any DATABASE_OPTIONS:")
     try:
-        psycopg2.connect(
+        psycopg.connect(
            dbname=DATABASE_NAME,
            user=DATABASE_USER,
            password=DATABASE_PASSWORD,
@@ -99,14 +102,17 @@ else
 DATABASE_URL=$DATABASE_URL \
 python3 << END
 import sys
-import psycopg2
+try:
+    import psycopg
+except ImportError:
+    import psycopg2 as psycopg
 import os
 DATABASE_URL=os.getenv('DATABASE_URL')
 try:
-    psycopg2.connect(
+    psycopg.connect(
         DATABASE_URL
     )
-except psycopg2.OperationalError as e:
+except psycopg.OperationalError as e:
    print(f"Error: Failed to connect to the postgresql database at {DATABASE_URL}")
    print("Please see the error below for more details:")
    print(e)
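
Note on the script above (this aside is not part of the MR): the URL branch keeps working on either driver because psycopg2.connect(dsn) and psycopg 3's psycopg.connect(conninfo) both accept a libpq connection string or URL as the first positional argument. A minimal sketch of the same fallback outside the heredoc, with a hypothetical DSN:

    try:
        import psycopg
    except ImportError:
        import psycopg2 as psycopg

    # Works unchanged whether "psycopg" ended up as psycopg 3 or psycopg2.
    conn = psycopg.connect("postgresql://baserow:secret@localhost:5432/baserow")
    conn.close()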

@@ -1 +1 @@
-from .flake8_baserow import DocstringPlugin
+from .flake8_baserow import DocstringPlugin, BaserowPsycopgChecker
@@ -1,3 +1,4 @@
 from .docstring import Plugin as DocstringPlugin
+from .psycopg import BaserowPsycopgChecker

-__all__ = ["DocstringPlugin"]
+__all__ = ["DocstringPlugin", "BaserowPsycopgChecker"]
backend/flake8_plugins/flake8_baserow/psycopg.py (new file, 30 lines)

@@ -0,0 +1,30 @@
+import ast
+from typing import Iterator, Tuple, Any
+
+class BaserowPsycopgChecker:
+    name = 'flake8-baserow-psycopg'
+    version = '0.1.0'
+
+    def __init__(self, tree: ast.AST, filename: str):
+        self.tree = tree
+        self.filename = filename
+
+    def run(self) -> Iterator[Tuple[int, int, str, Any]]:
+        for node in ast.walk(self.tree):
+            if isinstance(node, ast.Import):
+                for alias in node.names:
+                    if alias.name in ('psycopg', 'psycopg2'):
+                        yield (
+                            node.lineno,
+                            node.col_offset,
+                            'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
+                            type(self)
+                        )
+            elif isinstance(node, ast.ImportFrom):
+                if node.module in ('psycopg', 'psycopg2'):
+                    yield (
+                        node.lineno,
+                        node.col_offset,
+                        'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
+                        type(self)
+                    )
backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py (new file, 38 lines)

@@ -0,0 +1,38 @@
+import ast
+
+from flake8_baserow.psycopg import BaserowPsycopgChecker
+
+
+def run_checker(code: str):
+    tree = ast.parse(code)
+    checker = BaserowPsycopgChecker(tree, 'test.py')
+    return list(checker.run())
+
+
+def test_direct_import():
+    code = '''
+import psycopg
+import psycopg2
+from psycopg import connect
+from psycopg2 import connect as pg_connect
+'''
+    errors = run_checker(code)
+    assert len(errors) == 4
+    assert all(error[2].startswith('BRP001') for error in errors)
+
+
+def test_allowed_import():
+    code = '''
+from baserow.core.psycopg import connect
+from baserow.core.psycopg import psycopg2
+'''
+    errors = run_checker(code)
+    assert len(errors) == 0
+
+
+def test_mixed_imports():
+    code = '''
+import psycopg
+from baserow.core.psycopg import connect
+from psycopg2 import connect as pg_connect
+'''
+    errors = run_checker(code)
+    assert len(errors) == 2
+    assert errors[0][2].startswith('BRP001')
+    assert errors[1][2].startswith('BRP001')
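
For completeness, the same check can be driven through flake8 itself rather than by instantiating the checker directly. A hedged sketch using flake8's legacy Python API, assuming the [flake8:local-plugins] registration from backend/.flake8 above is on the configuration path and that the target path exists:

    from flake8.api import legacy as flake8

    style_guide = flake8.get_style_guide()
    report = style_guide.check_files(["src/baserow/"])  # hypothetical target path
    print(report.total_errors)  # counts BRP001 hits along with all other checks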

backend/requirements/base.in

@@ -2,7 +2,7 @@ django==5.0.9
 django-cors-headers==4.3.1
 djangorestframework==3.15.1
 djangorestframework-simplejwt==5.3.1
-psycopg2==2.9.9
+psycopg2==2.9.10
 Faker==25.0.1
 Twisted==24.3.0
 gunicorn==22.0.0
@@ -39,25 +39,26 @@ redis==5.0.4
 pysaml2==7.5.0
 validators==0.28.1
 requests-oauthlib==2.0.0
-opentelemetry-api==1.24.0
-opentelemetry-exporter-otlp-proto-http==1.24.0
-opentelemetry-instrumentation==0.45b0
-opentelemetry-instrumentation-django==0.45b0
-opentelemetry-instrumentation-aiohttp-client==0.45b0
-opentelemetry-instrumentation-asgi==0.45b0
-opentelemetry-instrumentation-botocore==0.45b0
-opentelemetry-instrumentation-celery==0.45b0
-opentelemetry-instrumentation-dbapi==0.45b0
-opentelemetry-instrumentation-grpc==0.45b0
-opentelemetry-instrumentation-logging==0.45b0
-opentelemetry-instrumentation-psycopg2==0.45b0
-opentelemetry-instrumentation-redis==0.45b0
-opentelemetry-instrumentation-requests==0.45b0
-opentelemetry-instrumentation-wsgi==0.45b0
-opentelemetry-proto==1.24.0
-opentelemetry-sdk==1.24.0
-opentelemetry-semantic-conventions==0.45b0
-opentelemetry-util-http==0.45b0
+opentelemetry-api==1.29.0
+opentelemetry-exporter-otlp-proto-http==1.29.0
+opentelemetry-instrumentation==0.50b0
+opentelemetry-instrumentation-django==0.50b0
+opentelemetry-instrumentation-aiohttp-client==0.50b0
+opentelemetry-instrumentation-asgi==0.50b0
+opentelemetry-instrumentation-botocore==0.50b0
+opentelemetry-instrumentation-celery==0.50b0
+opentelemetry-instrumentation-dbapi==0.50b0
+opentelemetry-instrumentation-grpc==0.50b0
+opentelemetry-instrumentation-logging==0.50b0
+opentelemetry-instrumentation-psycopg2==0.50b0
+opentelemetry-instrumentation-psycopg==0.50b0
+opentelemetry-instrumentation-redis==0.50b0
+opentelemetry-instrumentation-requests==0.50b0
+opentelemetry-instrumentation-wsgi==0.50b0
+opentelemetry-proto==1.29.0
+opentelemetry-sdk==1.29.0
+opentelemetry-semantic-conventions==0.50b0
+opentelemetry-util-http==0.50b0
 Brotli==1.1.0
 loguru==0.7.2
 django-cachalot==2.6.2
backend/requirements/base.txt

@@ -6,13 +6,15 @@
 #
 advocate==1.0.0
     # via -r base.in
-aiohttp==3.9.5
+aiohappyeyeballs==2.4.4
+    # via aiohttp
+aiohttp==3.11.11
     # via
     #   langchain
     #   langchain-community
-aiosignal==1.3.1
+aiosignal==1.3.2
     # via aiohttp
-amqp==5.2.0
+amqp==5.3.1
     # via kombu
 annotated-types==0.7.0
     # via pydantic
@@ -20,7 +22,7 @@ anthropic==0.37.1
     # via -r base.in
 antlr4-python3-runtime==4.9.3
     # via -r base.in
-anyio==4.4.0
+anyio==4.8.0
     # via
     #   anthropic
     #   httpx
@@ -35,38 +37,37 @@ asgiref==3.8.1
     #   django
     #   django-cors-headers
     #   opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
+async-timeout==5.0.1
     # via redis
-attrs==23.2.0
+attrs==24.3.0
     # via
     #   aiohttp
     #   automat
     #   jsonschema
     #   service-identity
     #   twisted
-autobahn==23.6.2
+autobahn==24.4.2
     # via daphne
-automat==22.10.0
+automat==24.8.1
     # via twisted
-azure-core==1.30.1
+azure-core==1.32.0
     # via
     #   azure-storage-blob
     #   django-storages
-azure-storage-blob==12.20.0
+azure-storage-blob==12.24.0
     # via django-storages
 backoff==2.2.1
     # via posthog
-billiard==4.2.0
+billiard==4.2.1
     # via celery
 boto3==1.34.98
     # via -r base.in
-botocore==1.34.119
+botocore==1.34.162
     # via
     #   boto3
     #   s3transfer
 brotli==1.1.0
     # via -r base.in
-cachetools==5.3.3
+cachetools==5.5.0
     # via google-auth
 celery[redis]==5.4.0
     # via
@@ -80,13 +81,13 @@ celery-redbeat==2.2.0
     # via -r base.in
 celery-singleton==0.3.1
     # via -r base.in
-certifi==2024.6.2
+certifi==2024.12.14
     # via
     #   httpcore
     #   httpx
     #   requests
     #   sentry-sdk
-cffi==1.16.0
+cffi==1.17.1
     # via cryptography
 channels[daphne]==4.0.0
     # via
@@ -94,9 +95,9 @@ channels[daphne]==4.0.0
     #   channels-redis
 channels-redis==4.1.0
     # via -r base.in
-charset-normalizer==3.3.2
+charset-normalizer==3.4.1
     # via requests
-click==8.1.7
+click==8.1.8
     # via
     #   celery
     #   click-didyoumean
@@ -111,9 +112,9 @@ click-repl==0.3.0
     # via celery
 constantly==23.10.4
     # via twisted
-cron-descriptor==1.4.3
+cron-descriptor==1.4.5
     # via django-celery-beat
-cryptography==42.0.8
+cryptography==44.0.0
     # via
     #   autobahn
     #   azure-storage-blob
@@ -122,16 +123,17 @@ cryptography==42.0.8
     #   service-identity
 daphne==4.1.2
     # via channels
-dataclasses-json==0.6.6
+dataclasses-json==0.6.7
     # via
     #   langchain
     #   langchain-community
 defusedxml==0.7.1
     # via pysaml2
-deprecated==1.2.14
+deprecated==1.2.15
     # via
     #   opentelemetry-api
     #   opentelemetry-exporter-otlp-proto-http
+    #   opentelemetry-semantic-conventions
 distro==1.9.0
     # via
     #   anthropic
@@ -171,7 +173,7 @@ django-redis==5.4.0
     # via -r base.in
 django-storages[azure,google]==1.14.3
     # via -r base.in
-django-timezone-field==6.1.0
+django-timezone-field==7.0
     # via django-celery-beat
 djangorestframework==3.15.1
     # via
@@ -182,11 +184,11 @@ djangorestframework-simplejwt==5.3.1
     # via -r base.in
 drf-spectacular==0.27.2
     # via -r base.in
-elementpath==4.4.0
+elementpath==4.7.0
     # via xmlschema
 et-xmlfile==2.0.0
     # via openpyxl
-eval-type-backport==0.2.0
+eval-type-backport==0.2.2
     # via mistralai
 faker==25.0.1
     # via -r base.in
@@ -194,36 +196,36 @@ filelock==3.16.1
     # via huggingface-hub
 flower==2.0.1
     # via -r base.in
-frozenlist==1.4.1
+frozenlist==1.5.0
     # via
     #   aiohttp
     #   aiosignal
-fsspec==2024.10.0
+fsspec==2024.12.0
     # via huggingface-hub
-google-api-core==2.19.0
+google-api-core==2.24.0
     # via
     #   google-cloud-core
     #   google-cloud-storage
-google-auth==2.29.0
+google-auth==2.37.0
     # via
     #   google-api-core
     #   google-cloud-core
     #   google-cloud-storage
 google-cloud-core==2.4.1
     # via google-cloud-storage
-google-cloud-storage==2.16.0
+google-cloud-storage==2.19.0
     # via django-storages
-google-crc32c==1.5.0
+google-crc32c==1.6.0
     # via
     #   google-cloud-storage
     #   google-resumable-media
-google-resumable-media==2.7.0
+google-resumable-media==2.7.2
     # via google-cloud-storage
-googleapis-common-protos==1.63.1
+googleapis-common-protos==1.66.0
     # via
     #   google-api-core
     #   opentelemetry-exporter-otlp-proto-http
-greenlet==3.0.3
+greenlet==3.1.1
     # via sqlalchemy
 gunicorn==22.0.0
     # via -r base.in
@@ -231,19 +233,20 @@ h11==0.14.0
     # via
     #   httpcore
     #   uvicorn
-httpcore==1.0.5
+httpcore==1.0.7
     # via httpx
-httptools==0.6.1
+httptools==0.6.4
     # via uvicorn
-httpx==0.27.0
+httpx==0.27.2
     # via
     #   anthropic
+    #   langsmith
     #   mistralai
     #   ollama
     #   openai
-huggingface-hub==0.26.1
+huggingface-hub==0.27.1
     # via tokenizers
-humanize==4.9.0
+humanize==4.11.0
     # via flower
 hyperlink==21.0.0
     # via
@@ -251,7 +254,7 @@ hyperlink==21.0.0
     #   twisted
 icalendar==5.0.12
     # via -r base.in
-idna==3.7
+idna==3.10
     # via
     #   anyio
     #   httpx
@@ -259,19 +262,19 @@ idna==3.7
     #   requests
     #   twisted
     #   yarl
-importlib-metadata==7.0.0
+importlib-metadata==8.4.0
     # via opentelemetry-api
-incremental==22.10.0
+incremental==24.7.2
     # via twisted
 inflection==0.5.1
     # via drf-spectacular
-isodate==0.6.1
+isodate==0.7.2
     # via azure-storage-blob
 itsdangerous==2.2.0
     # via -r base.in
 jira2markdown==0.3.7
     # via -r base.in
-jiter==0.6.1
+jiter==0.8.2
     # via anthropic
 jmespath==1.0.1
     # via
@@ -283,26 +286,26 @@ jsonpatch==1.33
     #   langchain-core
 jsonpath-python==1.0.6
     # via mistralai
-jsonpointer==2.4
+jsonpointer==3.0.0
     # via jsonpatch
 jsonschema==4.17.3
     # via
     #   -r base.in
     #   drf-spectacular
-kombu==5.3.7
+kombu==5.4.2
     # via celery
 langchain==0.1.17
     # via -r base.in
 langchain-community==0.0.38
     # via langchain
-langchain-core==0.1.52
+langchain-core==0.1.53
     # via
     #   langchain
     #   langchain-community
     #   langchain-text-splitters
 langchain-text-splitters==0.0.2
     # via langchain
-langsmith==0.1.71
+langsmith==0.1.147
     # via
     #   langchain
     #   langchain-community
@@ -311,7 +314,7 @@ loguru==0.7.2
     # via -r base.in
 markdown-it-py==3.0.0
     # via rich
-marshmallow==3.21.2
+marshmallow==3.24.1
     # via dataclasses-json
 mdurl==0.1.2
     # via markdown-it-py
@@ -319,9 +322,9 @@ mistralai==1.1.0
     # via -r base.in
 monotonic==1.6
     # via posthog
-msgpack==1.0.8
+msgpack==1.1.0
     # via channels-redis
-multidict==6.0.5
+multidict==6.1.0
     # via
     #   aiohttp
     #   yarl
@@ -343,7 +346,7 @@ openai==1.30.1
     # via -r base.in
 openpyxl==3.1.5
     # via -r base.in
-opentelemetry-api==1.24.0
+opentelemetry-api==1.29.0
     # via
     #   -r base.in
     #   opentelemetry-exporter-otlp-proto-http
@@ -356,17 +359,19 @@ opentelemetry-api==1.24.0
     #   opentelemetry-instrumentation-django
     #   opentelemetry-instrumentation-grpc
     #   opentelemetry-instrumentation-logging
+    #   opentelemetry-instrumentation-psycopg
     #   opentelemetry-instrumentation-psycopg2
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
     #   opentelemetry-propagator-aws-xray
     #   opentelemetry-sdk
-opentelemetry-exporter-otlp-proto-common==1.24.0
+    #   opentelemetry-semantic-conventions
+opentelemetry-exporter-otlp-proto-common==1.29.0
     # via opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-http==1.24.0
+opentelemetry-exporter-otlp-proto-http==1.29.0
     # via -r base.in
-opentelemetry-instrumentation==0.45b0
+opentelemetry-instrumentation==0.50b0
     # via
     #   -r base.in
     #   opentelemetry-instrumentation-aiohttp-client
@@ -377,53 +382,57 @@ opentelemetry-instrumentation==0.45b0
     #   opentelemetry-instrumentation-django
     #   opentelemetry-instrumentation-grpc
     #   opentelemetry-instrumentation-logging
+    #   opentelemetry-instrumentation-psycopg
     #   opentelemetry-instrumentation-psycopg2
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
-opentelemetry-instrumentation-aiohttp-client==0.45b0
+opentelemetry-instrumentation-aiohttp-client==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-asgi==0.45b0
+opentelemetry-instrumentation-asgi==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-botocore==0.45b0
+opentelemetry-instrumentation-botocore==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-celery==0.45b0
+opentelemetry-instrumentation-celery==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-dbapi==0.45b0
+opentelemetry-instrumentation-dbapi==0.50b0
     # via
     #   -r base.in
+    #   opentelemetry-instrumentation-psycopg
     #   opentelemetry-instrumentation-psycopg2
-opentelemetry-instrumentation-django==0.45b0
+opentelemetry-instrumentation-django==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-grpc==0.45b0
+opentelemetry-instrumentation-grpc==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-logging==0.45b0
+opentelemetry-instrumentation-logging==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-psycopg2==0.45b0
+opentelemetry-instrumentation-psycopg==0.50b0
+    # via -r base.in
+opentelemetry-instrumentation-psycopg2==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-redis==0.45b0
+opentelemetry-instrumentation-redis==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-requests==0.45b0
+opentelemetry-instrumentation-requests==0.50b0
     # via -r base.in
-opentelemetry-instrumentation-wsgi==0.45b0
+opentelemetry-instrumentation-wsgi==0.50b0
     # via
     #   -r base.in
     #   opentelemetry-instrumentation-django
-opentelemetry-propagator-aws-xray==1.0.1
+opentelemetry-propagator-aws-xray==1.0.2
     # via opentelemetry-instrumentation-botocore
-opentelemetry-proto==1.24.0
+opentelemetry-proto==1.29.0
     # via
     #   -r base.in
     #   opentelemetry-exporter-otlp-proto-common
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.24.0
+opentelemetry-sdk==1.29.0
     # via
     #   -r base.in
     #   opentelemetry-exporter-otlp-proto-http
     #   opentelemetry-instrumentation-grpc
-opentelemetry-semantic-conventions==0.45b0
+opentelemetry-semantic-conventions==0.50b0
     # via
     #   -r base.in
     #   opentelemetry-instrumentation
     #   opentelemetry-instrumentation-aiohttp-client
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
@@ -435,7 +444,7 @@ opentelemetry-semantic-conventions==0.45b0
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
     #   opentelemetry-sdk
-opentelemetry-util-http==0.45b0
+opentelemetry-util-http==0.50b0
     # via
     #   -r base.in
     #   opentelemetry-instrumentation-aiohttp-client
@@ -443,7 +452,7 @@ opentelemetry-util-http==0.45b0
     #   opentelemetry-instrumentation-django
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
-orjson==3.10.3
+orjson==3.10.13
     # via langsmith
 packaging==23.2
     # via
@@ -451,19 +460,24 @@ packaging==23.2
     #   huggingface-hub
     #   langchain-core
+    #   marshmallow
     #   opentelemetry-instrumentation
 pillow==10.3.0
     # via -r base.in
 posthog==3.5.0
     # via -r base.in
-prometheus-client==0.20.0
+prometheus-client==0.21.1
     # via flower
-prompt-toolkit==3.0.46
+prompt-toolkit==3.0.48
     # via click-repl
+propcache==0.2.1
+    # via
+    #   aiohttp
+    #   yarl
 prosemirror @ https://github.com/fellowapp/prosemirror-py/archive/refs/tags/v0.3.5.zip
     # via -r base.in
-proto-plus==1.23.0
+proto-plus==1.25.0
     # via google-api-core
-protobuf==4.25.3
+protobuf==5.29.2
     # via
     #   google-api-core
     #   googleapis-common-protos
@@ -471,16 +485,16 @@ protobuf==4.25.3
     #   proto-plus
 psutil==5.9.8
     # via -r base.in
-psycopg2==2.9.9
+psycopg2==2.9.10
     # via -r base.in
-pyasn1==0.6.0
+pyasn1==0.6.1
     # via
     #   advocate
     #   ndg-httpsclient
     #   pyasn1-modules
     #   rsa
     #   service-identity
-pyasn1-modules==0.4.0
+pyasn1-modules==0.4.1
     # via
     #   google-auth
     #   service-identity
@@ -496,23 +510,23 @@ pydantic==2.9.2
     #   openai
 pydantic-core==2.23.4
     # via pydantic
-pygments==2.18.0
+pygments==2.19.1
     # via rich
-pyjwt==2.8.0
+pyjwt==2.10.1
     # via djangorestframework-simplejwt
-pyopenssl==24.1.0
+pyopenssl==24.3.0
     # via
     #   advocate
     #   ndg-httpsclient
     #   pysaml2
     #   twisted
-pyparsing==3.2.0
+pyparsing==3.2.1
     # via jira2markdown
 pyrsistent==0.20.0
     # via jsonschema
 pysaml2==7.5.0
     # via -r base.in
-python-crontab==3.1.0
+python-crontab==3.2.0
     # via django-celery-beat
 python-dateutil==2.8.2
     # via
@@ -527,12 +541,12 @@ python-dateutil==2.8.2
     #   python-crontab
 python-dotenv==1.0.1
     # via uvicorn
-pytz==2024.1
+pytz==2024.2
     # via
     #   flower
     #   icalendar
     #   pysaml2
-pyyaml==6.0.1
+pyyaml==6.0.2
     # via
     #   drf-spectacular
     #   huggingface-hub
@@ -565,13 +579,16 @@ requests==2.31.0
     #   posthog
     #   pysaml2
     #   requests-oauthlib
+    #   requests-toolbelt
 requests-oauthlib==2.0.0
     # via -r base.in
+requests-toolbelt==1.0.0
+    # via langsmith
 rich==13.7.1
     # via -r base.in
 rsa==4.9
     # via google-auth
-s3transfer==0.10.1
+s3transfer==0.10.4
     # via boto3
 sentry-sdk==2.0.1
     # via -r base.in
@@ -579,12 +596,10 @@ service-identity==24.1.0
     # via
     #   -r base.in
     #   twisted
-six==1.16.0
+six==1.17.0
     # via
     #   advocate
-    #   automat
     #   azure-core
-    #   isodate
     #   posthog
     #   python-dateutil
 sniffio==1.3.1
@@ -593,21 +608,21 @@ sniffio==1.3.1
     #   anyio
     #   httpx
     #   openai
-sqlalchemy==2.0.30
+sqlalchemy==2.0.36
     # via
     #   langchain
     #   langchain-community
-sqlparse==0.5.0
+sqlparse==0.5.3
     # via django
-tenacity==8.3.0
+tenacity==8.5.0
     # via
     #   celery-redbeat
     #   langchain
     #   langchain-community
     #   langchain-core
-tokenizers==0.20.1
+tokenizers==0.21.0
     # via anthropic
-tornado==6.4.1
+tornado==6.4.2
     # via flower
 tqdm==4.66.4
     # via
@@ -624,6 +639,7 @@ typing-extensions==4.11.0
     # via
     #   -r base.in
     #   anthropic
+    #   anyio
     #   azure-core
     #   azure-storage-blob
     #   dj-database-url
@@ -645,11 +661,12 @@ tzdata==2024.1
     #   -r base.in
     #   celery
     #   django-celery-beat
+    #   kombu
 unicodecsv==0.14.1
     # via -r base.in
 uritemplate==4.1.1
     # via drf-spectacular
-urllib3==1.26.18
+urllib3==1.26.20
     # via
     #   advocate
     #   botocore
@@ -657,7 +674,7 @@ urllib3==1.26.18
     #   sentry-sdk
 uvicorn[standard]==0.29.0
     # via -r base.in
-uvloop==0.19.0
+uvloop==0.21.0
     # via uvicorn
 validators==0.28.1
     # via -r base.in
@@ -666,7 +683,7 @@ vine==5.1.0
     #   amqp
     #   celery
     #   kombu
-watchfiles==0.22.0
+watchfiles==1.0.3
     # via uvicorn
 wcwidth==0.2.13
     # via prompt-toolkit
@@ -674,7 +691,7 @@ websockets==12.0
     # via
     #   -r base.in
     #   uvicorn
-wrapt==1.16.0
+wrapt==1.17.0
     # via
     #   deprecated
     #   opentelemetry-instrumentation
@@ -684,7 +701,7 @@ wrapt==1.16.0
     #   opentelemetry-instrumentation-redis
 xmlschema==2.5.1
     # via pysaml2
-yarl==1.9.4
+yarl==1.18.3
     # via aiohttp
 zipp==3.18.1
     # via
@@ -692,7 +709,7 @@ zipp==3.18.1
     #   importlib-metadata
 zipstream-ng==1.8.0
     # via -r base.in
-zope-interface==6.4.post2
+zope-interface==7.2
     # via twisted

 # The following packages are considered to be unsafe in a requirements file:
backend/requirements/dev.txt

@@ -10,15 +10,15 @@ asgiref==3.8.1
     # via
     #   -c base.txt
     #   django
-asttokens==2.4.1
+asttokens==3.0.0
     # via
     #   snoop
     #   stack-data
-async-timeout==4.0.3
+async-timeout==5.0.1
     # via
     #   -c base.txt
     #   redis
-attrs==23.2.0
+attrs==24.3.0
     # via
     #   -c base.txt
     #   jsonschema
@@ -32,21 +32,21 @@ bandit==1.7.8
     # via -r dev.in
 black==23.3.0
     # via -r dev.in
-build==1.2.2
+build==1.2.2.post1
     # via
     #   -r dev.in
     #   pip-tools
-certifi==2024.6.2
+certifi==2024.12.14
     # via
     #   -c base.txt
     #   requests
-charset-normalizer==3.3.2
+charset-normalizer==3.4.1
     # via
     #   -c base.txt
     #   requests
 cheap-repr==0.5.2
     # via snoop
-click==8.1.7
+click==8.1.8
     # via
     #   -c base.txt
     #   black
@@ -98,7 +98,7 @@ httpretty==1.1.4
     # via -r dev.in
 icdiff==2.0.7
     # via pytest-icdiff
-idna==3.7
+idna==3.10
     # via
     #   -c base.txt
     #   requests
@@ -106,15 +106,15 @@ iniconfig==2.0.0
     # via pytest
 ipdb==0.13.13
     # via -r dev.in
-ipython==8.27.0
+ipython==8.31.0
     # via
     #   -r dev.in
     #   ipdb
 isort==5.13.2
     # via -r dev.in
-jedi==0.19.1
+jedi==0.19.2
     # via ipython
-jinja2==3.1.4
+jinja2==3.1.5
     # via pytest-html
 jsonschema==4.17.3
     # via
@@ -132,7 +132,7 @@ markdown-it-py==3.0.0
     # via
     #   -c base.txt
     #   rich
-markupsafe==2.1.5
+markupsafe==3.0.2
     # via jinja2
 matplotlib-inline==0.1.7
     # via ipython
@@ -174,13 +174,13 @@ pexpect==4.9.0
     # via ipython
 pip-tools==7.4.1
     # via -r dev.in
-platformdirs==4.3.3
+platformdirs==4.3.6
     # via black
 pluggy==1.5.0
     # via pytest
 pprintpp==0.4.0
     # via pytest-icdiff
-prompt-toolkit==3.0.46
+prompt-toolkit==3.0.48
     # via
     #   -c base.txt
     #   ipython
@@ -196,7 +196,7 @@ pyfakefs==5.4.1
     # via -r dev.in
 pyflakes==3.2.0
     # via flake8
-pygments==2.18.0
+pygments==2.19.1
     # via
     #   -c base.txt
     #   ipython
@@ -204,7 +204,7 @@ pygments==2.18.0
     #   snoop
 pyinstrument==4.6.2
     # via -r dev.in
-pyproject-hooks==1.1.0
+pyproject-hooks==1.2.0
     # via
     #   build
     #   pip-tools
@@ -258,7 +258,7 @@ python-dateutil==2.8.2
     # via
     #   -c base.txt
     #   freezegun
-pyyaml==6.0.1
+pyyaml==6.0.2
     # via
     #   -c base.txt
     #   bandit
@@ -281,10 +281,9 @@ rich==13.7.1
     # via
     #   -c base.txt
     #   bandit
-six==1.16.0
+six==1.17.0
     # via
     #   -c base.txt
-    #   asttokens
     #   python-dateutil
     #   rfc3339-validator
     #   snoop
@@ -292,24 +291,24 @@ snoop==0.4.3
     # via -r dev.in
 sortedcontainers==2.4.0
     # via fakeredis
-sqlparse==0.5.0
+sqlparse==0.5.3
     # via
     #   -c base.txt
     #   django
     #   django-silk
 stack-data==0.6.3
     # via ipython
-stevedore==5.3.0
+stevedore==5.4.0
     # via bandit
-tomli==2.0.1
+tomli==2.2.1
     # via django-stubs
 traitlets==5.14.3
     # via
     #   ipython
     #   matplotlib-inline
-types-pytz==2024.2.0.20240913
+types-pytz==2024.2.0.20241221
     # via django-stubs
-types-pyyaml==6.0.12.20240917
+types-pyyaml==6.0.12.20241230
     # via django-stubs
 typing-extensions==4.11.0
     # via
@@ -318,7 +317,7 @@ typing-extensions==4.11.0
     #   django-stubs-ext
     #   ipython
     #   mypy
-urllib3==1.26.18
+urllib3==1.26.20
     # via
     #   -c base.txt
     #   requests
@@ -329,7 +328,7 @@ wcwidth==0.2.13
     # via
     #   -c base.txt
     #   prompt-toolkit
-wheel==0.44.0
+wheel==0.45.1
     # via pip-tools

 # The following packages are considered to be unsafe in a requirements file:
backend/src/baserow/cachalot_patch.py

@@ -6,49 +6,60 @@ from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured
 from django.db.transaction import get_connection

-from cachalot import utils as cachalot_utils
-from cachalot.settings import cachalot_settings
 from django_redis import get_redis_connection
 from loguru import logger
-from psycopg2.sql import Composed
+
+from baserow.core.psycopg import sql

-@contextmanager
-def cachalot_enabled():
-    """
-    A context manager that enables cachalot for the duration of the context. This is
-    useful when you want to enable cachalot for a specific query but you don't want
-    to enable it globally.
-
-    Please note that the query have to be executed within the context of the context
-    manager in order for it to be cached.
-    """
+if settings.CACHALOT_ENABLED:
+    from cachalot.settings import cachalot_disabled, cachalot_settings  # noqa: F401

-    from cachalot.api import LOCAL_STORAGE
+    from cachalot.api import LOCAL_STORAGE

-    was_enabled = getattr(
-        LOCAL_STORAGE, "cachalot_enabled", cachalot_settings.CACHALOT_ENABLED
-    )
-    LOCAL_STORAGE.cachalot_enabled = True
-    try:
-        yield
-    finally:
-        LOCAL_STORAGE.cachalot_enabled = was_enabled
+    @contextmanager
+    def cachalot_enabled():
+        """
+        A context manager that enables cachalot for the duration of the context. This is
+        useful when you want to enable cachalot for a specific query but you don't want
+        to enable it globally. Please note that the query have to be executed within the
+        context of the context manager in order for it to be cached.
+        """
+
+        was_enabled = getattr(
+            LOCAL_STORAGE, "cachalot_enabled", cachalot_settings.CACHALOT_ENABLED
+        )
+        LOCAL_STORAGE.cachalot_enabled = True
+        try:
+            yield
+        finally:
+            LOCAL_STORAGE.cachalot_enabled = was_enabled
+
+else:
+
+    @contextmanager
+    def cachalot_enabled():
+        yield
+
+    @contextmanager
+    def cachalot_disabled():
+        yield


 def patch_cachalot_for_baserow():
     """
-    This function patches the cachalot library to make it work with baserow
-    dynamic models. The problem we're trying to solve here is that the only way
-    to limit what cachalot caches is to provide a fix list of tables, but
-    baserow creates dynamic models on the fly so we can't know what tables will
-    be created in advance, so we need to include all the tables that start with
-    the USER_TABLE_DATABASE_NAME_PREFIX prefix in the list of cachable tables.
+    This function patches the cachalot library to make it work with baserow dynamic
+    models. The problem we're trying to solve here is that the only way to limit what
+    cachalot caches is to provide a fix list of tables, but baserow creates dynamic
+    models on the fly so we can't know what tables will be created in advance, so we
+    need to include all the tables that start with the USER_TABLE_DATABASE_NAME_PREFIX
+    prefix in the list of cachable tables.

-    `filter_cachable` and `is_cachable` are called to invalidate the cache when
-    a table is changed. `are_all_cachable` is called to check if a query can be
-    cached.
+    `filter_cachable` and `is_cachable` are called to invalidate the cache when a table
+    is changed. `are_all_cachable` is called to check if a query can be cached.
     """

+    from cachalot import utils as cachalot_utils
+
     from baserow.contrib.database.table.constants import (
         LINK_ROW_THROUGH_TABLE_PREFIX,
         MULTIPLE_COLLABORATOR_THROUGH_TABLE_PREFIX,
@@ -97,13 +108,12 @@ def patch_cachalot_for_baserow():
     @wraps(original_are_all_cachable)
     def patched_are_all_cachable(tables):
         """
-        This patch works because cachalot does not explicitly set this thread
-        local variable, but it assumes to be True by default if CACHALOT_ENABLED
-        is not set otherwise. Since we are explicitly setting it to True in our
-        code for the query we want to cache, we can check if the value has been
-        set or not to exclude our dynamic tables from the list of tables that
-        cachalot will check, making all of them cachable for the queries
-        wrapped in the `cachalot_enabled` context manager.
+        This patch works because cachalot does not explicitly set this thread local
+        variable, but it assumes to be True by default if CACHALOT_ENABLED is not set
+        otherwise. Since we are explicitly setting it to True in our code for the query
+        we want to cache, we can check if the value has been set or not to exclude our
+        dynamic tables from the list of tables that cachalot will check, making all of
+        them cachable for the queries wrapped in the `cachalot_enabled` context manager.
         """

         from cachalot.api import LOCAL_STORAGE
@@ -139,21 +149,21 @@ def patch_cachalot_for_baserow():
     def lower(self):
         """
         Cachalot wants this method to lowercase the queries to check if they are
-        cachable, but the Composed class in psycopg2.sql does not have a lower
+        cachable, but the Composed class in psycopg.sql does not have a lower
         method, so we add it here to add the support for it.
         """

         cursor = get_connection().cursor()
         return self.as_string(cursor.cursor).lower()

-    Composed.lower = lower
+    sql.Composed.lower = lower


 def clear_cachalot_cache():
     """
-    This function clears the cachalot cache. It can be used in the tests to make
-    sure that the cache is cleared between tests or as post_migrate receiver to
-    ensure to start with a clean cache after migrations.
+    This function clears the cachalot cache. It can be used in the tests to make sure
+    that the cache is cleared between tests or as post_migrate receiver to ensure to
+    start with a clean cache after migrations.
     """

     from django.conf import settings
@@ -179,9 +189,8 @@ def clear_cachalot_cache():

 def _delete_pattern(key_prefix: str) -> int:
     """
-    Allows deleting every redis key that matches a pattern. Copied from the
-    django-redis implementation but modified to allow deleting all versions in the
-    cache at once.
+    Allows deleting every redis key that matches a pattern. Copied from the django-redis
+    implementation but modified to allow deleting all versions in the cache at once.
     """

     client = get_redis_connection("default")
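
One subtlety in the Composed.lower patch above, not spelled out in the MR: psycopg2's Composed.as_string() requires a connection or cursor for quoting (psycopg 3 can render without one), and Django's cursor wrapper is not a driver object, which is why the patch unwraps the inner driver cursor via cursor.cursor before rendering. A sketch of roughly what cachalot ends up invoking, with a hypothetical statement:

    from django.db.transaction import get_connection
    from baserow.core.psycopg import sql

    composed = sql.SQL("SELECT {col} FROM database_table").format(
        col=sql.Identifier("id")
    )
    # Render through the raw driver cursor, then lowercase for cachalot's check.
    lowered = composed.as_string(get_connection().cursor().cursor).lower()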
@@ -17,7 +17,6 @@ from corsheaders.defaults import default_headers
 from sentry_sdk.integrations.django import DjangoIntegration
 from sentry_sdk.scrubber import DEFAULT_DENYLIST, EventScrubber

-from baserow.cachalot_patch import patch_cachalot_for_baserow
 from baserow.config.settings.utils import (
     Setting,
     get_crontab_from_env,
@@ -254,68 +253,6 @@ CACHES = {
     },
 }

-
-CACHALOT_TIMEOUT = int(os.getenv("BASEROW_CACHALOT_TIMEOUT", 60 * 60 * 24 * 7))
-BASEROW_CACHALOT_ONLY_CACHABLE_TABLES = os.getenv(
-    "BASEROW_CACHALOT_ONLY_CACHABLE_TABLES", None
-)
-BASEROW_CACHALOT_MODE = os.getenv("BASEROW_CACHALOT_MODE", "default")
-if BASEROW_CACHALOT_MODE == "full":
-    CACHALOT_ONLY_CACHABLE_TABLES = []
-
-elif BASEROW_CACHALOT_ONLY_CACHABLE_TABLES:
-    # Please avoid to add tables with more than 50 modifications per minute
-    # to this list, as described here:
-    # https://django-cachalot.readthedocs.io/en/latest/limits.html
-    CACHALOT_ONLY_CACHABLE_TABLES = BASEROW_CACHALOT_ONLY_CACHABLE_TABLES.split(",")
-else:
-    CACHALOT_ONLY_CACHABLE_TABLES = [
-        "auth_user",
-        "django_content_type",
-        "core_settings",
-        "core_userprofile",
-        "core_application",
-        "core_operation",
-        "core_template",
-        "core_trashentry",
-        "core_workspace",
-        "core_workspaceuser",
-        "core_workspaceuserinvitation",
-        "core_authprovidermodel",
-        "core_passwordauthprovidermodel",
-        "database_database",
-        "database_table",
-        "database_field",
-        "database_fieldependency",
-        "database_linkrowfield",
-        "database_selectoption",
-        "baserow_premium_license",
-        "baserow_premium_licenseuser",
-        "baserow_enterprise_role",
-        "baserow_enterprise_roleassignment",
-        "baserow_enterprise_team",
-        "baserow_enterprise_teamsubject",
-    ]
-
-# This list will have priority over CACHALOT_ONLY_CACHABLE_TABLES.
-BASEROW_CACHALOT_UNCACHABLE_TABLES = os.getenv(
-    "BASEROW_CACHALOT_UNCACHABLE_TABLES", None
-)
-
-if BASEROW_CACHALOT_UNCACHABLE_TABLES:
-    CACHALOT_UNCACHABLE_TABLES = list(
-        filter(bool, BASEROW_CACHALOT_UNCACHABLE_TABLES.split(","))
-    )
-
-CACHALOT_ENABLED = os.getenv("BASEROW_CACHALOT_ENABLED", "false") == "true"
-CACHALOT_CACHE = "cachalot"
-CACHALOT_UNCACHABLE_TABLES = [
-    "django_migrations",
-    "core_action",
-    "database_token",
-    "baserow_enterprise_auditlogentry",
-]

 BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS = int(
     # Default TTL is 10 minutes: 60 seconds * 10
     os.getenv("BASEROW_BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS")
@@ -328,26 +265,6 @@ BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS = int(
 )

-
-def install_cachalot():
-    global INSTALLED_APPS
-
-    INSTALLED_APPS.append("cachalot")
-
-    patch_cachalot_for_baserow()
-
-
-if CACHALOT_ENABLED:
-    install_cachalot()
-
-    CACHES[CACHALOT_CACHE] = {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": REDIS_URL,
-        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
-        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
-        "VERSION": VERSION,
-    }
-
 CELERY_SINGLETON_BACKEND_CLASS = (
     "baserow.celery_singleton_backend.RedisBackendForSingleton"
 )
@@ -1347,3 +1264,88 @@ BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE = int(
 )

 BASEROW_USE_LOCAL_CACHE = str_to_bool(os.getenv("BASEROW_USE_LOCAL_CACHE", "true"))
+
+# -- CACHALOT SETTINGS --
+CACHALOT_TIMEOUT = int(os.getenv("BASEROW_CACHALOT_TIMEOUT", 60 * 60 * 24 * 7))
+BASEROW_CACHALOT_ONLY_CACHABLE_TABLES = os.getenv(
+    "BASEROW_CACHALOT_ONLY_CACHABLE_TABLES", None
+)
+BASEROW_CACHALOT_MODE = os.getenv("BASEROW_CACHALOT_MODE", "default")
+if BASEROW_CACHALOT_MODE == "full":
+    CACHALOT_ONLY_CACHABLE_TABLES = []
+
+elif BASEROW_CACHALOT_ONLY_CACHABLE_TABLES:
+    # Please avoid to add tables with more than 50 modifications per minute to this
+    # list, as described here:
+    # https://django-cachalot.readthedocs.io/en/latest/limits.html
+    CACHALOT_ONLY_CACHABLE_TABLES = BASEROW_CACHALOT_ONLY_CACHABLE_TABLES.split(",")
+else:
+    CACHALOT_ONLY_CACHABLE_TABLES = [
+        "auth_user",
+        "django_content_type",
+        "core_settings",
+        "core_userprofile",
+        "core_application",
+        "core_operation",
+        "core_template",
+        "core_trashentry",
+        "core_workspace",
+        "core_workspaceuser",
+        "core_workspaceuserinvitation",
+        "core_authprovidermodel",
+        "core_passwordauthprovidermodel",
+        "database_database",
+        "database_table",
+        "database_field",
+        "database_fieldependency",
+        "database_linkrowfield",
+        "database_selectoption",
+        "baserow_premium_license",
+        "baserow_premium_licenseuser",
+        "baserow_enterprise_role",
+        "baserow_enterprise_roleassignment",
+        "baserow_enterprise_team",
+        "baserow_enterprise_teamsubject",
+    ]
+
+# This list will have priority over CACHALOT_ONLY_CACHABLE_TABLES.
+BASEROW_CACHALOT_UNCACHABLE_TABLES = os.getenv(
+    "BASEROW_CACHALOT_UNCACHABLE_TABLES", None
+)
+
+if BASEROW_CACHALOT_UNCACHABLE_TABLES:
+    CACHALOT_UNCACHABLE_TABLES = list(
+        filter(bool, BASEROW_CACHALOT_UNCACHABLE_TABLES.split(","))
+    )
+
+CACHALOT_ENABLED = str_to_bool(os.getenv("BASEROW_CACHALOT_ENABLED", ""))
+CACHALOT_CACHE = "cachalot"
+CACHALOT_UNCACHABLE_TABLES = [
+    "django_migrations",
+    "core_action",
+    "database_token",
+    "baserow_enterprise_auditlogentry",
+]
+
+
+def install_cachalot():
+    from baserow.cachalot_patch import patch_cachalot_for_baserow
+
+    global INSTALLED_APPS
+
+    INSTALLED_APPS.append("cachalot")
+
+    patch_cachalot_for_baserow()
+
+
+if CACHALOT_ENABLED:
+    install_cachalot()
+
+    CACHES[CACHALOT_CACHE] = {
+        "BACKEND": "django_redis.cache.RedisCache",
+        "LOCATION": REDIS_URL,
+        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
+        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
+        "VERSION": VERSION,
+    }
+# -- END CACHALOT SETTINGS --
@@ -36,6 +36,11 @@ CELERY_TASK_EAGER_PROPAGATES = True

 CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}

+# Disable default optimizations for the tests because they make tests slower.
+DATABASES["default"]["OPTIONS"] = {
+    "server_side_binding": False,
+    "prepare_threshold": None,
+}
 # Open a second database connection that can be used to test transactions.
 DATABASES["default-copy"] = deepcopy(DATABASES["default"])
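
Both disabled options are psycopg 3 behaviours surfaced through Django's DATABASES OPTIONS: server-side parameter binding and automatic prepared statements. For comparison, the raw-driver equivalent looks roughly like this (hypothetical DSN; assumes psycopg 3 is installed):

    import psycopg

    conn = psycopg.connect(
        "dbname=baserow",
        cursor_factory=psycopg.ClientCursor,  # client-side binding, i.e. server_side_binding=False
        prepare_threshold=None,  # never switch to server-side prepared statements
    )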
@@ -59,11 +64,6 @@ CACHES = {
         "KEY_PREFIX": f"baserow-{GENERATED_MODEL_CACHE_NAME}-cache",
         "VERSION": None,
     },
-    CACHALOT_CACHE: {
-        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
-        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
-        "VERSION": None,
-    },
 }

 # Disable the default throttle classes because ConcurrentUserRequestsThrottle is
@@ -71,10 +71,6 @@ CACHES = {
 # Look into tests.baserow.api.test_api_utils.py if you need to test the throttle
 REST_FRAMEWORK["DEFAULT_THROTTLE_CLASSES"] = []

-if "cachalot" not in INSTALLED_APPS:
-    install_cachalot()
-
-CACHALOT_ENABLED = False

 BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS = 10
 BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS = 300
@@ -105,3 +101,14 @@ STORAGES["default"] = {"BACKEND": BASE_FILE_STORAGE}
 BASEROW_LOGIN_ACTION_LOG_LIMIT = RateLimit.from_string("1000/s")

 BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS = False
+
+
+CACHALOT_ENABLED = str_to_bool(os.getenv("CACHALOT_ENABLED", "false"))
+if CACHALOT_ENABLED:
+    CACHES[CACHALOT_CACHE] = {
+        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+        "KEY_PREFIX": f"baserow-{CACHALOT_CACHE}-cache",
+        "VERSION": None,
+    }
+
+    install_cachalot()
@@ -4,9 +4,6 @@ from typing import Any, Dict, List, Optional
 from django.conf import settings
 from django.db import DEFAULT_DB_ALIAS

-import psycopg2
-from psycopg2 import sql
-
 from baserow.contrib.database.fields.models import (
     NUMBER_MAX_DECIMAL_PLACES,
     BooleanField,
@@ -15,6 +12,7 @@ from baserow.contrib.database.fields.models import (
     NumberField,
     TextField,
 )
+from baserow.core.psycopg import psycopg, sql
 from baserow.core.utils import ChildProgressBuilder, are_hostnames_same

 from .exceptions import SyncError
@@ -171,7 +169,7 @@ class PostgreSQLDataSyncType(DataSyncType):
         if baserow_postgresql_connection or data_sync_blacklist:
             raise SyncError("It's not allowed to connect to this hostname.")
         try:
-            connection = psycopg2.connect(
+            connection = psycopg.connect(
                 host=instance.postgresql_host,
                 dbname=instance.postgresql_database,
                 user=instance.postgresql_username,
@@ -181,7 +179,7 @@ class PostgreSQLDataSyncType(DataSyncType):
             )
             cursor = connection.cursor()
             yield cursor
-        except psycopg2.Error as e:
+        except psycopg.Error as e:
             raise SyncError(str(e))
         finally:
             if cursor:
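
Because psycopg2 and psycopg 3 expose the same DB-API exception hierarchy rooted at Error, catching psycopg.Error through the shim behaves identically on either driver. A hedged sketch of the pattern in isolation, with hypothetical connection parameters:

    from baserow.core.psycopg import psycopg

    try:
        connection = psycopg.connect(
            host="example.invalid", dbname="src", user="u", password="p",
            connect_timeout=5,
        )
    except psycopg.Error as e:
        # Same except clause catches OperationalError, ProgrammingError, etc.
        print(f"sync failed: {e}")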
@@ -1,9 +1,7 @@
 from django.db.transaction import Atomic

-from cachalot.api import cachalot_disabled
-from psycopg2 import sql
-
-from baserow.core.db import IsolationLevel, transaction_atomic
+from baserow.cachalot_patch import cachalot_disabled
+from baserow.core.db import IsolationLevel, sql, transaction_atomic


 def read_repeatable_single_database_atomic_transaction(
@@ -5,11 +5,10 @@ from django.core.management.color import no_style
 from django.db import connection
 from django.db.models import ManyToManyField

-from psycopg2 import sql
-
 from baserow.contrib.database.db.schema import safe_django_schema_editor
 from baserow.contrib.database.fields.models import Field
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
+from baserow.core.psycopg import sql

 BackupData = Dict[str, Any]
@@ -105,7 +105,7 @@ class FieldDependencyHandler:
             return []

         query_parameters = {
-            "pks": tuple(field_ids),
+            "pks": list(field_ids),
             "max_depth": settings.MAX_FIELD_REFERENCE_DEPTH,
             "table_id": table_id,
             "database_id": database_id_prefilter,
@@ -117,11 +117,11 @@ class FieldDependencyHandler:
         if associated_relations_changed:
             associated_relations_changed_query = f"""
                 OR (
-                    first.via_id IN %(pks)s
-                    OR linkrowfield.link_row_related_field_id IN %(pks)s
+                    first.via_id = ANY(%(pks)s)
+                    OR linkrowfield.link_row_related_field_id = ANY(%(pks)s)
                 )
                 AND NOT (
-                    first.dependant_id IN %(pks)s
+                    first.dependant_id = ANY(%(pks)s)
                 )
             """
         else:
@@ -167,7 +167,7 @@ class FieldDependencyHandler:
             */
             CASE
                 WHEN (
-                    first.via_id IS NOT NULL
+                    first.via_id IS DISTINCT FROM NULL
                     AND (
                         dependant.table_id != %(table_id)s
                         OR dependency.table_id = %(table_id)s
@@ -186,7 +186,7 @@ class FieldDependencyHandler:
             LEFT OUTER JOIN {field_table} as dependency
             ON first.dependency_id = dependency.id
             WHERE
-                first.dependency_id IN %(pks)s
+                first.dependency_id = ANY(%(pks)s)
                 {associated_relations_changed_query}
                 -- LIMITING_FK_EDGES_CLAUSE_1
                 -- DISALLOWED_ANCESTORS_NODES_CLAUSE_1
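
The switch from IN %(pks)s with a tuple to = ANY(%(pks)s) with a list is a standard psycopg2-to-psycopg 3 migration step: psycopg2 adapts a Python tuple client-side into a parenthesised value list, while psycopg 3's server-side binding does not, but both drivers bind a Python list as a PostgreSQL array that ANY() can consume, hence the matching change from tuple(field_ids) to list(field_ids) above. (The IS NOT NULL to IS DISTINCT FROM NULL rewrite in the same query is semantically equivalent for scalars.) A minimal sketch with a hypothetical cursor and table:

    field_ids = [1, 2, 3]
    cursor.execute(
        "SELECT id FROM database_fielddependency WHERE dependency_id = ANY(%(pks)s)",
        {"pks": field_ids},  # a list is bound as an integer array on both drivers
    )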
@@ -2,12 +2,11 @@ from dataclasses import dataclass

 from django.db import models, transaction

-from psycopg2 import sql
-
 from baserow.contrib.database.db.schema import (
     lenient_schema_editor,
     safe_django_schema_editor,
 )
+from baserow.core.psycopg import sql

 from .models import (
     AutonumberField,
@@ -309,6 +309,9 @@ class DurationField(models.DurationField):
         value = duration_value_to_timedelta(value, self.duration_format)
         return super().get_prep_value(value)

+    def to_python(self, value):
+        return super().to_python(value)
+

 class IntegerFieldWithSequence(models.IntegerField):
     """
@@ -22,7 +22,6 @@ from django.db.utils import DatabaseError, DataError, ProgrammingError

 from loguru import logger
 from opentelemetry import trace
-from psycopg2 import sql

 from baserow.contrib.database.db.schema import (
     lenient_schema_editor,
@@ -51,7 +50,7 @@ from baserow.contrib.database.fields.operations import (
 )
 from baserow.contrib.database.table.models import Table
 from baserow.contrib.database.views.handler import ViewHandler
-from baserow.core.db import specific_iterator
+from baserow.core.db import specific_iterator, sql
 from baserow.core.handler import CoreHandler
 from baserow.core.models import TrashEntry, User
 from baserow.core.telemetry.utils import baserow_trace_methods
@@ -14,6 +14,8 @@ from django.db.models import (
 )
 from django.db.models.functions import Cast, Extract, Mod

+from baserow.core.psycopg import is_psycopg3
+
 H_M = "h:mm"
 H_M_S = "h:mm:ss"
 H_M_S_S = "h:mm:ss.s"
@@ -27,6 +29,7 @@ D_H_M_S_NO_COLONS = "d h mm ss"  # 1d2h3m4s, 1h 2m

 MOST_ACCURATE_DURATION_FORMAT = H_M_S_SSS

+
 if typing.TYPE_CHECKING:
     from baserow.contrib.database.fields.models import DurationField

@@ -702,3 +705,20 @@ def text_value_sql_to_duration(field: "DurationField") -> str:
     ]
     args = [f"'{arg or 'NULL'}'" for arg in db_function_args]
     return f"br_text_to_interval(p_in, {','.join(args)});"
+
+
+if is_psycopg3:
+    from psycopg.types.datetime import IntervalLoader  # noqa: BRP001
+
+    from baserow.core.psycopg import psycopg
+
+    class BaserowIntervalLoader(IntervalLoader):
+        """
+        We're not doing anything special here, but if we don't register this
+        adapter tests will fail when parsing negative intervals.
+        """
+
+        def load(self, data):
+            return super().load(data)
+
+    psycopg.adapters.register_loader("interval", BaserowIntervalLoader)
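
The loader subclass above is deliberately a pass-through; as its docstring notes, what matters is that registering it makes negative intervals parse the way the test suite expects (registering a Python loader for the interval type overrides whichever loader psycopg 3 would otherwise pick). Roughly how a registered loader is exercised, with a hypothetical DSN:

    from baserow.core.psycopg import psycopg

    with psycopg.connect("dbname=baserow") as conn:
        delta = conn.execute("SELECT INTERVAL '-1 day -02:00:00'").fetchone()[0]
        # delta is a datetime.timedelta, parsed via BaserowIntervalLoader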
@@ -22,7 +22,7 @@ class BinaryOpExpr(Transform):


 class IsNullExpr(Transform):
-    template = "(%(expressions)s) IS NULL"
+    template = "(%(expressions)s) IS NOT DISTINCT FROM NULL"
     arity = 1
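
A note on the template change above (the MR does not state the motivation, so this reading is an assumption): for scalar operands the two predicates return the same result, but PostgreSQL gives IS NULL special semantics for row-valued operands, where it is also true when all of the row's fields are NULL; the PostgreSQL documentation recommends IS NOT DISTINCT FROM NULL when the intent is to test whether the value itself is NULL, which keeps this formula expression unambiguous regardless of what %(expressions)s expands to.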
@@ -2,14 +2,13 @@

 from django.db import connection, migrations

-from psycopg2 import sql
-
 from baserow.contrib.database.fields.models import (
     CreatedOnField,
     DateField,
     FormulaField,
     LastModifiedField,
 )
+from baserow.core.psycopg import sql


 def forward(apps, schema_editor):
@@ -1,8 +1,7 @@
 # Generated by Django 3.2.21 on 2023-09-19 08:11

 from django.db import connection, migrations

-from psycopg2 import sql
+from baserow.core.psycopg import sql


 def remove_duplicates(model, view):
@@ -1,7 +1,7 @@
 from django.db import ProgrammingError, connection, migrations, transaction
 from django.db.models.expressions import F

-from psycopg2 import sql
+from baserow.core.psycopg import sql


 def forward(apps, schema_editor):
@@ -13,7 +13,6 @@ from django.utils.encoding import force_str

 from loguru import logger
 from opentelemetry import trace
-from psycopg2 import sql
 from redis.exceptions import LockNotOwnedError

 from baserow.contrib.database.db.schema import safe_django_schema_editor
@@ -30,6 +29,7 @@ from baserow.contrib.database.table.cache import invalidate_table_in_model_cache
 from baserow.contrib.database.table.constants import (
     ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME,
 )
+from baserow.core.psycopg import sql
 from baserow.core.telemetry.utils import baserow_trace_methods
 from baserow.core.utils import ChildProgressBuilder, exception_capturer
@@ -11,7 +11,6 @@ from django.utils import translation
 from django.utils.translation import gettext as _

 from opentelemetry import trace
-from psycopg2 import sql

 from baserow.contrib.database.db.schema import safe_django_schema_editor
 from baserow.contrib.database.fields.constants import RESERVED_BASEROW_FIELD_NAMES
@@ -40,6 +39,7 @@ from baserow.contrib.database.views.handler import ViewHandler
 from baserow.contrib.database.views.models import View
 from baserow.contrib.database.views.view_types import GridViewType
 from baserow.core.handler import CoreHandler
+from baserow.core.psycopg import sql
 from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.core.telemetry.utils import baserow_trace_methods
 from baserow.core.trash.handler import TrashHandler
@@ -21,7 +21,6 @@ from django.db.models.query import QuerySet

 import jwt
 from loguru import logger
 from opentelemetry import trace
-from psycopg2 import sql
 from redis.exceptions import LockNotOwnedError

 from baserow.contrib.database.api.utils import get_include_exclude_field_ids
@@ -84,7 +83,7 @@ from baserow.contrib.database.views.registries import (
     view_ownership_type_registry,
 )
 from baserow.contrib.database.views.view_filter_groups import ViewGroupedFiltersAdapter
-from baserow.core.db import specific_iterator, transaction_atomic
+from baserow.core.db import specific_iterator, sql, transaction_atomic
 from baserow.core.exceptions import PermissionDenied
 from baserow.core.handler import CoreHandler
 from baserow.core.models import Workspace
@@ -1,13 +1,12 @@
 from django.contrib.contenttypes.models import ContentType
 from django.db import connection, models

-from psycopg2 import sql
-
 from baserow.core.mixins import (
     CreatedAndUpdatedOnMixin,
     PolymorphicContentTypeMixin,
     WithRegistry,
 )
+from baserow.core.psycopg import sql


 class BaseAuthProviderModel(
@@ -26,7 +26,8 @@ from django.db.models.sql.query import LOOKUP_SEP
 from django.db.transaction import Atomic, get_connection

 from loguru import logger
-from psycopg2 import sql
+
+from baserow.core.psycopg import sql

 from .utils import find_intermediate_order
@@ -9,8 +9,6 @@ from datetime import datetime, timezone
 from pathlib import Path
 from typing import List, Optional
 
-import psycopg2
-
 from baserow.contrib.database.fields.models import (
     LinkRowField,
     MultipleCollaboratorsField,
@@ -18,6 +16,7 @@ from baserow.contrib.database.fields.models import (
 )
 from baserow.contrib.database.table.constants import USER_TABLE_DATABASE_NAME_PREFIX
 from baserow.core.management.backup.exceptions import InvalidBaserowBackupArchive
+from baserow.core.psycopg import psycopg
 
 NO_USER_TABLES_BACKUP_SUB_FOLDER = "everything_but_user_tables"
 
@@ -156,7 +155,7 @@ class BaserowBackupRunner:
         return ["pg_restore"] + self._get_postgres_tool_args() + extra_command
 
     def _build_connection(self):
-        return psycopg2.connect(
+        return psycopg.connect(
             host=self.host,
             port=self.port,
             database=self.database,
backend/src/baserow/core/psycopg.py (new file)
@@ -0,0 +1,8 @@
+from django.db.backends.postgresql.psycopg_any import is_psycopg3
+
+if is_psycopg3:
+    import psycopg  # noqa: F401
+    from psycopg import sql  # noqa: F401
+else:
+    import psycopg2 as psycopg  # noqa: F401
+    from psycopg2 import sql  # noqa: F401
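
The shim above gives the rest of the backend one driver-agnostic import point. A minimal sketch of how a caller can use it (the function, connection parameters, and table name are illustrative, not part of this merge request):

    from baserow.core.psycopg import psycopg, sql

    def count_rows(conn_params: dict, table_name: str) -> int:
        # sql.SQL / sql.Identifier compose identifiers safely and behave the
        # same way in psycopg2 and psycopg 3.
        query = sql.SQL("SELECT COUNT(*) FROM {}").format(sql.Identifier(table_name))
        # psycopg.connect resolves to psycopg2.connect on psycopg2 installs.
        with psycopg.connect(**conn_params) as conn:
            with conn.cursor() as cur:
                cur.execute(query)
                return cur.fetchone()[0]
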
@@ -11,7 +11,6 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExport
 from opentelemetry.instrumentation.botocore import BotocoreInstrumentor
 from opentelemetry.instrumentation.celery import CeleryInstrumentor
 from opentelemetry.instrumentation.django import DjangoInstrumentor
-from opentelemetry.instrumentation.psycopg2 import Psycopg2Instrumentor
 from opentelemetry.instrumentation.redis import RedisInstrumentor
 from opentelemetry.instrumentation.requests import RequestsInstrumentor
 from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
@@ -20,9 +19,17 @@ from opentelemetry.sdk.metrics import MeterProvider
 from opentelemetry.sdk.metrics._internal.export import PeriodicExportingMetricReader
 from opentelemetry.trace import ProxyTracerProvider
 
+from baserow.core.psycopg import is_psycopg3
 from baserow.core.telemetry.provider import DifferentSamplerPerLibraryTracerProvider
 from baserow.core.telemetry.utils import BatchBaggageSpanProcessor, otel_is_enabled
 
+if is_psycopg3:
+    from opentelemetry.instrumentation.psycopg import PsycopgInstrumentor
+else:
+    from opentelemetry.instrumentation.psycopg2 import (
+        Psycopg2Instrumentor as PsycopgInstrumentor,
+    )
+
 
 class LogGuruCompatibleLoggerHandler(LoggingHandler):
     def emit(self, record: logging.LogRecord) -> None:
@@ -148,7 +155,7 @@ def _setup_celery_metrics():
 
 def _setup_standard_backend_instrumentation():
     BotocoreInstrumentor().instrument()
-    Psycopg2Instrumentor().instrument()
+    PsycopgInstrumentor().instrument()
     RedisInstrumentor().instrument()
     RequestsInstrumentor().instrument()
     CeleryInstrumentor().instrument()
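
Binding `Psycopg2Instrumentor` to the name `PsycopgInstrumentor` at import time keeps the call site below branch-free. Condensed into a self-contained snippet, the net effect of these hunks is:

    from baserow.core.psycopg import is_psycopg3

    if is_psycopg3:
        from opentelemetry.instrumentation.psycopg import PsycopgInstrumentor
    else:
        from opentelemetry.instrumentation.psycopg2 import (
            Psycopg2Instrumentor as PsycopgInstrumentor,
        )

    # One startup call instruments whichever driver is installed.
    PsycopgInstrumentor().instrument()
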
@@ -13,7 +13,6 @@ from django.contrib.auth.models import AbstractUser
 from django.db import connection
 from django.utils.dateparse import parse_date, parse_datetime
 
-import psycopg2
 from freezegun import freeze_time
 from pytest_unordered import unordered
 
@@ -27,6 +26,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.action.models import Action
 from baserow.core.action.registries import ActionType
 from baserow.core.models import Workspace
+from baserow.core.psycopg import psycopg
 
 User = get_user_model()
 
@@ -508,9 +508,9 @@ def assert_undo_redo_actions_fails_with_error(
 @contextmanager
 def independent_test_db_connection():
     d = connection.settings_dict
-    conn = psycopg2.connect(
+    conn = psycopg.connect(
         host=d["HOST"],
-        database=d["NAME"],
+        dbname=d["NAME"],
         user=d["USER"],
         password=d["PASSWORD"],
         port=d["PORT"],
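
The `database=` to `dbname=` rename above is load-bearing: psycopg2 accepted `database` as an alias, but psycopg 3 only understands libpq's `dbname`, so `dbname` is the spelling that works under both drivers. A quick illustration (connection values are placeholders):

    from baserow.core.psycopg import psycopg

    conn = psycopg.connect(
        host="localhost",
        dbname="baserow",  # accepted by psycopg2 and psycopg 3 alike
        user="baserow",
        password="baserow",
        port=5432,
    )
    conn.close()
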
backend/tests/__init__.py (Normal file → Executable file)
@@ -1,9 +0,0 @@
-from django.core.signals import setting_changed
-from django.dispatch import receiver
-
-from cachalot.settings import cachalot_settings
-
-
-@receiver(setting_changed)
-def reload_settings(sender, **kwargs):
-    cachalot_settings.reload()
@@ -467,10 +467,7 @@ def test_convert_duration_field_to_text_to_duration_field(
 
     row_1 = model.objects.first()
     updated_value = getattr(row_1, f"field_{field.id}")
-    # compare timedelta values
-    # assert updated_value == dest_value, (  # inital_value, (
-    #     input_format, input_value, dest_format, dest_value, updated_value,
-    # )
+
     if updated_value is not None:
         formatted = format_duration_value(updated_value, dest_format)
     else:
@@ -4,11 +4,11 @@ from django.db import connection
 from django.test import override_settings
 
 import pytest
-from psycopg2 import sql
 
 from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
+from baserow.core.psycopg import sql
 
 
 # @pytest.mark.disabled_in_ci  # Disable this test in CI in next release.
@@ -7,10 +7,8 @@ from django.conf import settings
 from django.core.cache import caches
 from django.db import connection, models
 from django.db.models import Field
-from django.test.utils import override_settings
 
 import pytest
-from cachalot.settings import cachalot_settings
 from pytest_unordered import unordered
 
 from baserow.contrib.database.fields.exceptions import (
@@ -991,59 +989,65 @@ def test_table_hierarchy(data_fixture):
     assert row.get_root() == workspace
 
 
-@override_settings(CACHALOT_ENABLED=True)
-@pytest.mark.django_db(transaction=True)
-def test_cachalot_cache_only_count_query_correctly(data_fixture):
-    user = data_fixture.create_user()
-    workspace = data_fixture.create_workspace(user=user)
-    app = data_fixture.create_database_application(workspace=workspace, name="Test 1")
-    table = data_fixture.create_database_table(name="Cars", database=app)
-    cache = caches[settings.CACHALOT_CACHE]
+if settings.CACHALOT_ENABLED:
+    from cachalot.settings import cachalot_settings
 
-    queries = {}
+    @pytest.mark.django_db(transaction=True)
+    def test_cachalot_cache_only_count_query_correctly(data_fixture):
+        user = data_fixture.create_user()
+        workspace = data_fixture.create_workspace(user=user)
+        app = data_fixture.create_database_application(
+            workspace=workspace, name="Test 1"
+        )
+        table = data_fixture.create_database_table(name="Cars", database=app)
+        cache = caches[settings.CACHALOT_CACHE]
 
-    def get_mocked_query_cache_key(compiler):
-        sql, _ = compiler.as_sql()
-        sql_lower = sql.lower()
-        if "count(*)" in sql_lower:
-            key = "count"
-        elif f"database_table_{table.id}" in sql_lower:
-            key = "select_table"
-        else:
-            key = f"{time()}"
-        queries[key] = sql_lower
-        return key
+        queries = {}
 
-    cachalot_settings.CACHALOT_QUERY_KEYGEN = get_mocked_query_cache_key
-    cachalot_settings.CACHALOT_TABLE_KEYGEN = lambda _, table: table.rsplit("_", 1)[1]
+        def get_mocked_query_cache_key(compiler):
+            sql, _ = compiler.as_sql()
+            sql_lower = sql.lower()
+            if "count(*)" in sql_lower:
+                key = "count"
+            elif f"database_table_{table.id}" in sql_lower:
+                key = "select_table"
+            else:
+                key = f"{time()}"
+            queries[key] = sql_lower
+            return key
 
-    table_model = table.get_model()
-    row = table_model.objects.create()
+        cachalot_settings.CACHALOT_QUERY_KEYGEN = get_mocked_query_cache_key
+        cachalot_settings.CACHALOT_TABLE_KEYGEN = lambda _, table: table.rsplit("_", 1)[
+            1
+        ]
 
-    # listing items should not cache the result
-    assert [r.id for r in table_model.objects.all()] == [row.id]
-    assert cache.get("select_table") is None, queries["select_table"]
+        table_model = table.get_model()
+        row = table_model.objects.create()
 
-    def assert_cachalot_cache_queryset_count_of(expected_count):
-        # count() should save the result of the query in the cache
-        assert table_model.objects.count() == expected_count
+        # listing items should not cache the result
+        assert [r.id for r in table_model.objects.all()] == [row.id]
+        assert cache.get("select_table") is None, queries["select_table"]
 
-        # the count query has been cached
-        inserted_cache_entry = cache.get("count")
-        assert inserted_cache_entry is not None
-        assert inserted_cache_entry[1][0] == expected_count
+        def assert_cachalot_cache_queryset_count_of(expected_count):
+            # count() should save the result of the query in the cache
+            assert table_model.objects.count() == expected_count
 
-    assert_cachalot_cache_queryset_count_of(1)
+            # the count query has been cached
+            inserted_cache_entry = cache.get("count")
+            assert inserted_cache_entry is not None
+            assert inserted_cache_entry[1][0] == expected_count
 
-    # creating a new row should invalidate the cache result
-    table_model.objects.create()
+        assert_cachalot_cache_queryset_count_of(1)
 
-    # cachalot invalidate the cache by setting the timestamp for the table
-    # greater than the timestamp of the cache entry
-    invalidation_timestamp = cache.get(table.id)
-    assert invalidation_timestamp > cache.get("count")[0]
+        # creating a new row should invalidate the cache result
+        table_model.objects.create()
 
-    assert_cachalot_cache_queryset_count_of(2)
+        # cachalot invalidate the cache by setting the timestamp for the table
+        # greater than the timestamp of the cache entry
+        invalidation_timestamp = cache.get(table.id)
+        assert invalidation_timestamp > cache.get("count")[0]
+
+        assert_cachalot_cache_queryset_count_of(2)
 
 
 @pytest.mark.django_db
@@ -2,126 +2,133 @@ from time import time
 
 from django.conf import settings
 from django.core.cache import caches
-from django.test import override_settings
 from django.urls import reverse
 
 import pytest
-from cachalot.settings import cachalot_settings
 
 from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.test_utils.helpers import AnyInt
 
+if settings.CACHALOT_ENABLED:
+    """
+    Cachalot cannot be activated in a fixture because once it patches the Django ORM, it
+    remains patched for the rest of the test suite. Since it's disabled by default and
+    we haven't been using it lately, nor have we tested it properly after the last
+    Django library update, we are disabling it for now. However, we can still enable it
+    with the CACHALOT_ENABLED setting whenever we want to test it.
+    """
 
-@override_settings(CACHALOT_ENABLED=True)
-@pytest.mark.django_db(transaction=True)
-def test_cachalot_cache_count_for_filtered_views(data_fixture):
-    user = data_fixture.create_user()
-    table_a, _, link_field = data_fixture.create_two_linked_tables(user=user)
-    cache = caches[settings.CACHALOT_CACHE]
+    from cachalot.settings import cachalot_settings
 
-    grid_view = data_fixture.create_grid_view(table=table_a)
+    @pytest.mark.django_db(transaction=True)
+    def test_cachalot_cache_count_for_filtered_views(data_fixture):
+        user = data_fixture.create_user()
+        table_a, _, link_field = data_fixture.create_two_linked_tables(user=user)
+        cache = caches[settings.CACHALOT_CACHE]
 
-    ViewHandler().create_filter(
-        user=user,
-        view=grid_view,
-        field=link_field,
-        type_name="link_row_has",
-        value="1",
-    )
+        grid_view = data_fixture.create_grid_view(table=table_a)
 
-    queries = {}
+        ViewHandler().create_filter(
+            user=user,
+            view=grid_view,
+            field=link_field,
+            type_name="link_row_has",
+            value="1",
+        )
 
-    def get_mocked_query_cache_key(compiler):
-        sql, _ = compiler.as_sql()
-        sql_lower = sql.lower()
-        if "count(*)" in sql_lower:
-            key = "count"
-        elif f"database_table_{table_a.id}" in sql_lower:
-            key = "select_table"
-        else:
-            key = f"{time()}"
-        queries[key] = sql_lower
-        return key
+        queries = {}
 
-    cachalot_settings.CACHALOT_QUERY_KEYGEN = get_mocked_query_cache_key
-    cachalot_settings.CACHALOT_TABLE_KEYGEN = lambda _, table: table.rsplit("_", 1)[1]
+        def get_mocked_query_cache_key(compiler):
+            sql, _ = compiler.as_sql()
+            sql_lower = sql.lower()
+            if "count(*)" in sql_lower:
+                key = "count"
+            elif f"database_table_{table_a.id}" in sql_lower:
+                key = "select_table"
+            else:
+                key = f"{time()}"
+            queries[key] = sql_lower
+            return key
 
-    table_model = table_a.get_model()
-    table_model.objects.create()
-    queryset = ViewHandler().get_queryset(view=grid_view)
+        cachalot_settings.CACHALOT_QUERY_KEYGEN = get_mocked_query_cache_key
+        cachalot_settings.CACHALOT_TABLE_KEYGEN = lambda _, table: table.rsplit("_", 1)[
+            1
+        ]
 
-    def assert_cachalot_cache_queryset_count_of(expected_count):
-        # count() should save the result of the query in the cache
-        assert queryset.count() == expected_count
+        table_model = table_a.get_model()
+        table_model.objects.create()
+        queryset = ViewHandler().get_queryset(view=grid_view)
 
-        # the count query has been cached
-        inserted_cache_entry = cache.get("count")
-        assert inserted_cache_entry is not None
-        assert inserted_cache_entry[1][0] == expected_count
+        def assert_cachalot_cache_queryset_count_of(expected_count):
+            # count() should save the result of the query in the cache
+            assert queryset.count() == expected_count
 
-    assert_cachalot_cache_queryset_count_of(0)
+            # the count query has been cached
+            inserted_cache_entry = cache.get("count")
+            assert inserted_cache_entry is not None
+            assert inserted_cache_entry[1][0] == expected_count
 
+        assert_cachalot_cache_queryset_count_of(0)
 
-@override_settings(CACHALOT_ENABLED=True)
-@pytest.mark.django_db(transaction=True)
-def test_cachalot_cache_multiple_select_correctly(api_client, data_fixture):
-    user, token = data_fixture.create_user_and_token()
-    database = data_fixture.create_database_application(user=user)
-    table = data_fixture.create_database_table(database=database)
+    @pytest.mark.django_db(transaction=True)
+    def test_cachalot_cache_multiple_select_correctly(api_client, data_fixture):
+        user, token = data_fixture.create_user_and_token()
+        database = data_fixture.create_database_application(user=user)
+        table = data_fixture.create_database_table(database=database)
 
-    field_handler = FieldHandler()
-    row_handler = RowHandler()
-    grid_view = data_fixture.create_grid_view(table=table)
+        field_handler = FieldHandler()
+        row_handler = RowHandler()
+        grid_view = data_fixture.create_grid_view(table=table)
 
-    field = field_handler.create_field(
-        user=user,
-        table=table,
-        name="Multiple select",
-        type_name="multiple_select",
-        select_options=[
-            {"value": "Option 1", "color": "red"},
-            {"value": "Option 2", "color": "blue"},
-            {"value": "Option 3", "color": "orange"},
-            {"value": "Option 4", "color": "black"},
-        ],
-    )
+        field = field_handler.create_field(
+            user=user,
+            table=table,
+            name="Multiple select",
+            type_name="multiple_select",
+            select_options=[
+                {"value": "Option 1", "color": "red"},
+                {"value": "Option 2", "color": "blue"},
+                {"value": "Option 3", "color": "orange"},
+                {"value": "Option 4", "color": "black"},
+            ],
+        )
 
-    select_options = field.select_options.all()
-    model = table.get_model()
+        select_options = field.select_options.all()
+        model = table.get_model()
 
-    rows = row_handler.create_rows(
-        user,
-        table,
-        rows_values=[
-            {f"field_{field.id}": [select_options[0].id, select_options[1].value]},
-            {f"field_{field.id}": [select_options[2].value, select_options[0].id]},
-        ],
-    )
+        rows = row_handler.create_rows(
+            user,
+            table,
+            rows_values=[
+                {f"field_{field.id}": [select_options[0].id, select_options[1].value]},
+                {f"field_{field.id}": [select_options[2].value, select_options[0].id]},
+            ],
+        )
 
-    url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id})
-    response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
-    response_json = response.json()
-    assert response_json["count"] == 2
-    assert response_json["results"][0][f"field_{field.id}"] == [
-        {"id": AnyInt(), "value": "Option 1", "color": "red"},
-        {"id": AnyInt(), "value": "Option 2", "color": "blue"},
-    ]
+        url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id})
+        response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
+        response_json = response.json()
+        assert response_json["count"] == 2
+        assert response_json["results"][0][f"field_{field.id}"] == [
+            {"id": AnyInt(), "value": "Option 1", "color": "red"},
+            {"id": AnyInt(), "value": "Option 2", "color": "blue"},
+        ]
 
-    row_handler.update_rows(
-        user,
-        table,
-        [
-            {"id": rows[0].id, f"field_{field.id}": []},
-        ],
-        model,
-        [rows[0]],
-    )
+        row_handler.update_rows(
+            user,
+            table,
+            [
+                {"id": rows[0].id, f"field_{field.id}": []},
+            ],
+            model,
+            [rows[0]],
+        )
 
-    # Before #1772 this would raise an error because the cache would not be correctly
-    # invalidated when updating a row so the old value would be returned.
-    response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
-    response_json = response.json()
-    assert response_json["count"] == 2
-    assert response_json["results"][0][f"field_{field.id}"] == []
+        # Before #1772 this would raise an error because the cache would not be
+        # correctly invalidated when updating a row so the old value would be returned.
+        response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"})
+        response_json = response.json()
+        assert response_json["count"] == 2
+        assert response_json["results"][0][f"field_{field.id}"] == []
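
The module-level guard is deliberate: as the docstring notes, cachalot patches the ORM globally once activated, so these tests must not even be collected unless CACHALOT_ENABLED is set. A skipif marker would only be equivalent if the module imports were side-effect free; for contrast, a hypothetical marker-based variant:

    import pytest
    from django.conf import settings

    @pytest.mark.skipif(
        not settings.CACHALOT_ENABLED, reason="cachalot is disabled in this build"
    )
    def test_cachalot_example():
        # Deferred import so collecting the test never touches cachalot.
        from cachalot.settings import cachalot_settings

        assert cachalot_settings.CACHALOT_ENABLED
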
@@ -11,6 +11,7 @@ from freezegun import freeze_time
 from baserow.contrib.database.table.models import Table
 from baserow.core.management.backup.backup_runner import BaserowBackupRunner
 from baserow.core.management.backup.exceptions import InvalidBaserowBackupArchive
+from baserow.core.psycopg import is_psycopg3
 from baserow.core.trash.handler import TrashHandler
 
 
@@ -71,7 +72,7 @@ def test_can_backup_and_restore_baserow_reverting_changes(data_fixture, environ)
 
 
 @patch("tempfile.TemporaryDirectory")
-@patch("psycopg2.connect")
+@patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
 @patch("subprocess.check_output")
 def test_backup_baserow_dumps_database_in_batches(
     mock_check_output, mock_connect, mock_tempfile, fs, environ
@@ -141,7 +142,7 @@ def test_backup_baserow_dumps_database_in_batches(
 
 
 @patch("tempfile.TemporaryDirectory")
-@patch("psycopg2.connect")
+@patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
 @patch("subprocess.check_output")
 def test_can_change_num_jobs_and_insert_extra_args_for_baserow_backup(
     mock_check_output, mock_connect, mock_tempfile, fs, environ
@@ -226,7 +227,7 @@ def test_can_change_num_jobs_and_insert_extra_args_for_baserow_backup(
 
 
 @patch("tempfile.TemporaryDirectory")
-@patch("psycopg2.connect")
+@patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
 @patch("subprocess.check_output")
 def test_backup_baserow_table_batches_includes_all_tables_when_final_batch_small(
     mock_check_output, mock_connect, mock_tempfile, fs, environ
@@ -285,7 +286,7 @@ def test_backup_baserow_table_batches_includes_all_tables_when_final_batch_small
 
 
 @patch("tempfile.TemporaryDirectory")
-@patch("psycopg2.connect")
+@patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
 @patch("subprocess.check_output")
 def test_backup_baserow_includes_all_tables_when_batch_size_matches_num_tables(
     mock_check_output, mock_connect, mock_tempfile, fs, environ
@@ -336,7 +337,7 @@ def test_backup_baserow_includes_all_tables_when_batch_size_matches_num_tables(
 
 
 @patch("tempfile.TemporaryDirectory")
-@patch("psycopg2.connect")
+@patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
 @patch("subprocess.check_output")
 def test_backup_baserow_does_no_table_batches_when_no_user_tables_found(
     mock_check_output, mock_connect, mock_tempfile, fs, environ
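
Each decorator above picks its patch target at import time: the backup runner resolves `connect` through the driver module that `baserow.core.psycopg` re-exports, so the mock must land on that same module attribute. A reduced sketch of the pattern (test body hypothetical):

    from unittest.mock import patch

    from baserow.core.psycopg import is_psycopg3, psycopg

    @patch("psycopg.connect" if is_psycopg3 else "psycopg2.connect")
    def test_connect_is_patched(mock_connect):
        # The shim re-exports the driver module itself, so the attribute
        # lookup at call time finds the patched function.
        psycopg.connect(dbname="baserow")
        mock_connect.assert_called_once_with(dbname="baserow")
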
@@ -170,6 +170,7 @@ deprecated==1.2.14
     # via
     #   opentelemetry-api
     #   opentelemetry-exporter-otlp-proto-http
+    #   opentelemetry-semantic-conventions
 distro==1.9.0
     # via
     #   anthropic
@@ -456,7 +457,7 @@ openapi-spec-validator==0.5.6
     # via -r dev.in
 openpyxl==3.1.5
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-api==1.24.0
+opentelemetry-api==1.29.0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-exporter-otlp-proto-http
@@ -469,17 +470,18 @@ opentelemetry-api==1.24.0
     #   opentelemetry-instrumentation-django
     #   opentelemetry-instrumentation-grpc
     #   opentelemetry-instrumentation-logging
-    #   opentelemetry-instrumentation-psycopg2
+    #   opentelemetry-instrumentation-psycopg
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
     #   opentelemetry-propagator-aws-xray
     #   opentelemetry-sdk
-opentelemetry-exporter-otlp-proto-common==1.24.0
+    #   opentelemetry-semantic-conventions
+opentelemetry-exporter-otlp-proto-common==1.29.0
     # via opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-http==1.24.0
+opentelemetry-exporter-otlp-proto-http==1.29.0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation==0.45b0
+opentelemetry-instrumentation==0.50b0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-instrumentation-aiohttp-client
@@ -490,53 +492,53 @@ opentelemetry-instrumentation==0.45b0
     #   opentelemetry-instrumentation-django
     #   opentelemetry-instrumentation-grpc
     #   opentelemetry-instrumentation-logging
-    #   opentelemetry-instrumentation-psycopg2
+    #   opentelemetry-instrumentation-psycopg
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
-opentelemetry-instrumentation-aiohttp-client==0.45b0
+opentelemetry-instrumentation-aiohttp-client==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-asgi==0.45b0
+opentelemetry-instrumentation-asgi==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-botocore==0.45b0
+opentelemetry-instrumentation-botocore==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-celery==0.45b0
+opentelemetry-instrumentation-celery==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-dbapi==0.45b0
+opentelemetry-instrumentation-dbapi==0.50b0
     # via
     #   -r /baserow/backend/requirements/base.in
-    #   opentelemetry-instrumentation-psycopg2
-opentelemetry-instrumentation-django==0.45b0
+    #   opentelemetry-instrumentation-psycopg
+opentelemetry-instrumentation-django==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-grpc==0.45b0
+opentelemetry-instrumentation-grpc==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-logging==0.45b0
+opentelemetry-instrumentation-logging==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-psycopg2==0.45b0
+opentelemetry-instrumentation-psycopg==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-redis==0.45b0
+opentelemetry-instrumentation-redis==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-requests==0.45b0
+opentelemetry-instrumentation-requests==0.50b0
     # via -r /baserow/backend/requirements/base.in
-opentelemetry-instrumentation-wsgi==0.45b0
+opentelemetry-instrumentation-wsgi==0.50b0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-instrumentation-django
 opentelemetry-propagator-aws-xray==1.0.1
     # via opentelemetry-instrumentation-botocore
-opentelemetry-proto==1.24.0
+opentelemetry-proto==1.29.0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-exporter-otlp-proto-common
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.24.0
+opentelemetry-sdk==1.29.0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-exporter-otlp-proto-http
     #   opentelemetry-instrumentation-grpc
-opentelemetry-semantic-conventions==0.45b0
+opentelemetry-semantic-conventions==0.50b0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-instrumentation
     #   opentelemetry-instrumentation-aiohttp-client
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
@@ -548,7 +550,7 @@ opentelemetry-semantic-conventions==0.45b0
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-instrumentation-wsgi
     #   opentelemetry-sdk
-opentelemetry-util-http==0.45b0
+opentelemetry-util-http==0.50b0
     # via
     #   -r /baserow/backend/requirements/base.in
     #   opentelemetry-instrumentation-aiohttp-client
@@ -566,6 +568,7 @@ packaging==23.2
     #   huggingface-hub
     #   langchain-core
     #   marshmallow
+    #   opentelemetry-instrumentation
     #   pytest
 parso==0.8.4
     # via jedi
@@ -599,7 +602,7 @@ prosemirror @ https://github.com/fellowapp/prosemirror-py/archive/refs/tags/v0.3
     # via -r /baserow/backend/requirements/base.in
 proto-plus==1.24.0
     # via google-api-core
-protobuf==4.25.4
+protobuf==5.29.2
     # via
     #   google-api-core
     #   googleapis-common-protos
@@ -607,7 +610,9 @@ protobuf==4.25.4
     #   proto-plus
 psutil==5.9.8
     # via -r /baserow/backend/requirements/base.in
-psycopg2==2.9.9
+psycopg==3.2.3
+    # via -r /baserow/backend/requirements/base.in
+psycopg-binary==3.2.3
     # via -r /baserow/backend/requirements/base.in
 ptyprocess==0.7.0
     # via pexpect
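
Pinning both `psycopg` and `psycopg-binary` follows the psycopg 3 packaging split: `psycopg` is the pure-Python driver, and `psycopg-binary` provides the pre-built libpq implementation that the driver transparently prefers when present. Which implementation is active can be checked at runtime:

    import psycopg

    # Reports "binary", "c", or "python" depending on what is installed.
    print(psycopg.pq.__impl__)
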
@@ -867,6 +872,7 @@ typing-extensions==4.11.0
     #   openai
     #   opentelemetry-sdk
     #   prosemirror
+    #   psycopg
     #   pydantic
     #   pydantic-core
     #   sqlalchemy
@@ -2,7 +2,7 @@
 from django.db import connection, migrations
 
-from psycopg2 import sql
+from baserow.core.psycopg import sql
 
 
 def remove_duplicates(model, view):