Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-07 06:15:36 +00:00)
Resolve "Introduce get_default_storage"
Commit: f3f3163f3e
Parent: 016455cd3a
29 changed files with 417 additions and 303 deletions
Changed paths:

backend
    src/baserow
        api/user_files
        contrib
            builder/domains
            database
        core
        test_utils
    tests/baserow
        api/user_files
        contrib/database
        core/jobs
enterprise/backend
    src/baserow_enterprise
    tests/baserow_enterprise_tests
        api
            audit_log
            secure_file_serve
        audit_log
        secure_file_serve
premium/backend
    src/baserow_premium/generative_ai
    tests/baserow_premium_tests
        api/export
        export
        fields
        generative_ai
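In short: instead of importing Django's `default_storage` at module load time, every call site now asks `baserow.core.storage.get_default_storage()` for the backend at call time. Because `from ... import default_storage` gives each importing module its own reference, tests previously had to patch the name in every module that imported it; the accessor provides a single seam to mock. A minimal sketch of the accessor introduced later in this diff:

    from django.core.files.storage import Storage, default_storage

    def get_default_storage() -> Storage:
        # Single point of entry for the default storage; easier to test and mock.
        return default_storage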
@@ -1,11 +1,11 @@
 from django.conf import settings
-from django.core.files.storage import default_storage
 
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import extend_schema_field
 from rest_framework import serializers
 
 from baserow.core.models import UserFile
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.handler import UserFileHandler
 
 

@@ -22,9 +22,10 @@ class UserFileURLAndThumbnailsSerializerMixin(serializers.Serializer):
 
     @extend_schema_field(OpenApiTypes.URI)
     def get_url(self, instance):
+        storage = get_default_storage()
         name = self.get_instance_attr(instance, "name")
         path = UserFileHandler().user_file_path(name)
-        url = default_storage.url(path)
+        url = storage.url(path)
         return url
 
     @extend_schema_field(OpenApiTypes.OBJECT)

@@ -33,10 +34,11 @@ class UserFileURLAndThumbnailsSerializerMixin(serializers.Serializer):
             return None
 
         name = self.get_instance_attr(instance, "name")
+        storage = get_default_storage()
 
         return {
             thumbnail_name: {
-                "url": default_storage.url(
+                "url": storage.url(
                     UserFileHandler().user_file_thumbnail_path(name, thumbnail_name)
                 ),
                 "width": size[0],
@@ -1,7 +1,6 @@
 from datetime import datetime, timezone
 from typing import Iterable, List, cast
 
-from django.core.files.storage import default_storage
 from django.db.models import QuerySet
 from django.db.utils import IntegrityError
 

@@ -17,6 +16,7 @@ from baserow.contrib.builder.models import Builder
 from baserow.core.db import specific_iterator
 from baserow.core.exceptions import IdDoesNotExist
 from baserow.core.registries import ImportExportConfig, application_type_registry
+from baserow.core.storage import get_default_storage
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.utils import Progress, extract_allowed
 

@@ -206,6 +206,8 @@ class DomainHandler:
             include_permission_data=True, reduce_disk_space_usage=False
         )
 
+        default_storage = get_default_storage()
+
         exported_builder = builder_application_type.export_serialized(
             builder, import_export_config, None, default_storage
         )
@@ -1,4 +1,3 @@
-from django.core.files.storage import default_storage
 from django.utils.functional import lazy
 
 from drf_spectacular.types import OpenApiTypes

@@ -9,6 +8,7 @@ from baserow.contrib.database.export.handler import ExportHandler
 from baserow.contrib.database.export.models import ExportJob
 from baserow.contrib.database.export.registries import table_exporter_registry
 from baserow.core.context import clear_current_workspace_id, set_current_workspace_id
+from baserow.core.storage import get_default_storage
 
 # This is a map from the front end supported charsets to the internal python supported
 # charset value as they do not always match up.

@@ -88,7 +88,8 @@ class ExportedFileURLSerializerMixin(serializers.Serializer):
         name = self.get_instance_attr(instance, "exported_file_name")
         if name:
             path = ExportHandler().export_file_path(name)
-            return default_storage.url(path)
+            storage = get_default_storage()
+            return storage.url(path)
         else:
             return None
 
@@ -6,7 +6,6 @@ from typing import Any, BinaryIO, Dict, Optional
 
 from django.conf import settings
 from django.contrib.auth import get_user_model
-from django.core.files.storage import default_storage
 from django.db import transaction
 
 from loguru import logger

@@ -27,6 +26,7 @@ from baserow.contrib.database.views.exceptions import ViewNotInTable
 from baserow.contrib.database.views.models import View
 from baserow.contrib.database.views.registries import view_type_registry
 from baserow.core.handler import CoreHandler
+from baserow.core.storage import get_default_storage
 
 from .exceptions import (
     ExportJobCanceledException,

@@ -167,6 +167,7 @@ class ExportHandler:
 
         jobs = ExportJob.jobs_requiring_cleanup(datetime.now(tz=timezone.utc))
         logger.info(f"Cleaning up {jobs.count()} old jobs")
+        storage = get_default_storage()
         for job in jobs:
             if job.exported_file_name:
                 # Note the django file storage api will not raise an exception if

@@ -174,9 +175,7 @@
                 # their exported_file_name and then write to that file, so if the
                 # write step fails it is possible that the exported_file_name does not
                 # exist.
-                default_storage.delete(
-                    ExportHandler.export_file_path(job.exported_file_name)
-                )
+                storage.delete(ExportHandler.export_file_path(job.exported_file_name))
                 job.exported_file_name = None
 
             job.state = EXPORT_JOB_EXPIRED_STATUS

@@ -304,22 +303,25 @@ def _generate_random_file_name_with_extension(file_extension):
     return str(uuid.uuid4()) + file_extension
 
 
-def _create_storage_dir_if_missing_and_open(storage_location) -> BinaryIO:
+def _create_storage_dir_if_missing_and_open(storage_location, storage=None) -> BinaryIO:
     """
     Attempts to open the provided storage location in binary overwriting write mode.
     If it encounters a FileNotFound error will attempt to create the folder structure
     leading upto to the storage location and then open again.
 
     :param storage_location: The storage location to open and ensure folders for.
+    :param storage: The storage to use, if None will use the default storage.
     :return: The open file descriptor for the storage_location
     """
 
+    storage = storage or get_default_storage()
+
     try:
-        return default_storage.open(storage_location, "wb+")
+        return storage.open(storage_location, "wb+")
     except FileNotFoundError:
         # django's file system storage will not attempt to creating a missing
         # EXPORT_FILES_DIRECTORY and instead will throw a FileNotFoundError.
         # So we first save an empty file which will create any missing directories
         # and then open again.
-        default_storage.save(storage_location, BytesIO())
-        return default_storage.open(storage_location, "wb")
+        storage.save(storage_location, BytesIO())
+        return storage.open(storage_location, "wb")
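The helper now takes an optional `storage` argument, so existing call sites keep working while tests can inject their own backend. A hypothetical call sketch (the literal path below is illustrative, not from the diff):

    # Falls back to get_default_storage() when no storage is passed.
    file_descriptor = _create_storage_dir_if_missing_and_open("export_files/example.csv")
    file_descriptor.write(b"id,name\n")
    file_descriptor.close()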
@@ -16,7 +16,7 @@ from django.contrib.auth.models import AbstractUser
 from django.contrib.postgres.aggregates import StringAgg
 from django.contrib.postgres.fields import JSONField
 from django.core.exceptions import ValidationError
-from django.core.files.storage import Storage, default_storage
+from django.core.files.storage import Storage
 from django.db import OperationalError, connection, models
 from django.db.models import (
     Case,

@@ -117,6 +117,7 @@ from baserow.core.formula.parser.exceptions import FormulaFunctionTypeDoesNotExi
 from baserow.core.handler import CoreHandler
 from baserow.core.models import UserFile, WorkspaceUser
 from baserow.core.registries import ImportExportConfig
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.exceptions import UserFileDoesNotExist
 from baserow.core.user_files.handler import UserFileHandler
 from baserow.core.utils import list_to_comma_separated_string

@@ -3295,11 +3296,12 @@ class FileFieldType(FieldType):
         )
 
     def get_export_value(self, value, field_object, rich_value=False):
+        storage = get_default_storage()
         files = []
         for file in value:
             if "name" in file:
                 path = UserFileHandler().user_file_path(file["name"])
-                url = default_storage.url(path)
+                url = storage.url(path)
             else:
                 url = None
 
@@ -4,7 +4,6 @@ from decimal import Decimal
 from typing import Any, List, Optional, Set, Type, Union
 
 from django.contrib.postgres.fields import ArrayField, JSONField
-from django.core.files.storage import default_storage
 from django.db import models
 from django.db.models import Expression, F, Func, Q, QuerySet, TextField, Value
 from django.db.models.functions import Cast, Concat

@@ -50,6 +49,7 @@ from baserow.contrib.database.formula.types.formula_type import (
     BaserowFormulaValidType,
     UnTyped,
 )
+from baserow.core.storage import get_default_storage
 from baserow.core.utils import list_to_comma_separated_string
 
 

@@ -921,8 +921,10 @@ class BaserowFormulaSingleFileType(BaserowJSONBObjectBaseType):
         elif "name" in file:
             from baserow.core.user_files.handler import UserFileHandler
 
+            storage = get_default_storage()
+
             path = UserFileHandler().user_file_path(file["name"])
-            url = default_storage.url(path)
+            url = storage.url(path)
         else:
             url = None
 
@@ -11,7 +11,7 @@ from zipfile import ZIP_DEFLATED, ZipFile
 from django.conf import settings
 from django.contrib.auth import get_user_model
 from django.contrib.auth.models import AbstractUser, AnonymousUser
-from django.core.files.storage import Storage, default_storage
+from django.core.files.storage import Storage
 from django.db import OperationalError, transaction
 from django.db.models import Count, Prefetch, Q, QuerySet
 from django.utils import translation

@@ -103,6 +103,7 @@ from .signals import (
     workspace_user_updated,
     workspaces_reordered,
 )
+from .storage import get_default_storage
 from .telemetry.utils import baserow_trace_methods, disable_instrumentation
 from .trash.handler import TrashHandler
 from .types import (

@@ -1629,8 +1630,7 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         :rtype: list
         """
 
-        if not storage:
-            storage = default_storage
+        storage = storage or get_default_storage()
 
         with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
             exported_applications = []

@@ -1679,8 +1679,7 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
             progress_builder, len(exported_applications) * 1000
         )
 
-        if not storage:
-            storage = default_storage
+        storage = storage or get_default_storage()
 
         with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
             id_mapping: Dict[str, Any] = {}
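Both CoreHandler entry points collapse the two-line fallback into one; the behavior is unchanged apart from routing through the accessor:

    # Before:
    if not storage:
        storage = default_storage

    # After: same fallback, but the backend is resolved at call time.
    storage = storage or get_default_storage()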
@@ -1,8 +1,8 @@
-from django.core.files.storage import default_storage
 from django.core.management.base import BaseCommand
 
 from PIL import Image
 
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.handler import UserFileHandler
 from baserow.core.user_files.models import UserFile
 

@@ -33,6 +33,7 @@ class Command(BaseCommand):
         buffer_size = 100
         queryset = UserFile.objects.filter(is_image=True)
         count = queryset.count()
+        storage = get_default_storage()
 
         while i < count:
             user_files = queryset[i : min(count, i + buffer_size)]

@@ -40,14 +41,14 @@
                 i += 1
 
                 full_path = handler.user_file_path(user_file)
-                stream = default_storage.open(full_path)
+                stream = storage.open(full_path)
 
                 try:
                     image = Image.open(stream)
                     handler.generate_and_save_image_thumbnails(
                         image,
                         user_file,
-                        storage=default_storage,
+                        storage=storage,
                         only_with_name=options["name"],
                     )
                     image.close()
@@ -2,7 +2,6 @@ from datetime import datetime, timedelta, timezone
 
 from django.conf import settings
 from django.contrib.auth.models import AbstractUser
-from django.core.files.storage import default_storage
 from django.db import OperationalError
 from django.db.models import QuerySet
 

@@ -28,6 +27,7 @@ from baserow.core.snapshots.exceptions import (
     SnapshotIsBeingRestored,
     SnapshotNameNotUnique,
 )
+from baserow.core.storage import get_default_storage
 from baserow.core.utils import Progress
 
 from .job_types import CreateSnapshotJobType, RestoreSnapshotJobType

@@ -373,6 +373,8 @@ class SnapshotHandler:
         as the application.
         """
 
+        storage = get_default_storage()
+
         if snapshot is None:
             raise SnapshotDoesNotExist()
 

@@ -395,7 +397,7 @@
         )
         try:
             exported_application = application_type.export_serialized(
-                application, snapshot_import_export_config, None, default_storage
+                application, snapshot_import_export_config, None, storage
             )
         except OperationalError as e:
             # Detect if this `OperationalError` is due to us exceeding the

@@ -418,7 +420,7 @@
             snapshot_import_export_config,
             id_mapping,
             None,
-            default_storage,
+            storage,
             progress_builder=progress.create_child_builder(represents_progress=50),
         )
 

@@ -435,6 +437,8 @@
         :returns: Application that is a copy of the snapshot.
         """
 
+        storage = get_default_storage()
+
         if snapshot is None:
             raise SnapshotDoesNotExist()
 

@@ -456,7 +460,7 @@
         # be correctly set during the import process.
         application.workspace = workspace
         exported_application = application_type.export_serialized(
-            application, restore_snapshot_import_export_config, None, default_storage
+            application, restore_snapshot_import_export_config, None, storage
         )
         progress.increment(by=50)
 

@@ -466,7 +470,7 @@
             restore_snapshot_import_export_config,
             {},
             None,
-            default_storage,
+            storage,
             progress_builder=progress.create_child_builder(represents_progress=50),
         )
         imported_application.name = CoreHandler().find_unused_application_name(
@@ -1,9 +1,21 @@
-from django.core.files.storage import default_storage
+from django.core.files.storage import Storage, default_storage
 
 
+def get_default_storage() -> Storage:
+    """
+    Returns the default storage. This method is mainly used to have
+    a single point of entry for the default storage, so it's easier to
+    test and mock.
+
+    :return: The django default storage.
+    """
+
+    return default_storage
+
+
 class OverwritingStorageHandler:
-    def __init__(self, storage):
-        self.storage = storage if storage else default_storage
+    def __init__(self, storage=None):
+        self.storage = storage or get_default_storage()
 
     def save(self, name, content):
         if self.storage.exists(name):
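With the new default argument, `OverwritingStorageHandler()` can be constructed without arguments. A small usage sketch (the path and payload are illustrative; `ContentFile` is standard Django):

    from django.core.files.base import ContentFile

    from baserow.core.storage import OverwritingStorageHandler

    handler = OverwritingStorageHandler()  # falls back to get_default_storage()
    handler.save("user_files/example.txt", ContentFile(b"data"))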
@@ -9,7 +9,7 @@ from urllib.parse import urlparse
 from zipfile import ZipFile
 
 from django.conf import settings
-from django.core.files.storage import Storage, default_storage
+from django.core.files.storage import Storage
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.db.models import QuerySet
 from django.utils.http import parse_header_parameters

@@ -20,7 +20,7 @@ from PIL import Image, ImageOps
 from requests.exceptions import RequestException
 
 from baserow.core.models import UserFile
-from baserow.core.storage import OverwritingStorageHandler
+from baserow.core.storage import OverwritingStorageHandler, get_default_storage
 from baserow.core.utils import random_string, sha256_hash, stream_size, truncate_middle
 
 from .exceptions import (

@@ -148,7 +148,7 @@ class UserFileHandler:
         if not user_file.is_image:
             raise ValueError("The provided user file is not an image.")
 
-        storage = storage or default_storage
+        storage = storage or get_default_storage()
         image_width = user_file.image_width
         image_height = user_file.image_height
 

@@ -211,7 +211,7 @@
                 "The provided file is too large.",
             )
 
-        storage = storage or default_storage
+        storage = storage or get_default_storage()
         stream_hash = sha256_hash(stream)
         file_name = truncate_middle(file_name, 64)
 

@@ -377,7 +377,7 @@
         if cache is None:
             cache = {}
 
-        storage = storage or default_storage
+        storage = storage or get_default_storage()
 
         if not user_file:
             return None
@@ -11,7 +11,6 @@ from unittest.mock import patch
 
 from django.conf import settings as django_settings
 from django.core import cache
-from django.core.files.storage import Storage
 from django.core.management import call_command
 from django.db import DEFAULT_DB_ALIAS, OperationalError, connection
 from django.db.migrations.executor import MigrationExecutor

@@ -719,20 +718,6 @@ def enable_locmem_testing(settings):
     cache.cache.clear()
 
 
-@pytest.fixture
-def stubbed_storage(monkeypatch):
-    class StubbedStorage(Storage):
-        def __init__(self, *args, **kwargs):
-            pass
-
-        def save(self, name, content, **kwargs):
-            return name
-
-    storage_instance = StubbedStorage()
-    monkeypatch.setattr("django.core.files.storage.default_storage", storage_instance)
-    return storage_instance
-
-
 @pytest.fixture(autouse=True)
 def mutable_generative_ai_model_type_registry():
     from baserow.core.generative_ai.registries import generative_ai_model_type_registry
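The deleted `stubbed_storage` fixture worked by monkeypatching `django.core.files.storage.default_storage` itself; with the accessor in place, tests patch the single entry point instead. The pattern used throughout the updated tests:

    from unittest.mock import MagicMock, patch

    with patch(
        "baserow.contrib.database.export.handler.get_default_storage"
    ) as get_storage_mock:
        storage_mock = MagicMock()
        get_storage_mock.return_value = storage_mock
        # Code under test now receives storage_mock from get_default_storage().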
@@ -68,7 +68,9 @@ def test_upload_file_with_jwt_auth(api_client, data_fixture, tmpdir):
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             token = data_fixture.generate_token(user)

@@ -97,7 +99,9 @@ def test_upload_file_with_jwt_auth(api_client, data_fixture, tmpdir):
     file_path = tmpdir.join("user_files", user_file.name)
     assert file_path.isfile()
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         token = data_fixture.generate_token(user)
         response_2 = api_client.post(

@@ -117,7 +121,9 @@ def test_upload_file_with_jwt_auth(api_client, data_fixture, tmpdir):
     image.save(file, format="PNG")
     file.seek(0)
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         response = api_client.post(
             reverse("api:user_files:upload_file"),
             data={"file": file},

@@ -199,7 +205,9 @@ def test_upload_file_with_token_auth(api_client, data_fixture, tmpdir):
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(

@@ -227,7 +235,9 @@ def test_upload_file_with_token_auth(api_client, data_fixture, tmpdir):
     file_path = tmpdir.join("user_files", user_file.name)
     assert file_path.isfile()
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response_2 = api_client.post(
             reverse("api:user_files:upload_file"),

@@ -245,7 +255,9 @@ def test_upload_file_with_token_auth(api_client, data_fixture, tmpdir):
     image.save(file, format="PNG")
     file.seek(0)
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         response = api_client.post(
             reverse("api:user_files:upload_file"),
             data={"file": file},

@@ -361,7 +373,9 @@ def test_upload_file_via_url_with_jwt_auth(api_client, data_fixture, tmpdir):
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         response = api_client.post(
             reverse("api:user_files:upload_via_url"),
             data={"url": "http://localhost/test.txt"},

@@ -470,7 +484,9 @@ def test_upload_file_via_url_with_token_auth(api_client, data_fixture, tmpdir):
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
+    ):
         response = api_client.post(
             reverse("api:user_files:upload_via_url"),
             data={"url": "http://localhost/test.txt"},
@@ -231,7 +231,11 @@ def test_exporting_csv_writes_file_to_storage(
     )
     storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
 
-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch(
+        "baserow.contrib.database.export.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
+
         run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
         # DRF uses some custom internal date time formatting, use the field itself
         # so the test doesn't break if we set a different default timezone format etc

@@ -351,7 +355,10 @@ def test_exporting_csv_table_writes_file_to_storage(
     )
     storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
 
-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch(
+        "baserow.contrib.database.export.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
         run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
         # DRF uses some custom internal date time formatting, use the field itself
         # so the test doesn't break if we set a different default timezone format etc
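Two equivalent patching styles appear in the tests: replacing the accessor with a callable that returns the test storage, or letting `patch()` install a `MagicMock` and configuring its `return_value`. Both make the code under test see the `FileSystemStorage` instance:

    # Style 1: swap the accessor for a lambda.
    with patch(
        "baserow.core.user_files.handler.get_default_storage", new=lambda: storage
    ):
        ...

    # Style 2: capture the installed MagicMock and set its return value.
    with patch(
        "baserow.contrib.database.export.handler.get_default_storage"
    ) as get_storage_mock:
        get_storage_mock.return_value = storage
        ...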
@@ -2687,7 +2687,11 @@ def test_upload_file_view(api_client, data_fixture, tmpdir):
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
+
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             token = data_fixture.generate_token(user)

@@ -2719,7 +2723,11 @@ def test_upload_file_view(api_client, data_fixture, tmpdir):
     file_path = tmpdir.join("user_files", user_file.name)
     assert file_path.isfile()
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
+
         token = data_fixture.generate_token(user)
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response_2 = api_client.post(

@@ -2742,7 +2750,11 @@ def test_upload_file_view(api_client, data_fixture, tmpdir):
     image.save(file, format="PNG")
     file.seek(0)
 
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch(
+        "baserow.core.user_files.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
+
         response = api_client.post(
             reverse(
                 "api:database:views:form:upload_file",

@@ -2783,7 +2795,7 @@ def test_upload_file_view_with_no_public_file_field(api_client, data_fixture, tm
     data_fixture.create_form_view_field_option(view, field=file_field, enabled=False)
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(

@@ -2812,7 +2824,7 @@ def test_upload_file_view_with_a_rich_text_field_is_possible(
     )
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(

@@ -2830,7 +2842,7 @@
 @pytest.mark.django_db
 def test_upload_file_form_view_does_not_exist(api_client, data_fixture, tmpdir):
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(

@@ -2854,7 +2866,7 @@ def test_upload_file_view_form_is_password_protected(api_client, data_fixture, t
     data_fixture.create_form_view_field_option(view, field=file_field, enabled=True)
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(

@@ -2880,7 +2892,7 @@
     assert public_view_token is not None
 
     storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2020-01-01 12:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             response = api_client.post(
@@ -1,7 +1,7 @@
 from datetime import datetime, timedelta, timezone
 from io import BytesIO
 from typing import List
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
 
 from django.db import connection
 from django.test.utils import CaptureQueriesContext

@@ -50,8 +50,11 @@ def _parse_date(date):
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_hidden_fields_are_excluded(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_hidden_fields_are_excluded(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
+
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     text_field = data_fixture.create_text_field(table=table, name="text_field", order=1)

@@ -76,8 +79,10 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_csv_is_sorted_by_sorts(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_csv_is_sorted_by_sorts(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     text_field = data_fixture.create_text_field(table=table, name="text_field")

@@ -101,8 +106,10 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_csv_is_filtered_by_filters(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_csv_is_filtered_by_filters(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     text_field = data_fixture.create_text_field(table=table, name="text_field")

@@ -128,8 +135,12 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_exporting_table_ignores_view_filters_sorts_hides(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_exporting_table_ignores_view_filters_sorts_hides(
+    get_storage_mock, data_fixture
+):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     text_field = data_fixture.create_text_field(table=table, name="text_field", order=1)

@@ -168,8 +179,10 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_columns_are_exported_by_order_then_field_id(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_columns_are_exported_by_order_then_field_id(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     field_a = data_fixture.create_text_field(table=table, name="field_a")

@@ -210,10 +223,12 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_can_export_every_interesting_different_field_to_csv(
-    storage_mock, data_fixture
+    get_storage_mock, data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     contents = run_export_job_over_interesting_table(
         data_fixture, storage_mock, {"exporter_type": "csv"}
     )

@@ -270,10 +285,12 @@ def run_export_job_over_interesting_table(data_fixture, storage_mock, options):
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_can_export_special_characters_in_arabic_encoding_to_csv(
-    storage_mock, data_fixture
+    get_storage_mock, data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     database = data_fixture.create_database_application(user=user)
     table = data_fixture.create_database_table(database=database)

@@ -331,10 +348,12 @@ def test_creating_a_new_export_job_will_cancel_any_already_running_jobs_for_that
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_a_complete_export_job_which_has_expired_will_have_its_file_deleted(
-    storage_mock, data_fixture, settings
+    get_storage_mock, data_fixture, settings
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     handler = ExportHandler()
     job_start = datetime.now(tz=timezone.utc)
     half_file_duration = timedelta(minutes=int(settings.EXPORT_FILE_EXPIRE_MINUTES / 2))

@@ -365,12 +384,14 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_a_pending_job_which_has_expired_will_be_cleaned_up(
-    storage_mock,
+    get_storage_mock,
     data_fixture,
     settings,
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     other_user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)

@@ -404,10 +425,12 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_a_running_export_job_which_has_expired_will_be_stopped(
-    storage_mock, data_fixture, settings
+    get_storage_mock, data_fixture, settings
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     other_user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)

@@ -524,11 +547,13 @@ def test_attempting_to_export_a_view_for_a_type_which_doesnt_support_it_fails(
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_an_export_job_which_fails_will_be_marked_as_a_failed_job(
-    storage_mock,
+    get_storage_mock,
     data_fixture,
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     handler = ExportHandler()

@@ -585,8 +610,11 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_can_export_csv_without_header(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_can_export_csv_without_header(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
+
     _, contents = setup_table_and_run_export_decoding_result(
         data_fixture,
         storage_mock,

@@ -602,8 +630,11 @@
 
 @pytest.mark.django_db
 @pytest.mark.once_per_day_in_ci
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_can_export_csv_with_different_charsets(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_can_export_csv_with_different_charsets(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
+
     for _, charset in SUPPORTED_EXPORT_CHARSETS:
         _, contents = setup_table_and_run_export_decoding_result(
             data_fixture,

@@ -623,8 +654,13 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_can_export_csv_with_different_column_separators(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_can_export_csv_with_different_column_separators(
+    get_storage_mock, data_fixture
+):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
+
     for _, col_sep in SUPPORTED_CSV_COLUMN_SEPARATORS:
         _, contents = setup_table_and_run_export_decoding_result(
            data_fixture,

@@ -646,10 +682,12 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_adding_more_rows_doesnt_increase_number_of_queries_run(
-    storage_mock, data_fixture, django_assert_num_queries
+    get_storage_mock, data_fixture, django_assert_num_queries
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     add_row, add_linked_row, user, table, grid_view = setup_testing_table(data_fixture)
 
     # Ensure we test with linked rows and select options as they are the fields which

@@ -718,6 +756,9 @@ def run_export_job_with_mock_storage(
     if "export_charset" not in options:
         options["export_charset"] = "utf-8"
 
+    storage_instance = MagicMock()
+    storage_mock.return_value = storage_instance
+
     stub_file = BytesIO()
     storage_mock.open.return_value = stub_file
     close = stub_file.close

@@ -838,10 +879,12 @@ def setup_table_and_run_export_decoding_result(
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_a_column_without_a_grid_view_option_has_an_option_made_and_is_exported(
-    storage_mock, data_fixture
+    get_storage_mock, data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
     table = data_fixture.create_database_table(user=user)
     field_with_an_option = data_fixture.create_text_field(table=table, name="field_a")

@@ -873,8 +916,10 @@
 
 
 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_action_done_is_emitted_when_the_export_finish(storage_mock, data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_action_done_is_emitted_when_the_export_finish(get_storage_mock, data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = data_fixture.create_user()
|
user = data_fixture.create_user()
|
||||||
table = data_fixture.create_database_table(user=user)
|
table = data_fixture.create_database_table(user=user)
|
||||||
|
|
||||||
|
@ -898,8 +943,10 @@ def test_action_done_is_emitted_when_the_export_finish(storage_mock, data_fixtur
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@patch("baserow.contrib.database.export.handler.default_storage")
|
@patch("baserow.contrib.database.export.handler.get_default_storage")
|
||||||
def test_csv_is_escaped(storage_mock, data_fixture):
|
def test_csv_is_escaped(get_storage_mock, data_fixture):
|
||||||
|
storage_mock = MagicMock()
|
||||||
|
get_storage_mock.return_value = storage_mock
|
||||||
user = data_fixture.create_user()
|
user = data_fixture.create_user()
|
||||||
table = data_fixture.create_database_table(user=user)
|
table = data_fixture.create_database_table(user=user)
|
||||||
text_field = data_fixture.create_text_field(table=table, name="text_field")
|
text_field = data_fixture.create_text_field(table=table, name="text_field")
|
||||||
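The same refactoring pattern repeats across all of the hunks above: the export handler now resolves its storage through get_default_storage(), so each test patches that factory and hands back a MagicMock instead of patching a default_storage object. A minimal sketch of the pattern, assuming pytest and unittest.mock (the test body is illustrative, not part of this commit):

    from unittest.mock import MagicMock, patch

    @patch("baserow.contrib.database.export.handler.get_default_storage")
    def test_export_goes_through_storage(get_storage_mock, data_fixture):
        # The code under test calls get_default_storage() and receives the mock.
        storage_mock = MagicMock()
        get_storage_mock.return_value = storage_mock
        ...  # run an export job here
        storage_mock.open.assert_called()  # assertions go through the mock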

@@ -175,7 +175,7 @@ def test_run_task_with_exception_mapping(mock_get_by_model, data_fixture):


 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.core.storage.get_default_storage")
 def test_cleanup_file_import_job(storage_mock, data_fixture, settings):
     now = datetime.now(tz=timezone.utc)
     time_before_expiration = now - timedelta(
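Two details in this hunk are easy to miss: the patch target is the definition site (baserow.core.storage.get_default_storage) rather than the use site patched in the export tests above, and the storage_mock parameter name is kept even though it now receives the factory mock. With unittest.mock.patch the target must be the name the code under test actually looks up, so which spelling works depends on how the module imports the helper (a general mock.patch rule, not something this diff states):

    # patch where the name is looked up by the code under test:
    patch("baserow.contrib.database.export.handler.get_default_storage")  # use site
    patch("baserow.core.storage.get_default_storage")                     # definition site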

@@ -2,7 +2,6 @@ from collections import OrderedDict
 from typing import Dict
 from uuid import uuid4

-from django.core.files.storage import default_storage
 from django.core.paginator import Paginator
 from django.utils.functional import lazy
 from django.utils.translation import gettext as _
@@ -25,6 +24,7 @@ from baserow.contrib.database.export.handler import (
 )
 from baserow.core.action.registries import action_type_registry
 from baserow.core.jobs.registries import JobType
+from baserow.core.storage import get_default_storage
 from baserow.core.utils import ChildProgressBuilder
 from baserow_enterprise.features import AUDIT_LOG

@@ -189,9 +189,11 @@ class AuditLogExportJobType(JobType):
         if not job.exported_file_name:
             return

+        storage = get_default_storage()
         storage_location = ExportHandler.export_file_path(job.exported_file_name)
         try:
-            default_storage.delete(storage_location)
+            storage.delete(storage_location)
         except FileNotFoundError:
             logger.error(
                 "Could not delete file %s for 'audit_log_export' job %s",
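The helper itself is not shown in this diff. Judging from the call sites, a minimal sketch of what baserow/core/storage.py exposes could look like the following (an assumption, not the committed implementation):

    from django.core.files.storage import Storage, default_storage

    def get_default_storage() -> Storage:
        # Resolve the configured storage when called, not at import time,
        # so every caller shares one seam that tests can patch.
        return default_storage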

@@ -3,11 +3,11 @@ from dataclasses import dataclass
 from django.conf import settings
 from django.contrib.auth import get_user_model
 from django.contrib.auth.models import AbstractUser
-from django.core.files.storage import default_storage
 from django.core.signing import BadSignature, SignatureExpired

 from baserow.core.handler import CoreHandler
 from baserow.core.operations import ReadWorkspaceOperationType
+from baserow.core.storage import get_default_storage
 from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission
 from baserow_enterprise.secure_file_serve.exceptions import SecureFileServeException
 from baserow_enterprise.secure_file_serve.storage import (
@@ -24,7 +24,8 @@ class SecureFile:
     path: str

     def open(self, mode="rb"):
-        return default_storage.open(self.path, mode)
+        storage = get_default_storage()
+        return storage.open(self.path, mode)


 class SecureFileServeHandler:
@@ -85,9 +86,10 @@ class SecureFileServeHandler:
            raise SecureFileServeException("Can't access file")

     def get_file_path(self, data: SecureFileServeSignerPayload) -> str:
+        storage = get_default_storage()
         file_path = data.name

-        if not default_storage.exists(file_path):
+        if not storage.exists(file_path):
             raise SecureFileServeException("File does not exist")
         return file_path

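Both SecureFile.open and get_file_path now resolve the storage per call instead of holding a module-level default_storage reference that is bound once at import. That is what allows the tests further down to swap the whole backend with a single patch("baserow.core.storage.get_default_storage", ...) context manager.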

@@ -544,7 +544,6 @@ def test_audit_log_can_export_to_csv_all_entries(
     enterprise_data_fixture,
     synced_roles,
     django_capture_on_commit_callbacks,
-    stubbed_storage,
 ):
     admin_user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token()


@@ -203,7 +203,6 @@ def test_workspace_audit_log_user_filter_returns_only_workspace_users(
 def test_staff_member_can_access_audit_log_for_their_own_workspace(
     api_client,
     enterprise_data_fixture,
-    stubbed_storage,
     alternative_per_workspace_license_service,
     url_name,
 ):
@@ -252,7 +251,6 @@ def test_staff_member_can_access_audit_log_for_any_workspace(
 def test_staff_member_cant_access_audit_log_for_own_workspace_without_license(
     api_client,
     enterprise_data_fixture,
-    stubbed_storage,
     alternative_per_workspace_license_service,
     url_name,
 ):
@@ -276,7 +274,6 @@ def test_workspace_audit_log_can_export_to_csv_filtered_entries(
     enterprise_data_fixture,
     synced_roles,
     django_capture_on_commit_callbacks,
-    stubbed_storage,
 ):
     enterprise_data_fixture.enable_enterprise()

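Dropping stubbed_storage from these signatures is consistent with the rest of the commit: once every storage access funnels through get_default_storage(), a dedicated stub fixture becomes redundant, and the tests that still need a fake backend patch the factory inline, as the hunks below do. (That reading is inferred from the diff; the commit message does not spell it out.)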

@@ -2,7 +2,6 @@ from io import BytesIO
 from unittest.mock import patch

 from django.conf import settings
-from django.core.files.storage import default_storage
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.shortcuts import reverse
 from django.test.utils import override_settings
@@ -19,12 +18,15 @@ from rest_framework_simplejwt.tokens import RefreshToken
 from rest_framework_simplejwt.utils import datetime_from_epoch

 from baserow.core.context import clear_current_workspace_id, set_current_workspace_id
+from baserow.core.storage import get_default_storage
 from baserow.core.user.handler import UserHandler
 from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission


 def dummy_storage(tmpdir):
-    class FakeFileSystemStorage(default_storage.__class__):
+    storage = get_default_storage()
+
+    class FakeFileSystemStorage(storage.__class__):
         def exists(self, name: str) -> bool:
             return True

@@ -47,9 +49,7 @@ def test_files_are_served_by_base_file_storage_by_default(
 ):
     _, token = enterprise_data_fixture.create_user_and_token()

-    with patch(
-        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=dummy_storage(tmpdir)):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -78,9 +78,7 @@ def test_files_can_be_served_by_the_backend(
 ):
     _, token = enterprise_data_fixture.create_user_and_token()

-    with patch(
-        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=dummy_storage(tmpdir)):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -109,9 +107,7 @@ def test_secure_file_serve_requires_license_to_download_files(
 ):
     _, token = enterprise_data_fixture.create_user_and_token()

-    with patch(
-        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=dummy_storage(tmpdir)):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -146,7 +142,7 @@ def test_files_can_be_downloaded_by_the_backend_with_valid_license(
     _, token = enterprise_data_fixture.create_user_and_token()

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -158,9 +154,7 @@ def test_files_can_be_downloaded_by_the_backend_with_valid_license(
     assert response.status_code == HTTP_200_OK, response.json()
     backend_file_url = response.json()["url"]

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
         )
@@ -184,7 +178,7 @@ def test_files_urls_must_be_valid(
     _, token = enterprise_data_fixture.create_user_and_token()

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -198,9 +192,7 @@ def test_files_urls_must_be_valid(

     # Even with a dummy storage returning always the same file, if the signed data is
     # invalid the file cannot be downloaded
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             reverse("api:enterprise:files:download", kwargs={"signed_data": ""}),
         )
@@ -208,18 +200,14 @@ def test_files_urls_must_be_valid(

     # Even with a dummy storage returning always the same file, if the signed data is
     # invalid the file cannot be downloaded
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             reverse("api:enterprise:files:download", kwargs={"signed_data": "invalid"}),
         )
     assert response.status_code == HTTP_403_FORBIDDEN

     # Remove a couple of characters from the signed data
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", "")[:-2],
         )
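All of these tests funnel through the same seam. Condensed from the hunks above (and only a sketch, since the body of dummy_storage beyond exists() is not visible here):

    storage = dummy_storage(tmpdir)  # fake backend whose exists() always returns True
    with patch("baserow.core.storage.get_default_storage", new=storage):
        # Upload and download both resolve storage through the patched name,
        # so one context manager can cover the whole request cycle.
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(reverse("api:user_files:upload_file"), ...)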

@@ -244,7 +232,7 @@ def test_files_urls_can_expire(
     user = enterprise_data_fixture.create_user()

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2024-01-01 12:00:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             token = enterprise_data_fixture.generate_token(user)
@@ -260,9 +248,7 @@ def test_files_urls_can_expire(

     # before expiration the url can be accessed
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         freeze_time("2024-01-01 12:00:59"),
     ):
         token = enterprise_data_fixture.generate_token(user)
@@ -274,9 +260,7 @@ def test_files_urls_can_expire(

     # After expiration the url cannot be accessed anymore
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         freeze_time("2024-01-01 12:01:00"),
     ):
         token = enterprise_data_fixture.generate_token(user)
@@ -313,7 +297,7 @@ def test_only_authenticated_users_can_download_files(
     cookie = response.json()["user_session"]

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.post(
@@ -327,9 +311,7 @@ def test_only_authenticated_users_can_download_files(
     backend_file_url = response.json()["url"]

     # without cookie the url cannot be accessed
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -338,9 +320,7 @@ def test_only_authenticated_users_can_download_files(
     assert response.status_code == HTTP_403_FORBIDDEN

     # with cookie the url can be accessed
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -377,7 +357,7 @@ def test_sign_out_prevents_file_download(
     refresh_token = response.json()["refresh_token"]

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.post(
@@ -389,9 +369,7 @@ def test_sign_out_prevents_file_download(
     assert response.status_code == HTTP_200_OK, response.json()
     backend_file_url = response.json()["url"]

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -404,9 +382,7 @@ def test_sign_out_prevents_file_download(
     expires_at = datetime_from_epoch(RefreshToken(refresh_token)["exp"])
     UserHandler().blacklist_refresh_token(refresh_token, expires_at)

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -442,7 +418,7 @@ def test_deactivate_user_prevents_file_download(
     cookie = response.json()["user_session"]

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.post(
@@ -454,9 +430,7 @@ def test_deactivate_user_prevents_file_download(
     assert response.status_code == HTTP_200_OK, response.json()
     backend_file_url = response.json()["url"]

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -468,9 +442,7 @@ def test_deactivate_user_prevents_file_download(
     user.is_active = False
     user.save()

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
@@ -507,7 +479,7 @@ def test_files_urls_can_expire_also_for_authenticated_users(
     cookie = response.json()["user_session"]

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with freeze_time("2024-01-01 12:00:00"):
             file = SimpleUploadedFile("test.txt", b"Hello World")
             token = enterprise_data_fixture.generate_token(user)
@@ -523,9 +495,7 @@ def test_files_urls_can_expire_also_for_authenticated_users(

     # without cookie the url cannot be accessed
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         freeze_time("2024-01-01 12:00:59"),
     ):
         token = enterprise_data_fixture.generate_token(user)
@@ -537,9 +507,7 @@ def test_files_urls_can_expire_also_for_authenticated_users(

     # with cookie the url can be accessed
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         freeze_time("2024-01-01 12:00:59"),
     ):
         token = enterprise_data_fixture.generate_token(user)
@@ -552,9 +520,7 @@ def test_files_urls_can_expire_also_for_authenticated_users(

     # after expiration the url cannot be accessed anymore, even with cookie
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         freeze_time("2024-01-01 12:01:00"),
     ):
         token = enterprise_data_fixture.generate_token(user)
@@ -586,7 +552,7 @@ def test_user_must_have_access_to_workspace_to_download_file(

     set_current_workspace_id(workspace.id)
     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -600,9 +566,7 @@ def test_user_must_have_access_to_workspace_to_download_file(
     backend_file_url = response.json()["url"]

     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate"
         ) as mock_authenticate,
@@ -635,7 +599,7 @@ def test_user_with_wrong_workspace_cannot_download_file(

     set_current_workspace_id(workspace.id)
     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -649,9 +613,7 @@ def test_user_with_wrong_workspace_cannot_download_file(
     backend_file_url = response.json()["url"]

     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate"
         ) as mock_authenticate,
@@ -681,7 +643,7 @@ def test_staff_user_can_download_file_without_workspace(
     user, token = enterprise_data_fixture.create_user_and_token(is_staff=True)

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -694,9 +656,7 @@ def test_staff_user_can_download_file_without_workspace(
     backend_file_url = response.json()["url"]

     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate"
         ) as mock_authenticate,
@@ -730,7 +690,7 @@ def test_staff_user_cannot_download_file_outside_own_workspace(
     set_current_workspace_id(workspace.id)

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -744,9 +704,7 @@ def test_staff_user_cannot_download_file_outside_own_workspace(
     backend_file_url = response.json()["url"]

     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate"
         ) as mock_authenticate,
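The pair of frozen timestamps in the expiry tests (12:00:59 passes, 12:01:00 fails) implies a 60-second signature lifetime under the test settings. The handler file touched earlier in this commit imports BadSignature and SignatureExpired, which points at Django's signing machinery; a self-contained illustration of that mechanism, not code from this commit:

    from django.core.signing import SignatureExpired, TimestampSigner

    signer = TimestampSigner()
    token = signer.sign("path/to/file.txt")
    try:
        name = signer.unsign(token, max_age=60)  # raises once 60 seconds have passed
    except SignatureExpired:
        name = None  # treat the link as expired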

@@ -781,7 +739,7 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend(

     storage = dummy_storage(tmpdir)

-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         with django_capture_on_commit_callbacks(execute=True):
             response = api_client.post(
@@ -811,9 +769,7 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend(
     assert json["url"].startswith("http://localhost:8000/api/files/")

     # download it
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(json["url"].replace("http://localhost:8000", ""))

     assert response.status_code == HTTP_200_OK
@@ -848,7 +804,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend(
     }

     storage = dummy_storage(tmpdir)
-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with django_capture_on_commit_callbacks(execute=True):
             response = api_client.post(
                 reverse("api:enterprise:audit_log:async_export"),
@@ -879,9 +835,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend(
     assert job["url"].startswith("http://localhost:8000/api/files/")

     # download it
-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(job["url"].replace("http://localhost:8000", ""))

     assert response.status_code == HTTP_200_OK
@@ -903,7 +857,7 @@ def test_files_can_be_downloaded_with_dl_query_param_as_filename(
     _, token = enterprise_data_fixture.create_user_and_token()

     storage = dummy_storage(tmpdir)
-    with patch("baserow.core.user_files.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         file = SimpleUploadedFile("test.txt", b"Hello World")
         response = api_client.post(
             reverse("api:user_files:upload_file"),
@@ -916,27 +870,21 @@ def test_files_can_be_downloaded_with_dl_query_param_as_filename(
     backend_file_url = response.json()["url"]
     file_name = response.json()["name"]

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", ""),
         )
     assert response.status_code == HTTP_200_OK
     assert response.headers["Content-Disposition"] == f'inline; filename="{file_name}"'

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", "") + "?dl=",
         )
     assert response.status_code == HTTP_200_OK
     assert response.headers["Content-Disposition"] == f'inline; filename="{file_name}"'

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", "") + "?dl=download.txt",
         )
@@ -945,9 +893,7 @@ def test_files_can_be_downloaded_with_dl_query_param_as_filename(
         response.headers["Content-Disposition"] == 'attachment; filename="download.txt"'
     )

-    with patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-    ):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         response = api_client.get(
             backend_file_url.replace("http://localhost:8000", "") + "?dl=1",
         )
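The ?dl= assertions encode a small rule: an absent or empty dl parameter serves the file inline under its stored name, while a non-empty value forces a download. A hypothetical sketch of view logic consistent with the assertions visible here (the real view is not part of this diff):

    dl = request.GET.get("dl", "")
    if dl:
        response["Content-Disposition"] = f'attachment; filename="{dl}"'
    else:
        response["Content-Disposition"] = f'inline; filename="{file_name}"'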
@@ -991,7 +937,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend_with_workspace
     }

     storage = dummy_storage(tmpdir)
-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         with django_capture_on_commit_callbacks(execute=True):
             response = api_client.post(
                 reverse("api:enterprise:audit_log:async_export"),
@@ -1022,9 +968,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend_with_workspace

     # download it
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate",
             side_effect=[(wp_admin_user, None), (other_wp_admin_user, None)],
@@ -1062,7 +1006,7 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend_with

     storage = dummy_storage(tmpdir)

-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch("baserow.core.storage.get_default_storage", new=storage):
         token = enterprise_data_fixture.generate_token(user)
         with django_capture_on_commit_callbacks(execute=True):
             response = api_client.post(
@@ -1093,9 +1037,7 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend_with

     # download it
     with (
-        patch(
-            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
-        ),
+        patch("baserow.core.storage.get_default_storage", new=storage),
         patch(
             "baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate",
             side_effect=[(user, None), (other_user, None)],

@@ -1,6 +1,6 @@
 from datetime import datetime, timezone
 from io import BytesIO
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch

 from django.test.utils import override_settings

@@ -16,10 +16,12 @@ from baserow_enterprise.audit_log.job_types import AuditLogExportJobType

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_audit_log_export_csv_correctly(
-    storage_mock, enterprise_data_fixture, synced_roles
+    get_storage_mock, enterprise_data_fixture, synced_roles
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token()

     with freeze_time("2023-01-01 12:00:00"):
@@ -94,12 +96,14 @@ def test_audit_log_export_csv_correctly(


 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 @override_settings(DEBUG=True)
 @pytest.mark.skip("Need to re-build the translations first.")
 def test_audit_log_export_csv_in_the_user_language(
-    storage_mock, enterprise_data_fixture, synced_roles
+    get_storage_mock, enterprise_data_fixture, synced_roles
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token(
         language="it"
     )
@@ -136,11 +140,13 @@ def test_audit_log_export_csv_in_the_user_language(


 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 @override_settings(DEBUG=True)
 def test_deleting_audit_log_export_job_also_delete_exported_file(
-    storage_mock, enterprise_data_fixture, synced_roles
+    get_storage_mock, enterprise_data_fixture, synced_roles
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token()
     csv_settings = {
         "csv_column_separator": ",",
@@ -163,19 +169,26 @@ def test_deleting_audit_log_export_job_also_delete_exported_file(
     # ensure the clean_job method will delete the file
     assert csv_export_job.exported_file_name is not None

-    with patch("django.core.files.storage.default_storage.delete") as remove_mock:
+    with patch(
+        "baserow_enterprise.audit_log.job_types.get_default_storage"
+    ) as get_remove_mock:
+        remove_mock = storage_mock
+        get_remove_mock.return_value = remove_mock
         AuditLogExportJobType().before_delete(csv_export_job)
-        remove_mock.assert_called_once_with(
+        remove_mock.delete.assert_called_once_with(
             ExportHandler.export_file_path(csv_export_job.exported_file_name)
         )


 @pytest.mark.django_db
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 @override_settings(DEBUG=True)
 def test_audit_log_export_filters_work_correctly(
-    storage_mock, enterprise_data_fixture, synced_roles
+    get_storage_mock, enterprise_data_fixture, synced_roles
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock

     user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token()

     with freeze_time("2023-01-01 12:00:00"):
@@ -224,10 +237,12 @@ def test_audit_log_export_filters_work_correctly(

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_audit_log_export_workspace_csv_correctly(
-    storage_mock, enterprise_data_fixture, synced_roles
+    get_storage_mock, enterprise_data_fixture, synced_roles
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user, _ = enterprise_data_fixture.create_enterprise_admin_user_and_token()
     workspace = enterprise_data_fixture.create_workspace(user=user)

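The delete assertion above changes shape: previously the test patched default_storage.delete directly, now it patches the module-level factory, injects the shared mock, and asserts on the mock's delete attribute. The general shape, with job_type, job and expected_path as placeholders:

    with patch("baserow_enterprise.audit_log.job_types.get_default_storage") as factory:
        storage = MagicMock()
        factory.return_value = storage
        job_type.before_delete(job)  # code under test calls storage.delete(...)
        storage.delete.assert_called_once_with(expected_path)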
|
|
|
@ -1,4 +1,5 @@
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
from django.contrib.auth.models import AnonymousUser
|
from django.contrib.auth.models import AnonymousUser
|
||||||
from django.core.signing import SignatureExpired
|
from django.core.signing import SignatureExpired
|
||||||
|
@ -63,11 +64,14 @@ def test_secure_file_handler_get_file_path_exists():
|
||||||
handler = SecureFileServeHandler()
|
handler = SecureFileServeHandler()
|
||||||
|
|
||||||
data = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)
|
data = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)
|
||||||
|
|
||||||
with mock.patch(
|
with mock.patch(
|
||||||
"baserow_enterprise.secure_file_serve.handler.default_storage"
|
"baserow_enterprise.secure_file_serve.handler.get_default_storage"
|
||||||
) as mocked_default_storage:
|
) as get_storage_mock:
|
||||||
mocked_default_storage.exists.return_value = True
|
storage_mock = MagicMock()
|
||||||
|
storage_mock.exists.return_value = True
|
||||||
|
|
||||||
|
get_storage_mock.return_value = storage_mock
|
||||||
|
|
||||||
file_path = handler.get_file_path(data=data)
|
file_path = handler.get_file_path(data=data)
|
||||||
assert file_path == data.name
|
assert file_path == data.name
|
||||||
|
|
||||||
|
@ -78,7 +82,7 @@ def test_secure_file_handler_get_file_path_does_not_exist():
|
||||||
data = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)
|
data = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)
|
||||||
|
|
||||||
with mock.patch(
|
with mock.patch(
|
||||||
"baserow_enterprise.secure_file_serve.handler.default_storage"
|
"baserow.core.storage.get_default_storage"
|
||||||
) as mocked_default_storage:
|
) as mocked_default_storage:
|
||||||
mocked_default_storage.exists.return_value = False
|
mocked_default_storage.exists.return_value = False
|
||||||
with pytest.raises(SecureFileServeException) as error:
|
with pytest.raises(SecureFileServeException) as error:
|
||||||
|
@ -116,7 +120,7 @@ def test_secure_file_handler_extract_file_info_or_raise_non_existing_file():
|
||||||
handler = SecureFileServeHandler()
|
handler = SecureFileServeHandler()
|
||||||
|
|
||||||
with mock.patch(
|
with mock.patch(
|
||||||
"baserow_enterprise.secure_file_serve.handler.default_storage"
|
"baserow.core.storage.get_default_storage"
|
||||||
) as mocked_default_storage:
|
) as mocked_default_storage:
|
||||||
mocked_default_storage.exists.return_value = False
|
mocked_default_storage.exists.return_value = False
|
||||||
|
|
||||||
|
@@ -137,9 +141,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_disabled_perm
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         secure_file = handler.extract_file_info_or_raise(
             user=AnonymousUser(), signed_data=signed_data

@@ -161,9 +168,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_signed_in_wit
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         with pytest.raises(SecureFileServeException) as error:
             handler.extract_file_info_or_raise(

@@ -188,9 +198,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_signed_in_wit
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         secure_file = handler.extract_file_info_or_raise(
             user=user, signed_data=signed_data

@@ -212,9 +225,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_workspace_wit
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         with pytest.raises(SecureFileServeException) as error:
             handler.extract_file_info_or_raise(

@@ -243,9 +259,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_workspace_wro
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         with pytest.raises(SecureFileServeException) as error:
             handler.extract_file_info_or_raise(user=user_1, signed_data=signed_data)

@@ -270,9 +289,12 @@ def test_secure_file_handler_extract_file_info_or_raise_valid_data_workspace_wit
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         secure_file = handler.extract_file_info_or_raise(
             user=user, signed_data=signed_data

@@ -298,9 +320,12 @@ def test_secure_file_handler_extract_file_info_or_raise_staff_user_no_workspace(
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         secure_file = handler.extract_file_info_or_raise(
             user=user, signed_data=signed_data

@@ -328,9 +353,12 @@ def test_secure_file_handler_extract_file_info_or_raise_staff_user_within_own_wo
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         secure_file = handler.extract_file_info_or_raise(
             user=user, signed_data=signed_data

@@ -359,9 +387,12 @@ def test_secure_file_handler_extract_file_info_or_raise_staff_user_outside_own_w
     signed_data = storage.sign_data(name="path/to/file.txt")

     with mock.patch(
-        "baserow_enterprise.secure_file_serve.handler.default_storage"
-    ) as mocked_default_storage:
-        mocked_default_storage.exists.return_value = True
+        "baserow_enterprise.secure_file_serve.handler.get_default_storage"
+    ) as get_storage_mock:
+        storage_mock = MagicMock()
+        storage_mock.exists.return_value = True
+
+        get_storage_mock.return_value = storage_mock

         with pytest.raises(SecureFileServeException) as error:
             handler.extract_file_info_or_raise(user=user_1, signed_data=signed_data)
@@ -1,6 +1,5 @@
-from django.core.files.storage import default_storage
-
 from baserow.core.generative_ai.types import FileId
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.handler import UserFileHandler

@@ -20,6 +19,8 @@ class AIFileManager:
         :param workspace: Optional workspace of the file field.
         """

+        storage = get_default_storage()
+
         all_cell_files = getattr(row, f"field_{ai_field.ai_file_field.id}")
         if not isinstance(all_cell_files, list):
             # just a single file

@@ -34,7 +35,7 @@ class AIFileManager:
         for file in compatible_files:
             file_path = UserFileHandler().user_file_path(file["name"])
             try:
-                file_size = default_storage.size(file_path)
+                file_size = storage.size(file_path)
                 if file_size > max_file_size:
                     # skip files too large
                     continue

@@ -43,7 +44,7 @@ class AIFileManager:
                     return []
             except FileNotFoundError:
                 continue
            with default_storage.open(file_path, mode="rb") as storage_file:
-            with default_storage.open(file_path, mode="rb") as storage_file:
+            with storage.open(file_path, mode="rb") as storage_file:
                 file_ids.append(
                     generative_ai_model_type.upload_file(
                         storage_file.name, storage_file.read(), workspace=workspace
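In the `AIFileManager` change above, the storage backend is resolved once at the top of the method and reused for both the `size()` check and the `open()` call, instead of referencing a module-level `default_storage`. The commit's `baserow.core.storage.get_default_storage` itself is not shown in this excerpt; a minimal version consistent with how callers use it (they only rely on receiving a Django `Storage` object back) might look like:

    from django.core.files.storage import default_storage


    def get_default_storage():
        # A sketch only: the real Baserow implementation may contain extra
        # resolution logic, but callers just need a Storage instance back.
        return default_storage

Resolving the backend inside a function instead of at import time is what lets both the tests in this commit and alternative deployments swap the storage without reloading modules.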
@@ -74,7 +74,10 @@ def test_exporting_json_writes_file_to_storage(
     )
     storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")

-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch(
+        "baserow.contrib.database.export.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
         run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
         expected_created_at = DateTimeField().to_representation(run_time)
         with freeze_time(run_time):

@@ -228,7 +231,10 @@ def test_exporting_xml_writes_file_to_storage(
     )
     storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")

-    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
+    with patch(
+        "baserow.contrib.database.export.handler.get_default_storage"
+    ) as get_storage_mock:
+        get_storage_mock.return_value = storage
         run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
         with freeze_time(run_time):
             token = premium_data_fixture.generate_token(user)
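The two export-handler hunks above swap a direct `patch(..., new=storage)` of `default_storage` for patching the accessor and returning a real `FileSystemStorage` rooted in a temp directory, so the export code writes actual files the test can inspect. A sketch of that injection pattern (the `export_with_tmp_storage` helper and `run_export` callable are illustrative, not from this commit):

    from unittest import mock

    from django.core.files.storage import FileSystemStorage


    def export_with_tmp_storage(tmpdir, run_export):
        # Back the patched accessor with a real filesystem storage so the
        # code under test writes genuine files under tmpdir.
        storage = FileSystemStorage(location=str(tmpdir), base_url="http://localhost")
        with mock.patch(
            "baserow.contrib.database.export.handler.get_default_storage"
        ) as get_storage_mock:
            get_storage_mock.return_value = storage  # every call yields the tmp storage
            return run_export()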
@@ -1,6 +1,6 @@
 from datetime import timezone
 from io import BytesIO
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch

 from django.test.utils import override_settings
 from django.utils.dateparse import parse_date, parse_datetime

@@ -23,10 +23,13 @@ def _parse_date(date):

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_can_export_every_interesting_different_field_to_json(
-    storage_mock, premium_data_fixture
+    get_storage_mock, premium_data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
+
     contents = run_export_over_interesting_test_table(
         premium_data_fixture,
         storage_mock,
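The decorator form used above injects the patched accessor as the first test argument, which is why the parameter is renamed from `storage_mock` to `get_storage_mock`; each test then builds its own `MagicMock` storage and wires it up, keeping a direct handle for later assertions. A minimal sketch of the shape these tests share (the test name, body, and final assertion are illustrative):

    from unittest.mock import MagicMock, patch


    @patch("baserow.contrib.database.export.handler.get_default_storage")
    def test_export_goes_through_storage(get_storage_mock):
        # The decorator hands in the mocked *accessor*; build the storage
        # mock explicitly so it can be passed to helpers and asserted on.
        storage_mock = MagicMock()
        get_storage_mock.return_value = storage_mock

        # ... run the export under test here ...

        # Afterwards the test can verify storage interactions, e.g.:
        # storage_mock.open.assert_called()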
@@ -236,8 +239,12 @@ def test_cannot_export_json_without_premium_license(storage_mock, premium_data_f

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_cannot_export_json_without_premium_license(storage_mock, premium_data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_cannot_export_json_without_premium_license(
+    get_storage_mock, premium_data_fixture
+):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     with pytest.raises(FeaturesNotAvailableError):
         run_export_over_interesting_test_table(
             premium_data_fixture, storage_mock, {"exporter_type": "json"}

@@ -246,10 +253,12 @@ def test_cannot_export_json_without_premium_license(storage_mock, premium_data_f

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_cannot_export_json_without_premium_license_for_group(
-    storage_mock, premium_data_fixture, alternative_per_workspace_license_service
+    get_storage_mock, premium_data_fixture, alternative_per_workspace_license_service
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     # Setting the group id to `0` will make sure that the user doesn't have
     # premium access to the group.
     user = premium_data_fixture.create_user(has_active_premium_license=True)

@@ -262,8 +271,10 @@ def test_cannot_export_json_without_premium_license_for_group(

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_if_duplicate_field_names_json_export(storage_mock, premium_data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_if_duplicate_field_names_json_export(get_storage_mock, premium_data_fixture):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = premium_data_fixture.create_user(has_active_premium_license=True)
     database = premium_data_fixture.create_database_application(user=user)
     table = premium_data_fixture.create_database_table(database=database)

@@ -295,10 +306,12 @@ def test_if_duplicate_field_names_json_export(storage_mock, premium_data_fixture

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_can_export_every_interesting_different_field_to_xml(
-    storage_mock, premium_data_fixture
+    get_storage_mock, premium_data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     xml = run_export_over_interesting_test_table(
         premium_data_fixture,
         storage_mock,

@@ -507,10 +520,12 @@ def test_can_export_every_interesting_different_field_to_xml(

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_if_xml_duplicate_name_and_value_are_escaped(
-    storage_mock, premium_data_fixture
+    get_storage_mock, premium_data_fixture
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     user = premium_data_fixture.create_user(has_active_premium_license=True)
     database = premium_data_fixture.create_database_application(user=user)
     table = premium_data_fixture.create_database_table(database=database)

@@ -551,8 +566,12 @@ def test_if_xml_duplicate_name_and_value_are_escaped(

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
-def test_cannot_export_xml_without_premium_license(storage_mock, premium_data_fixture):
+@patch("baserow.contrib.database.export.handler.get_default_storage")
+def test_cannot_export_xml_without_premium_license(
+    get_storage_mock, premium_data_fixture
+):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     with pytest.raises(FeaturesNotAvailableError):
         run_export_over_interesting_test_table(
             premium_data_fixture, storage_mock, {"exporter_type": "xml"}

@@ -561,10 +580,12 @@ def test_cannot_export_xml_without_premium_license(storage_mock, premium_data_fi

 @pytest.mark.django_db
 @override_settings(DEBUG=True)
-@patch("baserow.contrib.database.export.handler.default_storage")
+@patch("baserow.contrib.database.export.handler.get_default_storage")
 def test_cannot_export_xml_without_premium_license_for_group(
-    storage_mock, premium_data_fixture, alternative_per_workspace_license_service
+    get_storage_mock, premium_data_fixture, alternative_per_workspace_license_service
 ):
+    storage_mock = MagicMock()
+    get_storage_mock.return_value = storage_mock
     # Setting the group id to `0` will make sure that the user doesn't have
     # premium access to the group.
     user = premium_data_fixture.create_user(has_active_premium_license=True)
@@ -1,13 +1,12 @@
 from io import BytesIO
 from unittest.mock import patch

-from django.core.files.storage import default_storage
-
 import pytest
 from baserow_premium.fields.tasks import generate_ai_values_for_rows

 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.generative_ai.exceptions import GenerativeAIPromptError
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.handler import UserFileHandler

@@ -165,6 +164,8 @@ def test_generate_ai_field_value_view_generative_ai_invalid_prompt(
 def test_generate_ai_field_value_view_generative_ai_with_files(
     patched_rows_updated, premium_data_fixture
 ):
+    storage = get_default_storage()
+
     premium_data_fixture.register_fake_generate_ai_type()
     user = premium_data_fixture.create_user(
         email="test@test.nl", password="password", first_name="Test1"

@@ -185,7 +186,7 @@ def test_generate_ai_field_value_view_generative_ai_with_files(
     )
     table_model = table.get_model()
     user_file_1 = UserFileHandler().upload_user_file(
-        user, "aifile.txt", BytesIO(b"Text in file"), storage=default_storage
+        user, "aifile.txt", BytesIO(b"Text in file"), storage=storage
     )
     values = {f"field_{file_field.id}": [{"name": user_file_1.name}]}
     row = RowHandler().force_create_row(
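This file and the one below make the same change: instead of passing Django's module-level `default_storage` into `upload_user_file`, the tests fetch whatever backend is configured through `get_default_storage()` and pass that explicitly. A small helper capturing the pattern (the helper name and default content are illustrative, not from this commit):

    from io import BytesIO

    from baserow.core.storage import get_default_storage
    from baserow.core.user_files.handler import UserFileHandler


    def upload_fixture_file(user, content=b"Hello"):
        # Resolve the configured backend once, then hand it to the handler
        # explicitly, mirroring the updated tests.
        storage = get_default_storage()
        return UserFileHandler().upload_user_file(
            user, "aifile.txt", BytesIO(content), storage=storage
        )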
@@ -1,18 +1,19 @@
 from io import BytesIO
 from unittest.mock import Mock

-from django.core.files.storage import default_storage
-
 import pytest
 from baserow_premium.generative_ai.managers import AIFileManager

 from baserow.contrib.database.rows.handler import RowHandler
+from baserow.core.storage import get_default_storage
 from baserow.core.user_files.handler import UserFileHandler
 from baserow.test_utils.fixtures.generative_ai import TestGenerativeAIWithFilesModelType


 @pytest.mark.django_db
 def test_upload_files_from_file_field(premium_data_fixture):
+    storage = get_default_storage()
+
     user = premium_data_fixture.create_user()
     generative_ai_model_type = TestGenerativeAIWithFilesModelType()
     table = premium_data_fixture.create_database_table()

@@ -23,7 +24,7 @@ def test_upload_files_from_file_field(premium_data_fixture):
         table=table, order=1, name="AI prompt", ai_file_field=file_field
     )
     user_file_1 = UserFileHandler().upload_user_file(
-        user, "aifile.txt", BytesIO(b"Hello"), storage=default_storage
+        user, "aifile.txt", BytesIO(b"Hello"), storage=storage
     )
     table_model = table.get_model()

@@ -49,6 +50,8 @@ def test_upload_files_from_file_field(premium_data_fixture):

 @pytest.mark.django_db
 def test_upload_files_from_file_field_skip_files_over_max_size(premium_data_fixture):
+    storage = get_default_storage()
+
     user = premium_data_fixture.create_user()
     generative_ai_model_type = TestGenerativeAIWithFilesModelType()
     table = premium_data_fixture.create_database_table()

@@ -59,7 +62,7 @@ def test_upload_files_from_file_field_skip_files_over_max_size(premium_data_fixt
         table=table, order=1, name="AI prompt", ai_file_field=file_field
     )
     user_file_1 = UserFileHandler().upload_user_file(
-        user, "aifile.txt", BytesIO(b"Hello"), storage=default_storage
+        user, "aifile.txt", BytesIO(b"Hello"), storage=storage
     )
     table_model = table.get_model()
     values = {f"field_{file_field.id}": [{"name": user_file_1.name}]}