
Resolve "Allow exporting workspace applications"

Przemyslaw Kukulski 2024-10-10 16:12:12 +00:00
parent 3e0e5e574b
commit 89fbe62d0d
35 changed files with 1574 additions and 231 deletions
backend
changelog/entries/unreleased/feature
enterprise
backend/tests/baserow_enterprise_tests
web-frontend/modules/baserow_enterprise
premium/backend/tests/baserow_premium_tests
web-frontend/modules/core

View file

@@ -0,0 +1,49 @@
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from baserow.core.context import clear_current_workspace_id, set_current_workspace_id
from baserow.core.import_export_handler import ImportExportHandler
from baserow.core.storage import get_default_storage
class CoreExportedFileURLSerializerMixin(serializers.Serializer):
url = serializers.SerializerMethodField()
def get_handler(self):
"""Define handler used for url generation.
That handler needs to to implement method `export_file_path`.
"""
raise NotImplementedError("Subclasses must implement this method.")
def get_instance_attr(self, instance, name):
return getattr(instance, name)
@extend_schema_field(OpenApiTypes.URI)
def get_url(self, instance):
if hasattr(instance, "workspace_id"):
# FIXME: Temporarily setting the current workspace ID for URL generation in
# storage backends, enabling permission checks at download time.
try:
set_current_workspace_id(instance.workspace_id)
return self._get_url(instance)
finally:
clear_current_workspace_id()
else:
return self._get_url(instance)
def _get_url(self, instance):
handler = self.get_handler()
name = self.get_instance_attr(instance, "exported_file_name")
if name:
path = handler.export_file_path(name)
storage = get_default_storage()
return storage.url(path)
else:
return None
class ExportWorkspaceExportedFileURLSerializerMixin(CoreExportedFileURLSerializerMixin):
def get_handler(self):
return ImportExportHandler()
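
For context, a minimal sketch of how this mixin is meant to be combined with a concrete job serializer; the serializer name is hypothetical, while the model and field names come from the ExportApplicationsJob model added later in this commit:

from rest_framework import serializers

from baserow.api.export.serializers import (
    ExportWorkspaceExportedFileURLSerializerMixin,
)
from baserow.core.models import ExportApplicationsJob


class ExampleExportJobSerializer(
    ExportWorkspaceExportedFileURLSerializerMixin, serializers.ModelSerializer
):
    # `url` is provided by the mixin and resolved through the handler returned
    # by `get_handler`; `exported_file_name` is read straight from the model.
    class Meta:
        model = ExportApplicationsJob
        fields = ("id", "exported_file_name", "url")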

View file

@@ -3,6 +3,7 @@ from django.urls import include, path, re_path
from .invitations import urls as invitation_urls
from .users import urls as user_urls
from .views import (
AsyncExportWorkspaceApplicationsView,
CreateInitialWorkspaceView,
WorkspaceGenerativeAISettingsView,
WorkspaceLeaveView,
@@ -38,4 +39,9 @@ urlpatterns = [
CreateInitialWorkspaceView.as_view(),
name="create_initial_workspace",
),
re_path(
r"(?P<workspace_id>[0-9]+)/export/async/$",
AsyncExportWorkspaceApplicationsView.as_view(),
name="export_workspace_async",
),
]
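
A quick sketch of resolving the new route; the final path prefix is an assumption about where these workspace urls are mounted:

from django.urls import reverse

# Resolves to something like "/api/workspaces/42/export/async/",
# depending on where the workspace urls are included.
url = reverse(
    "api:workspaces:export_workspace_async", kwargs={"workspace_id": 42}
)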

View file

@@ -1,17 +1,23 @@
from typing import Dict
from django.db import transaction
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework import serializers, status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.errors import (
ERROR_GROUP_DOES_NOT_EXIST,
ERROR_USER_INVALID_GROUP_PERMISSIONS,
ERROR_USER_NOT_IN_GROUP,
)
from baserow.api.jobs.errors import ERROR_MAX_JOB_COUNT_EXCEEDED
from baserow.api.jobs.serializers import JobSerializer
from baserow.api.schemas import (
CLIENT_SESSION_ID_SCHEMA_PARAMETER,
CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
@@ -30,12 +36,18 @@ from baserow.core.actions import (
UpdateWorkspaceActionType,
)
from baserow.core.exceptions import (
ApplicationDoesNotExist,
UserInvalidWorkspacePermissionsError,
UserNotInWorkspace,
WorkspaceDoesNotExist,
WorkspaceUserIsLastAdmin,
)
from baserow.core.feature_flags import FF_EXPORT_WORKSPACE, feature_flag_is_enabled
from baserow.core.handler import CoreHandler
from baserow.core.job_types import ExportApplicationsJobType
from baserow.core.jobs.exceptions import MaxJobCountExceeded
from baserow.core.jobs.handler import JobHandler
from baserow.core.jobs.registries import job_type_registry
from baserow.core.notifications.handler import NotificationHandler
from baserow.core.operations import UpdateWorkspaceOperationType
from baserow.core.trash.exceptions import CannotDeleteAlreadyDeletedItem
@@ -48,6 +60,21 @@ from .serializers import (
get_generative_ai_settings_serializer,
)
ExportApplicationsJobRequestSerializer = job_type_registry.get(
ExportApplicationsJobType.type
).get_serializer_class(
base_class=serializers.Serializer,
request_serializer=True,
meta_ref_name="SingleExportApplicationsJobRequestSerializer",
)
ExportApplicationsJobResponseSerializer = job_type_registry.get(
ExportApplicationsJobType.type
).get_serializer_class(
base_class=serializers.Serializer,
meta_ref_name="SingleExportApplicationsJobResponseSerializer",
)
class WorkspacesView(APIView):
permission_classes = (IsAuthenticated,)
@@ -442,3 +469,74 @@ class CreateInitialWorkspaceView(APIView):
CreateInitialWorkspaceActionType
).do(request.user)
return Response(WorkspaceUserWorkspaceSerializer(workspace_user).data)
class AsyncExportWorkspaceApplicationsView(APIView):
permission_classes = (IsAuthenticated,)
@extend_schema(
parameters=[
OpenApiParameter(
name="workspace_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description="The id of the workspace that must be exported.",
),
CLIENT_SESSION_ID_SCHEMA_PARAMETER,
],
tags=["Workspace"],
operation_id="export_workspace_applications_async",
description=(
"Exports the workspace, or a chosen set of its applications, if the "
"authorized user is in the workspace. "
"All the related children are also going to be exported. For example, "
"in case of a database application all the underlying tables, fields, "
"views and rows are going to be exported. "
"Roles are not part of the export."
),
request=None,
responses={
202: ExportApplicationsJobResponseSerializer,
400: get_error_schema(
[
"ERROR_USER_NOT_IN_GROUP",
"ERROR_APPLICATION_NOT_IN_GROUP",
"ERROR_MAX_JOB_COUNT_EXCEEDED",
]
),
404: get_error_schema(
[
"ERROR_GROUP_DOES_NOT_EXIST",
"ERROR_APPLICATION_DOES_NOT_EXIST",
]
),
},
)
@transaction.atomic
@map_exceptions(
{
WorkspaceDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
MaxJobCountExceeded: ERROR_MAX_JOB_COUNT_EXCEEDED,
}
)
@validate_body(ExportApplicationsJobRequestSerializer, return_validated=True)
def post(self, request, data: Dict, workspace_id: int) -> Response:
"""
Exports the listed applications of a workspace to a ZIP file containing the
applications' data. If the list of applications is empty, all applications of
the workspace are exported.
"""
feature_flag_is_enabled(FF_EXPORT_WORKSPACE, raise_if_disabled=True)
job = JobHandler().create_and_start_job(
request.user,
ExportApplicationsJobType.type,
workspace_id=workspace_id,
application_ids=data.get("application_ids") or [],
)
serializer = job_type_registry.get_serializer(job, JobSerializer)
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
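
A hedged sketch of the client flow this view enables; the host, the token, and the generic jobs endpoint used for polling are placeholders and assumptions, mirroring what the new tests below do through the API client:

import requests

BASE = "http://localhost:8000"  # placeholder host
HEADERS = {"Authorization": "JWT <token>"}  # placeholder JWT token

# Start the export; an empty application_ids list exports every application.
job = requests.post(
    f"{BASE}/api/workspaces/42/export/async/",
    json={"application_ids": []},
    headers=HEADERS,
).json()

# Poll the generic jobs endpoint until "state" is "finished", then download
# the archive from the "url" field of the job payload.
status = requests.get(f"{BASE}/api/jobs/{job['id']}/", headers=HEADERS).json()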

View file

@@ -4,11 +4,10 @@ from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import fields, serializers
from baserow.api.export.serializers import CoreExportedFileURLSerializerMixin
from baserow.contrib.database.export.handler import ExportHandler
from baserow.contrib.database.export.models import ExportJob
from baserow.contrib.database.export.registries import table_exporter_registry
from baserow.core.context import clear_current_workspace_id, set_current_workspace_id
from baserow.core.storage import get_default_storage
# This is a map from the front end supported charsets to the internal python supported
# charset value as they do not always match up.
@@ -60,38 +59,14 @@ SUPPORTED_CSV_COLUMN_SEPARATORS = [
]
class ExportedFileURLSerializerMixin(serializers.Serializer):
class ExportedFileURLSerializerMixin(CoreExportedFileURLSerializerMixin):
"""
When mixed into a model serializer for an ExportJob, this will add a url field
with the actual usable url of the export job's file (if it has one).
"""
url = serializers.SerializerMethodField()
def get_instance_attr(self, instance, name):
return getattr(instance, name)
@extend_schema_field(OpenApiTypes.URI)
def get_url(self, instance):
if hasattr(instance, "workspace_id"):
# FIXME: Temporarily setting the current workspace ID for URL generation in
# storage backends, enabling permission checks at download time.
try:
set_current_workspace_id(instance.workspace_id)
return self._get_url(instance)
finally:
clear_current_workspace_id()
else:
return self._get_url(instance)
def _get_url(self, instance):
name = self.get_instance_attr(instance, "exported_file_name")
if name:
path = ExportHandler().export_file_path(name)
storage = get_default_storage()
return storage.url(path)
else:
return None
def get_handler(self):
return ExportHandler()
class ExportJobSerializer(ExportedFileURLSerializerMixin, serializers.ModelSerializer):

View file

@@ -1,8 +1,7 @@
import uuid
from datetime import datetime, timezone
from io import BytesIO
from os.path import join
from typing import Any, BinaryIO, Dict, Optional
from typing import Any, Dict, Optional
from django.conf import settings
from django.contrib.auth import get_user_model
@@ -26,7 +25,10 @@ from baserow.contrib.database.views.exceptions import ViewNotInTable
from baserow.contrib.database.views.models import View
from baserow.contrib.database.views.registries import view_type_registry
from baserow.core.handler import CoreHandler
from baserow.core.storage import get_default_storage
from baserow.core.storage import (
_create_storage_dir_if_missing_and_open,
get_default_storage,
)
from .exceptions import (
ExportJobCanceledException,
@@ -301,27 +303,3 @@ def _open_file_and_run_export(job: ExportJob) -> ExportJob:
def _generate_random_file_name_with_extension(file_extension):
return str(uuid.uuid4()) + file_extension
def _create_storage_dir_if_missing_and_open(storage_location, storage=None) -> BinaryIO:
"""
Attempts to open the provided storage location in binary overwriting write mode.
If it encounters a FileNotFoundError, it will attempt to create the folder
structure leading up to the storage location and then open again.
:param storage_location: The storage location to open and ensure folders for.
:param storage: The storage to use, if None will use the default storage.
:return: The open file descriptor for the storage_location
"""
storage = storage or get_default_storage()
try:
return storage.open(storage_location, "wb+")
except FileNotFoundError:
# Django's file system storage will not attempt to create a missing
# EXPORT_FILES_DIRECTORY and instead will throw a FileNotFoundError.
# So we first save an empty file which will create any missing directories
# and then open again.
storage.save(storage_location, BytesIO())
return storage.open(storage_location, "wb")

View file

@@ -17,6 +17,7 @@ from baserow.core.action.scopes import (
WorkspaceActionScopeType,
)
from baserow.core.handler import CoreHandler, WorkspaceForUpdate
from baserow.core.import_export_handler import ImportExportHandler
from baserow.core.models import (
Application,
Template,
@@ -24,7 +25,7 @@ from baserow.core.models import (
WorkspaceInvitation,
WorkspaceUser,
)
from baserow.core.registries import application_type_registry
from baserow.core.registries import ImportExportConfig, application_type_registry
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import ChildProgressBuilder
@@ -1135,3 +1136,68 @@ class CreateInitialWorkspaceActionType(ActionType):
@classmethod
def scope(cls) -> ActionScopeStr:
return RootActionScopeType.value()
class ExportApplicationsActionType(ActionType):
type = "export_applications"
description = ActionTypeDescription(
_("Export applications"),
_('Applications "%(application_names)s" (%(application_ids)s) exported'),
WORKSPACE_ACTION_CONTEXT,
)
analytics_params = [
"workspace_id",
"application_ids",
]
@dataclasses.dataclass
class Params:
workspace_id: int
workspace_name: str
application_ids: List[int]
application_names: List[str]
@classmethod
def do(
cls,
user: AbstractUser,
workspace: Workspace,
applications: List[Application],
progress_builder: Optional[ChildProgressBuilder] = None,
) -> str:
"""
Exports the provided set of applications from the given workspace.
This action is read-only and is not undoable.
:param user: The user on whose behalf the applications are exported.
:param workspace: Workspace instance from which applications are exported.
:param applications: List of application instances to be exported.
:param progress_builder: A progress builder instance that can be used to
track the progress of the export.
:return: The file name of the exported applications.
"""
cli_import_export_config = ImportExportConfig(
include_permission_data=False, reduce_disk_space_usage=False
)
file_name = ImportExportHandler().export_workspace_applications(
workspace,
import_export_config=cli_import_export_config,
applications=applications,
progress_builder=progress_builder,
)
params = cls.Params(
workspace_id=workspace.id,
workspace_name=workspace.name,
application_ids=[application.id for application in applications],
application_names=[application.name for application in applications],
)
cls.register_action(user, params, cls.scope(workspace.id), workspace=workspace)
return file_name
@classmethod
def scope(cls, workspace_id: int) -> ActionScopeStr:
return WorkspaceActionScopeType.value(workspace_id)
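
A short sketch of invoking the action directly, mirroring what ExportApplicationsJobType.run does further below; `user` and `workspace` are assumed to be in scope:

from baserow.core.action.registries import action_type_registry
from baserow.core.actions import ExportApplicationsActionType
from baserow.core.models import Application

# Runs the export and registers the read-only, non-undoable action.
file_name = action_type_registry.get_by_type(ExportApplicationsActionType).do(
    user,
    workspace=workspace,
    applications=list(Application.objects.filter(workspace=workspace)),
)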

View file

@@ -189,6 +189,7 @@ class CoreConfig(AppConfig):
DeleteWorkspaceActionType,
DeleteWorkspaceInvitationActionType,
DuplicateApplicationActionType,
ExportApplicationsActionType,
InstallTemplateActionType,
LeaveWorkspaceActionType,
OrderApplicationsActionType,
@@ -216,6 +217,7 @@
action_type_registry.register(UpdateWorkspaceInvitationActionType())
action_type_registry.register(LeaveWorkspaceActionType())
action_type_registry.register(CreateInitialWorkspaceActionType())
action_type_registry.register(ExportApplicationsActionType())
from baserow.core.snapshots.actions import (
CreateSnapshotActionType,
@@ -271,13 +273,18 @@
from baserow.core.jobs.registries import job_type_registry
from .job_types import DuplicateApplicationJobType, InstallTemplateJobType
from .job_types import (
DuplicateApplicationJobType,
ExportApplicationsJobType,
InstallTemplateJobType,
)
from .snapshots.job_types import CreateSnapshotJobType, RestoreSnapshotJobType
job_type_registry.register(DuplicateApplicationJobType())
job_type_registry.register(InstallTemplateJobType())
job_type_registry.register(CreateSnapshotJobType())
job_type_registry.register(RestoreSnapshotJobType())
job_type_registry.register(ExportApplicationsJobType())
from baserow.api.notifications.user_data_types import (
UnreadUserNotificationsCountPermissionsDataType,

View file

@@ -0,0 +1,169 @@
import json
import uuid
from os.path import join
from typing import Dict, List, Optional
from zipfile import ZIP_DEFLATED, ZipFile
from django.conf import settings
from django.core.files.base import ContentFile
from django.core.files.storage import Storage
from opentelemetry import trace
from baserow.core.models import Application, Workspace
from baserow.core.registries import ImportExportConfig, application_type_registry
from baserow.core.storage import (
_create_storage_dir_if_missing_and_open,
get_default_storage,
)
from baserow.core.telemetry.utils import baserow_trace_methods
from baserow.core.utils import ChildProgressBuilder, Progress
tracer = trace.get_tracer(__name__)
class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
def export_application(
self,
app: Application,
import_export_config: ImportExportConfig,
files_zip: ZipFile,
storage: Storage,
progress: Progress,
) -> Dict:
"""
Exports a single application (structure, content and assets) to a zip file.
:param app: Application instance that will be exported
:param import_export_config: provides configuration options for the
import/export process to customize how it works.
:param files_zip: ZipFile instance to which the exported data will be written
:param storage: The storage where the export will be stored.
:param progress: Progress instance that allows tracking of the export progress.
:return: The exported and serialized application.
"""
application = app.specific
application_type = application_type_registry.get_by_model(application)
with application_type.export_safe_transaction_context(application):
exported_application = application_type.export_serialized(
application, import_export_config, files_zip, storage
)
progress.increment()
return exported_application
def export_multiple_applications(
self,
applications: List[Application],
import_export_config: ImportExportConfig,
files_zip: ZipFile,
storage: Storage,
progress: Progress,
) -> List[Dict]:
"""
Exports multiple applications (structure, content and assets) to a zip file.
:param applications: Application instances that will be exported
:param import_export_config: provides configuration options for the
import/export process to customize how it works.
:param files_zip: ZipFile instance to which the exported data will be written
:param storage: The storage where the export will be stored.
:param progress: Progress instance that allows tracking of the export progress.
:return: The exported and serialized applications.
"""
exported_applications = []
for app in applications:
exported_application = self.export_application(
app, import_export_config, files_zip, storage, progress
)
exported_applications.append(exported_application)
return exported_applications
def export_json_data(
self,
file_name: str,
exported_applications: List[Dict],
files_zip: ZipFile,
storage: Storage,
) -> None:
"""
Exports application data (structure and content) to a JSON file
and puts it in the zip file.
:param file_name: name of the file that will be created with exported data
:param exported_applications: exported and serialized applications
:param files_zip: ZipFile instance to which the exported data will be written
:param storage: The storage where the files will be stored
"""
temp_json_file_name = f"temp_{file_name}_{uuid.uuid4()}.json"
temp_json_file_path = storage.save(temp_json_file_name, ContentFile(""))
with storage.open(temp_json_file_path, "w") as temp_json_file:
json.dump(exported_applications, temp_json_file, indent=None)
with storage.open(temp_json_file_path, "rb") as temp_json_file:
files_zip.write(temp_json_file.name, file_name)
storage.delete(temp_json_file_path)
def export_file_path(self, file_name: str) -> str:
"""
Returns the full path for the given file_name, which will be used
to store the file within the storage.
:param file_name: The name of the file.
:return: The full path to the file.
"""
return join(settings.EXPORT_FILES_DIRECTORY, file_name)
def export_workspace_applications(
self,
workspace: Workspace,
import_export_config: ImportExportConfig,
applications: List[Application],
storage: Optional[Storage] = None,
progress_builder: Optional[ChildProgressBuilder] = None,
) -> str:
"""
Creates a zip file with the exported applications. If the applications param
is provided, only those applications will be exported.
:param workspace: The workspace of which the applications will be exported.
:param import_export_config: provides configuration options for the
import/export process to customize how it works.
:param applications: A list of Application instances that will be exported.
:param storage: The storage where the files will be stored. If not provided
the default storage will be used.
:param progress_builder: A progress builder that allows for publishing progress.
:return: name of the zip file with exported applications
"""
storage = storage or get_default_storage()
applications = applications or []
progress = ChildProgressBuilder.build(progress_builder, child_total=100)
export_app_progress = progress.create_child(80, len(applications))
zip_file_name = f"workspace_{workspace.id}_{uuid.uuid4()}.zip"
json_file_name = "data/workspace_export.json"
export_path = self.export_file_path(zip_file_name)
with _create_storage_dir_if_missing_and_open(
export_path, storage
) as files_buffer:
with ZipFile(files_buffer, "a", ZIP_DEFLATED, False) as files_zip:
exported_applications = self.export_multiple_applications(
applications,
import_export_config,
files_zip,
storage,
export_app_progress,
)
self.export_json_data(
json_file_name, exported_applications, files_zip, storage
)
progress.increment(by=20)
return zip_file_name
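
A minimal usage sketch, assuming `workspace` and `database` instances are in scope (the new tests below exercise the same path); it also shows how `export_file_path` locates the resulting archive:

from baserow.core.import_export_handler import ImportExportHandler
from baserow.core.registries import ImportExportConfig

handler = ImportExportHandler()
config = ImportExportConfig(
    include_permission_data=False, reduce_disk_space_usage=False
)
zip_name = handler.export_workspace_applications(
    workspace, import_export_config=config, applications=[database]
)
# With the default EXPORT_FILES_DIRECTORY this resolves to
# "export_files/<zip_name>" inside the configured storage.
zip_path = handler.export_file_path(zip_name)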

View file

@@ -1,9 +1,11 @@
from contextlib import contextmanager
from typing import Any, Dict, List
from django.contrib.auth.models import AbstractUser
from rest_framework import serializers
from baserow.api.applications.errors import ERROR_APPLICATION_DOES_NOT_EXIST
from baserow.api.applications.serializers import (
InstallTemplateJobApplicationsSerializer,
PolymorphicApplicationResponseSerializer,
@@ -11,8 +13,10 @@ from baserow.api.applications.serializers import (
from baserow.api.errors import (
ERROR_GROUP_DOES_NOT_EXIST,
ERROR_MAX_LOCKS_PER_TRANSACTION_EXCEEDED,
ERROR_PERMISSION_DENIED,
ERROR_USER_NOT_IN_GROUP,
)
from baserow.api.export.serializers import ExportWorkspaceExportedFileURLSerializerMixin
from baserow.api.templates.errors import (
ERROR_TEMPLATE_DOES_NOT_EXIST,
ERROR_TEMPLATE_FILE_DOES_NOT_EXIST,
@@ -22,10 +26,13 @@ from baserow.api.workspaces.serializers import WorkspaceSerializer
from baserow.core.action.registries import action_type_registry
from baserow.core.actions import (
DuplicateApplicationActionType,
ExportApplicationsActionType,
InstallTemplateActionType,
)
from baserow.core.exceptions import (
ApplicationDoesNotExist,
DuplicateApplicationMaxLocksExceededException,
PermissionDenied,
TemplateDoesNotExist,
TemplateFileDoesNotExist,
UserNotInWorkspace,
@@ -33,8 +40,17 @@ from baserow.core.exceptions import (
)
from baserow.core.handler import CoreHandler
from baserow.core.jobs.registries import JobType
from baserow.core.models import Application, DuplicateApplicationJob, InstallTemplateJob
from baserow.core.operations import CreateApplicationsWorkspaceOperationType
from baserow.core.models import (
Application,
DuplicateApplicationJob,
ExportApplicationsJob,
InstallTemplateJob,
)
from baserow.core.operations import (
CreateApplicationsWorkspaceOperationType,
ListApplicationsWorkspaceOperationType,
ReadWorkspaceOperationType,
)
from baserow.core.registries import application_type_registry
from baserow.core.utils import Progress
@@ -188,3 +204,135 @@ class InstallTemplateJobType(JobType):
job.save(update_fields=("installed_applications",))
return installed_applications
class ExportApplicationsJobType(JobType):
type = "export_applications"
model_class = ExportApplicationsJob
max_count = 1
api_exceptions_map = {
UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
WorkspaceDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
ApplicationDoesNotExist: ERROR_APPLICATION_DOES_NOT_EXIST,
}
job_exceptions_map = {PermissionDenied: ERROR_PERMISSION_DENIED}
request_serializer_field_names = ["workspace_id", "application_ids"]
request_serializer_field_overrides = {
"application_ids": serializers.ListField(
allow_null=True,
allow_empty=True,
required=False,
child=serializers.IntegerField(),
help_text=(
"The application IDs to export. If not provided, all the applications for "
"the workspace will be exported."
),
),
"only_structure": serializers.BooleanField(
required=False,
default=False,
help_text=(
"If True, only the structure of the applications will be exported. "
"If False, the data will be included as well."
),
),
}
serializer_mixins = [ExportWorkspaceExportedFileURLSerializerMixin]
serializer_field_names = ["exported_file_name", "url"]
def transaction_atomic_context(self, job: "ExportApplicationsJob"):
"""
Each application is isolated, so a single transaction for all of them together
is unnecessary and increases the risk of exceeding `max_locks_per_transaction`.
Instead, the `import_export_handler` creates a transaction for each
application at the `repeatable read` isolation level to guarantee consistency
in the data read.
"""
@contextmanager
def empty_context():
yield
return empty_context()
def get_workspace_and_applications(self, user, workspace_id, application_ids):
handler = CoreHandler()
workspace = handler.get_workspace(workspace_id=workspace_id)
handler.check_permissions(
user,
ReadWorkspaceOperationType.type,
workspace=workspace,
context=workspace,
)
applications = Application.objects.filter(
workspace=workspace, workspace__trashed=False
)
if application_ids:
applications = applications.filter(id__in=application_ids)
applications = CoreHandler().filter_queryset(
user,
ListApplicationsWorkspaceOperationType.type,
applications,
workspace=workspace,
)
if application_ids and len(application_ids) != len(applications):
raise PermissionDenied(
"Some of the selected applications do not exist or the user does "
"not have access to them."
)
return workspace, applications
def prepare_values(
self, values: Dict[str, Any], user: AbstractUser
) -> Dict[str, Any]:
workspace_id = values.get("workspace_id")
application_ids = values.get("application_ids")
self.get_workspace_and_applications(
user=user, workspace_id=workspace_id, application_ids=application_ids
)
return {
"workspace_id": workspace_id,
"application_ids": ",".join(map(str, application_ids))
if application_ids
else "",
}
def run(self, job: ExportApplicationsJob, progress: Progress) -> str:
application_ids = job.application_ids
if application_ids:
application_ids = application_ids.split(",")
workspace, applications = self.get_workspace_and_applications(
user=job.user,
workspace_id=job.workspace_id,
application_ids=application_ids,
)
progress_builder = progress.create_child_builder(
represents_progress=progress.total
)
exported_file_name = action_type_registry.get_by_type(
ExportApplicationsActionType
).do(
job.user,
workspace=workspace,
applications=applications,
progress_builder=progress_builder,
)
job.exported_file_name = exported_file_name
job.save(update_fields=("exported_file_name",))
return exported_file_name
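
Round-trip sketch of how the application ids survive the job model's TextField: `prepare_values` joins them into a comma separated string and `run` splits them back before filtering:

ids = [4, 8, 15]
stored = ",".join(map(str, ids))  # "4,8,15", persisted on the job row
restored = stored.split(",") if stored else []
assert restored == ["4", "8", "15"]  # strings are fine for an id__in filter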

View file

@@ -0,0 +1,88 @@
# Generated by Django 4.2.13 on 2024-08-26 08:53
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("core", "0089_alter_snapshot_unique_together"),
]
operations = [
migrations.CreateModel(
name="ExportApplicationsJob",
fields=[
(
"job_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="core.job",
),
),
(
"user_ip_address",
models.GenericIPAddressField(
help_text="The user IP address.", null=True
),
),
(
"user_websocket_id",
models.CharField(
help_text="The user websocket uuid needed to manage signals sent correctly.",
max_length=36,
null=True,
),
),
(
"user_session_id",
models.CharField(
help_text="The user session uuid needed for undo/redo functionality.",
max_length=36,
null=True,
),
),
(
"user_action_group_id",
models.CharField(
help_text="The user session uuid needed for undo/redo action group functionality.",
max_length=36,
null=True,
),
),
(
"workspace_id",
models.PositiveIntegerField(
help_text="The workspace id that the applications are going to be exported from."
),
),
(
"application_ids",
models.TextField(
help_text="The comma separated list of application ids that are going to be exported."
),
),
(
"only_structure",
models.BooleanField(
default=False,
help_text="Indicates if only the structure of the applications should be exported, without user data.",
),
),
(
"exported_file_name",
models.TextField(
blank=True, help_text="The name of the exported archive file."
),
),
],
options={
"abstract": False,
},
bases=("core.job", models.Model),
),
]

View file

@@ -51,6 +51,7 @@ __all__ = [
"Service",
"Notification",
"BlacklistedToken",
"ExportApplicationsJob",
]
User = get_user_model()
@@ -643,3 +644,23 @@ class InstallTemplateJob(
help_text="The template that is installed.",
)
installed_applications = models.JSONField(default=list)
class ExportApplicationsJob(
JobWithUserIpAddress, JobWithWebsocketId, JobWithUndoRedoIds, Job
):
workspace_id = models.PositiveIntegerField(
help_text="The workspace id that the applications are going to be exported from."
)
application_ids = models.TextField(
help_text="The comma separated list of application ids that are going to be exported."
)
only_structure = models.BooleanField(
default=False,
help_text="Indicates if only the structure of the applications should be "
"exported, without user data.",
)
exported_file_name = models.TextField(
blank=True,
help_text="The name of the exported archive file.",
)

View file

@@ -1,3 +1,6 @@
from io import BytesIO
from typing import BinaryIO
from django.core.files.storage import Storage, default_storage
@@ -21,3 +24,27 @@ class OverwritingStorageHandler:
if self.storage.exists(name):
self.storage.delete(name)
self.storage.save(name, content)
def _create_storage_dir_if_missing_and_open(storage_location, storage=None) -> BinaryIO:
"""
Attempts to open the provided storage location in binary overwriting write mode.
If it encounters a FileNotFoundError, it will attempt to create the folder
structure leading up to the storage location and then open again.
:param storage_location: The storage location to open and ensure folders for.
:param storage: The storage to use, if None will use the default storage.
:return: The open file descriptor for the storage_location
"""
storage = storage or get_default_storage()
try:
return storage.open(storage_location, "wb+")
except FileNotFoundError:
# Django's file system storage will not attempt to create a missing
# EXPORT_FILES_DIRECTORY and instead will throw a FileNotFoundError.
# So we first save an empty file which will create any missing directories
# and then open again.
storage.save(storage_location, BytesIO())
return storage.open(storage_location, "wb")
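
Usage sketch for the relocated helper, with an assumed export-style path; the leading underscore signals that it is still considered module-private:

from baserow.core.storage import (
    _create_storage_dir_if_missing_and_open,
    get_default_storage,
)

# Opens the location for binary writing, creating any missing directories
# on the way (see the FileNotFoundError fallback above).
with _create_storage_dir_if_missing_and_open(
    "export_files/example.zip", get_default_storage()
) as file_handle:
    file_handle.write(b"zip bytes go here")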

View file

@@ -1,7 +1,11 @@
import mimetypes
import pathlib
from django.core.files.base import ContentFile
from django.core.files.storage import Storage
from baserow.core.models import UserFile
from baserow.core.user_files.handler import UserFileHandler
from baserow.core.utils import random_string
@@ -30,4 +34,14 @@ class UserFileFixtures:
if "sha256_hash" not in kwargs:
kwargs["sha256_hash"] = random_string(64)
return UserFile.objects.create(**kwargs)
user_file = UserFile.objects.create(**kwargs)
return user_file
def save_content_in_user_file(
self, user_file: UserFile, storage: Storage, content: str = ""
) -> UserFile:
path = UserFileHandler().user_file_path(user_file.name)
content = content or f"test file {user_file.original_name} at {path}"
storage.save(path, ContentFile(content))
return user_file

View file

@@ -178,8 +178,6 @@ def setup_interesting_test_table(
.values_list("id", flat=True)
)
file_suffix = file_suffix or ""
values = {
"text": "text",
"long_text": "long_text",

View file

@@ -854,3 +854,8 @@ def test_thread():
sys.setswitchinterval(orig_switch_interval)
yield run_callable
@pytest.fixture()
def use_tmp_media_root(tmpdir, settings):
settings.MEDIA_ROOT = tmpdir

View file

@@ -0,0 +1,243 @@
import json
import zipfile
from django.test.utils import override_settings
from django.urls import reverse
import pytest
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
HTTP_401_UNAUTHORIZED,
HTTP_403_FORBIDDEN,
HTTP_404_NOT_FOUND,
)
@pytest.mark.django_db
@override_settings(
FEATURE_FLAGS="",
)
def test_exporting_workspace_with_feature_flag_disabled(
data_fixture, api_client, tmpdir
):
user, token = data_fixture.create_user_and_token()
workspace = data_fixture.create_workspace(user=user)
data_fixture.create_database_application(workspace=workspace)
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": workspace.id},
),
data={},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_403_FORBIDDEN
assert response.json()["error"] == "ERROR_FEATURE_DISABLED"
@pytest.mark.django_db
def test_exporting_missing_workspace_returns_error(data_fixture, api_client, tmpdir):
user, token = data_fixture.create_user_and_token()
workspace = data_fixture.create_workspace(user=user)
data_fixture.create_database_application(workspace=workspace)
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": 9999},
),
data={},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_404_NOT_FOUND
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_exporting_workspace_with_no_permissions_returns_error(
data_fixture, api_client, tmpdir
):
user, token = data_fixture.create_user_and_token()
_, token2 = data_fixture.create_user_and_token()
workspace = data_fixture.create_workspace(user=user)
data_fixture.create_database_application(workspace=workspace)
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": workspace.id},
),
data={},
format="json",
HTTP_AUTHORIZATION=f"JWT {token2}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
@pytest.mark.django_db
def test_exporting_workspace_with_application_without_permissions_returns_error(
data_fixture, api_client, tmpdir
):
user, token = data_fixture.create_user_and_token()
workspace = data_fixture.create_workspace(user=user)
database = data_fixture.create_database_application(workspace=workspace)
user2, token2 = data_fixture.create_user_and_token()
workspace2 = data_fixture.create_workspace(user=user2)
database2 = data_fixture.create_database_application(workspace=workspace2)
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": workspace.id},
),
data={"application_ids": [database.id, database2.id]},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_401_UNAUTHORIZED
assert response.json()["error"] == "PERMISSION_DENIED"
@pytest.mark.django_db(transaction=True)
def test_exporting_empty_workspace(
data_fixture,
api_client,
tmpdir,
settings,
django_capture_on_commit_callbacks,
use_tmp_media_root,
):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
token = data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": workspace.id},
),
data={
"application_ids": [],
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
job_id = response_json["id"]
assert response_json == {
"exported_file_name": "",
"human_readable_error": "",
"id": job_id,
"progress_percentage": 0,
"state": "pending",
"type": "export_applications",
"url": None,
}
response = api_client.get(
reverse("api:jobs:item", kwargs={"job_id": job_id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
file_name = response_json["exported_file_name"]
assert response_json["state"] == "finished"
assert response_json["progress_percentage"] == 100
assert (
response_json["url"] == f"http://localhost:8000/media/export_files/{file_name}"
)
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, file_name)
assert file_path.isfile()
with zipfile.ZipFile(file_path, "r") as zip_ref:
assert "data/workspace_export.json" in zip_ref.namelist()
with zip_ref.open("data/workspace_export.json") as json_file:
json_data = json.load(json_file)
assert len(json_data) == 0
@pytest.mark.django_db(transaction=True)
def test_exporting_workspace_with_single_empty_database(
data_fixture,
api_client,
tmpdir,
settings,
django_capture_on_commit_callbacks,
use_tmp_media_root,
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
token = data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": database.workspace.id},
),
data={
"application_ids": [],
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
job_id = response_json["id"]
assert response_json == {
"exported_file_name": "",
"human_readable_error": "",
"id": job_id,
"progress_percentage": 0,
"state": "pending",
"type": "export_applications",
"url": None,
}
response = api_client.get(
reverse("api:jobs:item", kwargs={"job_id": job_id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
file_name = response_json["exported_file_name"]
assert response_json["state"] == "finished"
assert response_json["progress_percentage"] == 100
assert (
response_json["url"] == f"http://localhost:8000/media/export_files/{file_name}"
)
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, file_name)
assert file_path.isfile()
with zipfile.ZipFile(file_path, "r") as zip_ref:
assert "data/workspace_export.json" in zip_ref.namelist()
with zip_ref.open("data/workspace_export.json") as json_file:
json_data = json.load(json_file)
assert len(json_data) == 1
assert json_data == [
{
"id": database.id,
"name": database.name,
"order": database.order,
"type": "database",
"tables": [],
}
]

View file

@@ -231,9 +231,7 @@ def test_exporting_csv_writes_file_to_storage(
)
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
with patch(
"baserow.contrib.database.export.handler.get_default_storage"
) as get_storage_mock:
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
get_storage_mock.return_value = storage
run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
@@ -355,9 +353,7 @@ def test_exporting_csv_table_writes_file_to_storage(
)
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
with patch(
"baserow.contrib.database.export.handler.get_default_storage"
) as get_storage_mock:
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
get_storage_mock.return_value = storage
run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
# DRF uses some custom internal date time formatting, use the field itself

View file

@@ -0,0 +1,146 @@
import json
import zipfile
from django.urls import reverse
import pytest
from baserow.contrib.database.rows.handler import RowHandler
from baserow.core.import_export_handler import ImportExportHandler
from baserow.core.registries import ImportExportConfig
from baserow.core.storage import get_default_storage
from baserow.core.user_files.models import UserFile
from baserow.test_utils.helpers import setup_interesting_test_database
@pytest.mark.django_db(transaction=True)
def test_exporting_interesting_database(
data_fixture, api_client, tmpdir, settings, use_tmp_media_root
):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
database_name = "To be exported"
cli_import_export_config = ImportExportConfig(
include_permission_data=False, reduce_disk_space_usage=False
)
database = setup_interesting_test_database(
data_fixture,
user=user,
workspace=workspace,
name=database_name,
)
storage = get_default_storage()
for user_file in UserFile.objects.all():
data_fixture.save_content_in_user_file(user_file=user_file, storage=storage)
file_name = ImportExportHandler().export_workspace_applications(
workspace,
import_export_config=cli_import_export_config,
applications=[database],
storage=storage,
progress_builder=None,
)
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, file_name)
assert file_path.isfile()
with zipfile.ZipFile(file_path, "r") as zip_ref:
assert "data/workspace_export.json" in zip_ref.namelist()
with zip_ref.open("data/workspace_export.json") as json_file:
json_data = json.load(json_file)
assert len(json_data) == 1
assert json_data[0]["name"] == database.name
@pytest.mark.django_db(transaction=True)
def test_exporting_workspace_writes_file_to_storage(
data_fixture,
api_client,
tmpdir,
settings,
django_capture_on_commit_callbacks,
use_tmp_media_root,
):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
row_handler = RowHandler()
row_handler.create_row(
user=user,
table=table,
values={
text_field.id: "row #1",
},
)
row_handler.create_row(
user=user,
table=table,
values={
text_field.id: "row #2",
},
)
token = data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:workspaces:export_workspace_async",
kwargs={"workspace_id": table.database.workspace.id},
),
data={
"application_ids": [],
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
job_id = response_json["id"]
assert response_json == {
"exported_file_name": "",
"human_readable_error": "",
"id": job_id,
"progress_percentage": 0,
"state": "pending",
"type": "export_applications",
"url": None,
}
response = api_client.get(
reverse("api:jobs:item", kwargs={"job_id": job_id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
file_name = response_json["exported_file_name"]
assert response_json["state"] == "finished"
assert response_json["progress_percentage"] == 100
assert (
response_json["url"] == f"http://localhost:8000/media/export_files/{file_name}"
)
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, file_name)
assert file_path.isfile()
with zipfile.ZipFile(file_path, "r") as zip_ref:
assert "data/workspace_export.json" in zip_ref.namelist()
with zip_ref.open("data/workspace_export.json") as json_file:
json_data = json.load(json_file)
assert len(json_data) == 1
assert json_data[0]["name"] == table.database.name
assert len(json_data[0]["tables"]) == 1
table = json_data[0]["tables"][0]
assert len(table["fields"]) == 1
assert table["fields"][0]["name"] == text_field.name
assert len(table["rows"]) == 2
assert table["rows"][0][f"field_{text_field.id}"] == "row #1"
assert table["rows"][1][f"field_{text_field.id}"] == "row #2"

View file

@@ -50,7 +50,7 @@ def _parse_date(date):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_hidden_fields_are_excluded(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -79,7 +79,7 @@ def test_hidden_fields_are_excluded(get_storage_mock, data_fixture):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_csv_is_sorted_by_sorts(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -106,7 +106,7 @@ def test_csv_is_sorted_by_sorts(get_storage_mock, data_fixture):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_csv_is_filtered_by_filters(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -135,7 +135,7 @@ def test_csv_is_filtered_by_filters(get_storage_mock, data_fixture):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_exporting_table_ignores_view_filters_sorts_hides(
get_storage_mock, data_fixture
):
@@ -179,7 +179,7 @@ def test_exporting_table_ignores_view_filters_sorts_hides(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_columns_are_exported_by_order_then_field_id(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -223,7 +223,7 @@ def test_columns_are_exported_by_order_then_field_id(get_storage_mock, data_fixt
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_every_interesting_different_field_to_csv(
get_storage_mock, data_fixture
):
@@ -285,7 +285,7 @@ def run_export_job_over_interesting_table(data_fixture, storage_mock, options):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_special_characters_in_arabic_encoding_to_csv(
get_storage_mock, data_fixture
):
@@ -384,7 +384,7 @@ def test_a_complete_export_job_which_has_expired_will_have_its_file_deleted(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_a_pending_job_which_has_expired_will_be_cleaned_up(
get_storage_mock,
data_fixture,
@@ -425,7 +425,7 @@ def test_a_pending_job_which_has_expired_will_be_cleaned_up(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_a_running_export_job_which_has_expired_will_be_stopped(
get_storage_mock, data_fixture, settings
):
@@ -547,7 +547,7 @@ def test_attempting_to_export_a_view_for_a_type_which_doesnt_support_it_fails(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_an_export_job_which_fails_will_be_marked_as_a_failed_job(
get_storage_mock,
data_fixture,
@@ -610,7 +610,7 @@ def test_an_export_job_which_fails_will_be_marked_as_a_failed_job(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_csv_without_header(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -630,7 +630,7 @@ def test_can_export_csv_without_header(get_storage_mock, data_fixture):
@pytest.mark.django_db
@pytest.mark.once_per_day_in_ci
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_csv_with_different_charsets(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -654,7 +654,7 @@ def test_can_export_csv_with_different_charsets(get_storage_mock, data_fixture):
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_csv_with_different_column_separators(
get_storage_mock, data_fixture
):
@@ -682,7 +682,7 @@ def test_can_export_csv_with_different_column_separators(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_adding_more_rows_doesnt_increase_number_of_queries_run(
get_storage_mock, data_fixture, django_assert_num_queries
):
@@ -879,7 +879,7 @@ def setup_table_and_run_export_decoding_result(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_a_column_without_a_grid_view_option_has_an_option_made_and_is_exported(
get_storage_mock, data_fixture
):
@@ -916,7 +916,7 @@ def test_a_column_without_a_grid_view_option_has_an_option_made_and_is_exported(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_action_done_is_emitted_when_the_export_finish(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@@ -943,7 +943,7 @@ def test_action_done_is_emitted_when_the_export_finish(get_storage_mock, data_fi
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_csv_is_escaped(get_storage_mock, data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock

View file

@@ -0,0 +1,23 @@
import pytest
from baserow.core.import_export_handler import ImportExportHandler
from baserow.core.registries import ImportExportConfig
@pytest.mark.django_db(transaction=True)
def test_create_export_file(data_fixture):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
database = data_fixture.create_database_application(workspace=workspace)
cli_import_export_config = ImportExportConfig(
include_permission_data=False, reduce_disk_space_usage=False
)
file_name = ImportExportHandler().export_workspace_applications(
workspace=workspace,
import_export_config=cli_import_export_config,
applications=[database],
)
assert file_name is not None

View file

@@ -0,0 +1,44 @@
import pytest
from baserow.core.exceptions import UserNotInWorkspace
from baserow.core.job_types import ExportApplicationsJobType
from baserow.core.jobs.constants import JOB_FINISHED
from baserow.core.jobs.handler import JobHandler
from baserow.core.models import ExportApplicationsJob
@pytest.mark.django_db(transaction=True)
def test_no_exported_files_on_error(data_fixture):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace()
data_fixture.create_database_application(workspace=workspace)
with pytest.raises(UserNotInWorkspace):
JobHandler().create_and_start_job(
user,
ExportApplicationsJobType.type,
workspace_id=workspace.id,
application_ids=[],
sync=True,
)
# The permission check fails before any job is created, so no job row (and
# therefore no exported file) exists afterwards.
assert ExportApplicationsJob.objects.count() == 0
@pytest.mark.django_db(transaction=True)
def test_success_export(data_fixture):
user = data_fixture.create_user()
workspace = data_fixture.create_workspace(user=user)
data_fixture.create_database_application(workspace=workspace)
job = JobHandler().create_and_start_job(
user,
ExportApplicationsJobType.type,
workspace_id=workspace.id,
application_ids=[],
sync=True,
)
assert job.state == JOB_FINISHED
assert job.exported_file_name is not None

View file

@@ -0,0 +1,7 @@
{
"type": "feature",
"message": "Allow exporting workspace applications",
"issue_number": 2930,
"bullet_points": [],
"created_at": "2024-08-29"
}

View file

@@ -733,44 +733,41 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend(
api_client,
tmpdir,
django_capture_on_commit_callbacks,
use_tmp_media_root,
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
storage = dummy_storage(tmpdir)
with patch("baserow.core.storage.get_default_storage", new=storage):
token = enterprise_data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:database:export:export_table",
kwargs={"table_id": table.id},
),
data={
"exporter_type": "csv",
"export_charset": "utf-8",
"csv_include_header": "True",
"csv_column_separator": ",",
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
job_id = response_json["id"]
response = api_client.get(
reverse("api:database:export:get", kwargs={"job_id": job_id}),
token = enterprise_data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:database:export:export_table",
kwargs={"table_id": table.id},
),
data={
"exporter_type": "csv",
"export_charset": "utf-8",
"csv_include_header": "True",
"csv_column_separator": ",",
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
json = response.json()
response_json = response.json()
job_id = response_json["id"]
response = api_client.get(
reverse("api:database:export:get", kwargs={"job_id": job_id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
json = response.json()
# The file is served by the backend
assert json["url"].startswith("http://localhost:8000/api/files/")
# download it
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(json["url"].replace("http://localhost:8000", ""))
response = api_client.get(json["url"].replace("http://localhost:8000", ""))
assert response.status_code == HTTP_200_OK
@@ -791,6 +788,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend(
synced_roles,
django_capture_on_commit_callbacks,
tmpdir,
use_tmp_media_root,
):
(
admin_user,
@@ -803,40 +801,37 @@
"export_charset": "utf-8",
}
storage = dummy_storage(tmpdir)
with patch("baserow.core.storage.get_default_storage", new=storage):
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse("api:enterprise:audit_log:async_export"),
data=csv_settings,
format="json",
HTTP_AUTHORIZATION=f"JWT {admin_token}",
)
assert response.status_code == HTTP_202_ACCEPTED, response.json()
job = response.json()
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "audit_log_export"
admin_token = enterprise_data_fixture.generate_token(admin_user)
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse("api:enterprise:audit_log:async_export"),
data=csv_settings,
format="json",
HTTP_AUTHORIZATION=f"JWT {admin_token}",
)
assert response.status_code == HTTP_200_OK
assert response.status_code == HTTP_202_ACCEPTED, response.json()
job = response.json()
assert job["state"] == "finished"
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "audit_log_export"
admin_token = enterprise_data_fixture.generate_token(admin_user)
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
HTTP_AUTHORIZATION=f"JWT {admin_token}",
)
assert response.status_code == HTTP_200_OK
job = response.json()
assert job["state"] == "finished"
assert job["type"] == "audit_log_export"
# The file is served by the backend
assert job["url"].startswith("http://localhost:8000/api/files/")
# download it
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(job["url"].replace("http://localhost:8000", ""))
response = api_client.get(job["url"].replace("http://localhost:8000", ""))
assert response.status_code == HTTP_200_OK
@@ -852,51 +847,45 @@
},
)
def test_files_can_be_downloaded_with_dl_query_param_as_filename(
enable_enterprise, enterprise_data_fixture, api_client, tmpdir
enable_enterprise, enterprise_data_fixture, api_client, tmpdir, use_tmp_media_root
):
_, token = enterprise_data_fixture.create_user_and_token()
storage = dummy_storage(tmpdir)
with patch("baserow.core.storage.get_default_storage", new=storage):
file = SimpleUploadedFile("test.txt", b"Hello World")
response = api_client.post(
reverse("api:user_files:upload_file"),
data={"file": file},
format="multipart",
HTTP_AUTHORIZATION=f"JWT {token}",
)
file = SimpleUploadedFile("test.txt", b"Hello World")
response = api_client.post(
reverse("api:user_files:upload_file"),
data={"file": file},
format="multipart",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK, response.json()
backend_file_url = response.json()["url"]
file_name = response.json()["name"]
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(
backend_file_url.replace("http://localhost:8000", ""),
)
response = api_client.get(
backend_file_url.replace("http://localhost:8000", ""),
)
assert response.status_code == HTTP_200_OK
assert response.headers["Content-Disposition"] == f'inline; filename="{file_name}"'
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=",
)
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=",
)
assert response.status_code == HTTP_200_OK
assert response.headers["Content-Disposition"] == f'inline; filename="{file_name}"'
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=download.txt",
)
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=download.txt",
)
assert response.status_code == HTTP_200_OK
assert (
response.headers["Content-Disposition"] == 'attachment; filename="download.txt"'
)
with patch("baserow.core.storage.get_default_storage", new=storage):
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=1",
)
response = api_client.get(
backend_file_url.replace("http://localhost:8000", "") + "?dl=1",
)
assert response.status_code == HTTP_200_OK
assert response.headers["Content-Disposition"] == 'attachment; filename="1"'
@ -918,6 +907,7 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend_with_workspace
synced_roles,
django_capture_on_commit_callbacks,
tmpdir,
use_tmp_media_root,
):
(
admin_user,
@ -936,39 +926,36 @@ def test_audit_log_can_export_to_csv_and_be_served_by_the_backend_with_workspace
"export_charset": "utf-8",
}
storage = dummy_storage(tmpdir)
with patch("baserow.core.storage.get_default_storage", new=storage):
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse("api:enterprise:audit_log:async_export"),
data=csv_settings,
format="json",
HTTP_AUTHORIZATION=f"JWT {wp_admin_token}",
)
assert response.status_code == HTTP_202_ACCEPTED, response.json()
job = response.json()
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "audit_log_export"
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse("api:enterprise:audit_log:async_export"),
data=csv_settings,
format="json",
HTTP_AUTHORIZATION=f"JWT {wp_admin_token}",
)
assert response.status_code == HTTP_200_OK
assert response.status_code == HTTP_202_ACCEPTED, response.json()
job = response.json()
assert job["state"] == "finished"
assert job["id"] is not None
assert job["state"] == "pending"
assert job["type"] == "audit_log_export"
response = api_client.get(
reverse(
"api:jobs:item",
kwargs={"job_id": job["id"]},
),
HTTP_AUTHORIZATION=f"JWT {wp_admin_token}",
)
assert response.status_code == HTTP_200_OK
job = response.json()
assert job["state"] == "finished"
assert job["type"] == "audit_log_export"
# The file is served by the backend
assert job["url"].startswith("http://localhost:8000/api/files/")
# download it
with (
patch("baserow.core.storage.get_default_storage", new=storage),
patch(
"baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate",
side_effect=[(wp_admin_user, None), (other_wp_admin_user, None)],
@ -998,46 +985,43 @@ def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend_with
api_client,
tmpdir,
django_capture_on_commit_callbacks,
use_tmp_media_root,
):
user = enterprise_data_fixture.create_user()
table = enterprise_data_fixture.create_database_table(user=user)
other_user = enterprise_data_fixture.create_user()
storage = dummy_storage(tmpdir)
with patch("baserow.core.storage.get_default_storage", new=storage):
token = enterprise_data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:database:export:export_table",
kwargs={"table_id": table.id},
),
data={
"exporter_type": "csv",
"export_charset": "utf-8",
"csv_include_header": "True",
"csv_column_separator": ",",
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
response_json = response.json()
job_id = response_json["id"]
response = api_client.get(
reverse("api:database:export:get", kwargs={"job_id": job_id}),
token = enterprise_data_fixture.generate_token(user)
with django_capture_on_commit_callbacks(execute=True):
response = api_client.post(
reverse(
"api:database:export:export_table",
kwargs={"table_id": table.id},
),
data={
"exporter_type": "csv",
"export_charset": "utf-8",
"csv_include_header": "True",
"csv_column_separator": ",",
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
json = response.json()
response_json = response.json()
job_id = response_json["id"]
response = api_client.get(
reverse("api:database:export:get", kwargs={"job_id": job_id}),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
json = response.json()
# The file is served by the backend
assert json["url"].startswith("http://localhost:8000/api/files/")
# download it
with (
patch("baserow.core.storage.get_default_storage", new=storage),
patch(
"baserow_enterprise.api.secure_file_serve.views.SecureFileServeAuthentication.authenticate",
side_effect=[(user, None), (other_user, None)],

View file

@ -17,7 +17,7 @@ from baserow_enterprise.audit_log.job_types import AuditLogExportJobType
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_audit_log_export_csv_correctly(
get_storage_mock, enterprise_data_fixture, synced_roles
):
@ -97,7 +97,7 @@ def test_audit_log_export_csv_correctly(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
@override_settings(DEBUG=True)
@pytest.mark.skip("Need to re-build the translations first.")
def test_audit_log_export_csv_in_the_user_language(
@ -141,7 +141,7 @@ def test_audit_log_export_csv_in_the_user_language(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
@override_settings(DEBUG=True)
def test_deleting_audit_log_export_job_also_delete_exported_file(
get_storage_mock, enterprise_data_fixture, synced_roles
@ -182,7 +182,7 @@ def test_deleting_audit_log_export_job_also_delete_exported_file(
@pytest.mark.django_db
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
@override_settings(DEBUG=True)
def test_audit_log_export_filters_work_correctly(
get_storage_mock, enterprise_data_fixture, synced_roles
@ -238,7 +238,7 @@ def test_audit_log_export_filters_work_correctly(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_audit_log_export_workspace_csv_correctly(
get_storage_mock, enterprise_data_fixture, synced_roles
):
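
A note on the recurring patch-target change in these test files: `unittest.mock.patch` replaces the named attribute on the module given in the dotted path, so the string must point at the module the code under test actually resolves the function from at call time. With storage resolution now going through `baserow.core.storage`, the tests patch that path instead of the old one on the export handler module. A minimal sketch of the pattern, with the body left illustrative:

from unittest.mock import MagicMock, patch

# The dotted path names where the attribute is replaced; it must match the
# module the code under test looks `get_default_storage` up on.
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
    get_storage_mock.return_value = MagicMock()
    # ... run the export here; it now receives the mocked storage ...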

View file

@ -0,0 +1,11 @@
<template>
<Alert type="warning">
<p>{{ $t('exportWorkspaceModalWarning.message') }}</p>
</Alert>
</template>
<script>
export default {
name: 'ExportWorkspaceModalWarning',
}
</script>

View file

@ -282,6 +282,9 @@
"snapshotModalWarning": {
"message": "Please be aware that a snapshot will include any permissions set on the application and its tables."
},
"exportWorkspaceModalWarning": {
"message": "Please be aware that an export will not include any permissions set on the application and its tables."
},
"auditLogSidebarWorkspace": {
"title": "Audit log"
},

View file

@ -9,6 +9,7 @@ import EnterpriseSettings from '@baserow_enterprise/components/EnterpriseSetting
import EnterpriseSettingsOverrideDashboardHelp from '@baserow_enterprise/components/EnterpriseSettingsOverrideDashboardHelp'
import EnterpriseLogo from '@baserow_enterprise/components/EnterpriseLogo'
import { DatabaseApplicationType } from '@baserow/modules/database/applicationTypes'
import ExportWorkspaceModalWarning from '@baserow_enterprise/components/ExportWorkspaceModalWarning.vue'
export class EnterprisePlugin extends BaserowPlugin {
static getType() {
@ -58,6 +59,14 @@ export class EnterprisePlugin extends BaserowPlugin {
return rbacSupport ? SnapshotModalWarning : null
}
getExtraExportWorkspaceModalComponents(workspace) {
const rbacSupport = this.app.$hasFeature(
EnterpriseFeatures.RBAC,
workspace.id
)
return rbacSupport ? ExportWorkspaceModalWarning : null
}
getSettingsPageComponents() {
return [EnterpriseSettings]
}

View file

@ -74,9 +74,7 @@ def test_exporting_json_writes_file_to_storage(
)
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
with patch(
"baserow.contrib.database.export.handler.get_default_storage"
) as get_storage_mock:
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
get_storage_mock.return_value = storage
run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
expected_created_at = DateTimeField().to_representation(run_time)
@ -231,9 +229,7 @@ def test_exporting_xml_writes_file_to_storage(
)
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
with patch(
"baserow.contrib.database.export.handler.get_default_storage"
) as get_storage_mock:
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
get_storage_mock.return_value = storage
run_time = parse_datetime("2020-02-01 01:00").replace(tzinfo=timezone.utc)
with freeze_time(run_time):

View file

@ -23,7 +23,7 @@ def _parse_date(date):
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_every_interesting_different_field_to_json(
get_storage_mock, premium_data_fixture
):
@ -239,7 +239,7 @@ def test_can_export_every_interesting_different_field_to_json(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_cannot_export_json_without_premium_license(
get_storage_mock, premium_data_fixture
):
@ -253,7 +253,7 @@ def test_cannot_export_json_without_premium_license(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_cannot_export_json_without_premium_license_for_group(
get_storage_mock, premium_data_fixture, alternative_per_workspace_license_service
):
@ -271,7 +271,7 @@ def test_cannot_export_json_without_premium_license_for_group(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_if_duplicate_field_names_json_export(get_storage_mock, premium_data_fixture):
storage_mock = MagicMock()
get_storage_mock.return_value = storage_mock
@ -306,7 +306,7 @@ def test_if_duplicate_field_names_json_export(get_storage_mock, premium_data_fix
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_can_export_every_interesting_different_field_to_xml(
get_storage_mock, premium_data_fixture
):
@ -520,7 +520,7 @@ def test_can_export_every_interesting_different_field_to_xml(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_if_xml_duplicate_name_and_value_are_escaped(
get_storage_mock, premium_data_fixture
):
@ -566,7 +566,7 @@ def test_if_xml_duplicate_name_and_value_are_escaped(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_cannot_export_xml_without_premium_license(
get_storage_mock, premium_data_fixture
):
@ -580,7 +580,7 @@ def test_cannot_export_xml_without_premium_license(
@pytest.mark.django_db
@override_settings(DEBUG=True)
@patch("baserow.contrib.database.export.handler.get_default_storage")
@patch("baserow.core.storage.get_default_storage")
def test_cannot_export_xml_without_premium_license_for_group(
get_storage_mock, premium_data_fixture, alternative_per_workspace_license_service
):

View file

@ -0,0 +1,38 @@
<template>
<form @submit.prevent="submit">
<FormGroup :error="fieldHasErrors('name')" small-label required>
<slot name="settings"> </slot>
<template #after-input>
<slot></slot>
</template>
</FormGroup>
</form>
</template>
<script>
import form from '@baserow/modules/core/mixins/form'
import { required } from 'vuelidate/lib/validators'
export default {
name: 'ExportWorkspaceForm',
mixins: [form],
data() {
return {
values: {
only_structure: false,
application_ids: [],
},
}
},
validations() {
return {
values: {
only_structure: {
required,
},
},
}
},
}
</script>
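
The form submits two values, `only_structure` and `application_ids`. For orientation, a hedged sketch of how such a payload could be validated server side with Django REST Framework; the field names mirror the form above, but the serializer itself is illustrative, not the one this commit ships:

from rest_framework import serializers

class ExportWorkspaceApplicationsPayload(serializers.Serializer):
    # Mirrors `values.only_structure` in the form above; presumably exports
    # schema only and skips row data when true.
    only_structure = serializers.BooleanField(default=False)
    # Mirrors `values.application_ids`; an empty list is allowed and would
    # plausibly mean "export every application in the workspace".
    application_ids = serializers.ListField(
        child=serializers.IntegerField(min_value=1),
        allow_empty=True,
        default=list,
    )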

View file

@ -0,0 +1,145 @@
<template>
<Modal>
<h2 class="box__title">
{{ $t('exportWorkspaceModal.title') }} {{ workspace.name }}
</h2>
<p>
{{ $t('exportWorkspaceModal.description') }}
</p>
<component
:is="component"
v-for="(component, index) in workspaceExportModalAlertComponents"
:key="index"
></component>
<Error :error="error"></Error>
<div class="export-workspace-modal">
<ExportWorkspaceForm ref="form" @submitted="submitted">
<template v-if="jobIsRunning || jobHasSucceeded" #settings>
<ProgressBar
:value="job.progress_percentage"
:status="jobHumanReadableState"
/>
</template>
<template #default>
<Button
v-if="!loading && !finished"
size="large"
:loading="loading"
:disabled="loading"
>
{{ $t('exportWorkspaceModal.export') }}
</Button>
<Button
v-if="loading && !finished"
type="secondary"
tag="a"
size="large"
@click="reset()"
>
{{ $t('exportWorkspaceModal.cancel') }}</Button
>
<DownloadLink
v-if="!loading && finished"
class="button button--large button--full-width modal-progress__export-button"
:url="job.url"
:filename="job.exported_file_name"
:loading-class="'button--loading'"
>
{{ $t('exportTableLoadingBar.download') }}
</DownloadLink>
</template>
</ExportWorkspaceForm>
</div>
<template #actions> </template>
</Modal>
</template>
<script>
import modal from '@baserow/modules/core/mixins/modal'
import error from '@baserow/modules/core/mixins/error'
import SnapshotListItem from '@baserow/modules/core/components/snapshots/SnapshotListItem'
import WorkspaceService from '@baserow/modules/core/services/workspace'
import jobProgress from '@baserow/modules/core/mixins/jobProgress'
import ExportWorkspaceForm from '@baserow/modules/core/components/export/ExportWorkspaceForm'
import { notifyIf } from '@baserow/modules/core/utils/error'
export default {
name: 'ExportWorkspaceModal',
components: {
ExportWorkspaceForm,
SnapshotListItem,
},
mixins: [modal, error, jobProgress],
props: {
workspace: {
type: Object,
required: true,
},
},
data() {
return {
job: null,
loading: false,
finished: false,
}
},
computed: {
workspaceExportModalAlertComponents() {
return Object.values(this.$registry.getAll('plugin'))
.map((plugin) =>
plugin.getExtraExportWorkspaceModalComponents(this.workspace)
)
.filter((component) => component !== null)
},
},
beforeDestroy() {
this.stopPollIfRunning()
},
methods: {
show(...args) {
modal.methods.show.bind(this)(...args)
this.reset()
},
async submitted(values) {
this.loading = true
this.hideError()
try {
const { data } = await WorkspaceService(
this.$client
).exportApplications(this.workspace.id, values)
this.startJobPoller(data)
} catch (error) {
this.loading = false
this.handleError(error)
}
},
// eslint-disable-next-line require-await
async onJobDone() {
this.loading = false
this.finished = true
},
// eslint-disable-next-line require-await
async onJobFailed() {
this.loading = false
this.showError(
this.$t('clientHandler.notCompletedTitle'),
this.job.human_readable_error
)
},
// eslint-disable-next-line require-await
async onJobPollingError(error) {
this.loading = false
notifyIf(error)
},
reset() {
this.stopPollIfRunning()
this.job = null
this.finished = false
this.loading = false
this.hideError()
},
},
}
</script>

View file

@ -13,6 +13,18 @@
class="loading margin-left-2 margin-top-2 margin-bottom-2 margin-bottom-2"
></div>
<ul v-else class="context__menu">
<li
v-if="
$hasPermission('workspace.read', workspace, workspace.id) &&
$featureFlagIsEnabled(FF_EXPORT_WORKSPACE)
"
class="context__menu-item"
>
<a class="context__menu-item-link" @click="openExportData">
<i class="context__menu-item-icon iconoir-arrow-up-circle"></i>
{{ $t('workspaceContext.exportWorkspace') }}
</a>
</li>
<li
v-if="$hasPermission('workspace.update', workspace, workspace.id)"
class="context__menu-item"
@ -95,6 +107,12 @@
:initial-workspace="workspace"
>
</TrashModal>
<ExportWorkspaceModal
v-if="$hasPermission('workspace.read', workspace, workspace.id)"
ref="exportWorkspaceModal"
:workspace="workspace"
>
</ExportWorkspaceModal>
<LeaveWorkspaceModal
ref="leaveWorkspaceModal"
:workspace="workspace"
@ -110,13 +128,20 @@
<script>
import context from '@baserow/modules/core/mixins/context'
import { notifyIf } from '@baserow/modules/core/utils/error'
import ExportWorkspaceModal from '@baserow/modules/core/components/export/ExportWorkspaceModal.vue'
import TrashModal from '@baserow/modules/core/components/trash/TrashModal'
import LeaveWorkspaceModal from '@baserow/modules/core/components/workspace/LeaveWorkspaceModal'
import WorkspaceSettingsModal from '@baserow/modules/core/components/workspace/WorkspaceSettingsModal'
import { FF_EXPORT_WORKSPACE } from '@baserow/modules/core/plugins/featureFlags'
export default {
name: 'WorkspaceContext',
components: { LeaveWorkspaceModal, TrashModal, WorkspaceSettingsModal },
components: {
ExportWorkspaceModal,
LeaveWorkspaceModal,
TrashModal,
WorkspaceSettingsModal,
},
mixins: [context],
props: {
workspace: {
@ -127,6 +152,7 @@ export default {
data() {
return {
loading: false,
FF_EXPORT_WORKSPACE,
}
},
methods: {
@ -138,6 +164,10 @@ export default {
this.$refs.context.hide()
this.$refs.workspaceTrashModal.show()
},
openExportData() {
this.$refs.context.hide()
this.$refs.exportWorkspaceModal.show()
},
async deleteWorkspace() {
this.loading = true

View file

@ -127,6 +127,13 @@
"message": "Are you sure you want to leave the workspace {workspace}? You won't be able to access the related applications anymore and if you want to regain access, one of the admins must invite you again. If you leave the workspace, it will not be deleted. All the other members will still have access to it. It is not possible to leave a workspace if you're the last admin because that will leave it unmaintained.",
"leave": "Leave workspace"
},
"exportWorkspaceModal": {
"title": "Export",
"description": "Your data will be exported as a ZIP file, which can be imported into other Baserow instance.",
"exportSettings": "Export settings",
"export": "Export data",
"cancel": "Cancel"
},
"dashboardWorkspace": {
"createApplication": "Create new"
},
@ -165,6 +172,7 @@
},
"workspaceContext": {
"renameWorkspace": "Rename workspace",
"exportWorkspace": "Export data",
"settings": "Settings",
"members": "Members",
"auditLog": "Audit log",

View file

@ -142,6 +142,14 @@ export class BaserowPlugin extends Registerable {
return null
}
/**
* If set, `getExtraExportWorkspaceModalComponents` will allow plugins to decide what kind of
* copy is shown in the export workspace modal's Alert box.
*/
getExtraExportWorkspaceModalComponents(workspace) {
return null
}
/**
* Some features are optionally enabled, this function will be called when the
* $hasFeature directive is called on each plugin to check if any of the plugins

View file

@ -74,6 +74,9 @@ export default (client) => {
createInitialWorkspace(values) {
return client.post('/workspaces/create-initial-workspace/', values)
},
exportApplications(workspaceId, values) {
return client.post(`/workspaces/${workspaceId}/export/async/`, values)
},
}
)
}