Merge branch '3184-improve-the-export-progress-update' into 'develop'
Improve the export progress update
Closes #3184
See merge request baserow/baserow!2859
Commit aa20310a0e
14 changed files with 160 additions and 58 deletions
Changed directories:
  backend/src/baserow/contrib
  backend/src/baserow/core
  backend/tests/baserow/core
  changelog/entries/unreleased/feature
  premium/web-frontend/modules/baserow_premium/components/field
  web-frontend/modules/core
@@ -141,6 +141,7 @@ class BuilderApplicationType(ApplicationType):
  import_export_config: ImportExportConfig,
  files_zip: Optional[ExportZipFile] = None,
  storage: Optional[Storage] = None,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ) -> BuilderDict:
  """
  Exports the builder application type to a serialized format that can later

@@ -205,6 +206,7 @@ class BuilderApplicationType(ApplicationType):
  import_export_config,
  files_zip=files_zip,
  storage=storage,
+ progress_builder=progress_builder,
  )

  serialized_login_page = None
@@ -52,6 +52,7 @@ class DashboardApplicationType(ApplicationType):
  import_export_config: ImportExportConfig,
  files_zip: ExportZipFile | None = None,
  storage: Storage | None = None,
+ progress_builder: ChildProgressBuilder | None = None,
  ) -> DashboardDict:
  """
  Exports the dashboard application type to a serialized format that can later

@@ -93,6 +94,7 @@ class DashboardApplicationType(ApplicationType):
  import_export_config,
  files_zip=files_zip,
  storage=storage,
+ progress_builder=progress_builder,
  )

  return DashboardDict(
@@ -35,7 +35,11 @@ from baserow.core.storage import ExportZipFile
  from baserow.core.trash.handler import TrashHandler
  from baserow.core.utils import ChildProgressBuilder, grouper

- from .constants import IMPORT_SERIALIZED_IMPORTING, IMPORT_SERIALIZED_IMPORTING_TABLE
+ from .constants import (
+     EXPORT_SERIALIZED_EXPORTING_TABLE,
+     IMPORT_SERIALIZED_IMPORTING,
+     IMPORT_SERIALIZED_IMPORTING_TABLE,
+ )
  from .data_sync.registries import data_sync_type_registry
  from .db.atomic import read_repeatable_single_database_atomic_transaction
  from .export_serialized import DatabaseExportSerializedStructure

@@ -96,13 +100,17 @@ class DatabaseApplicationType(ApplicationType):
  import_export_config: ImportExportConfig,
  files_zip: Optional[ExportZipFile] = None,
  storage: Optional[Storage] = None,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ) -> List[Dict[str, Any]]:
  """
- Exports the tables provided to a serialized format that can later be
+ Exports the tables provided to a serialized format that can later
  be imported via the `import_tables_serialized`.
  """

+ progress = ChildProgressBuilder.build(progress_builder, child_total=len(tables))

  serialized_tables: List[Dict[str, Any]] = []

  for table in tables:
  fields = table.field_set.all()
  serialized_fields = []

@@ -125,9 +133,12 @@ class DatabaseApplicationType(ApplicationType):

  serialized_rows = []
  row_count_limit = settings.BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT
- if not import_export_config.only_structure:
+ export_all_table_rows = not import_export_config.only_structure
+ if export_all_table_rows:
  model = table.get_model(fields=fields, add_dependencies=False)
  row_queryset = model.objects.all()[: row_count_limit or None]

+ row_progress = progress.create_child(1, row_queryset.count())
  if table.created_by_column_added:
  row_queryset = row_queryset.select_related("created_by")
  if table.last_modified_by_column_added:

@@ -150,6 +161,11 @@ class DatabaseApplicationType(ApplicationType):
  row, field_name, table_cache, files_zip, storage
  )
  serialized_rows.append(serialized_row)
+ row_progress.increment(
+ state=EXPORT_SERIALIZED_EXPORTING_TABLE + str(table.name)
+ )
+ else:
+ progress.increment()

  serialized_data_sync = None
  if hasattr(table, "data_sync"):

@@ -174,6 +190,7 @@ class DatabaseApplicationType(ApplicationType):
  if extra_data is not None:
  structure.update(**extra_data)
  serialized_tables.append(structure)

  return serialized_tables

  def export_serialized(

@@ -182,6 +199,7 @@ class DatabaseApplicationType(ApplicationType):
  import_export_config: ImportExportConfig,
  files_zip: Optional[ExportZipFile] = None,
  storage: Optional[Storage] = None,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ) -> Dict[str, Any]:
  """
  Exports the database application type to a serialized format that can later

@@ -210,7 +228,7 @@ class DatabaseApplicationType(ApplicationType):
  )

  serialized_tables = self.export_tables_serialized(
- tables, import_export_config, files_zip, storage
+ tables, import_export_config, files_zip, storage, progress_builder
  )

  serialized = super().export_serialized(
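For orientation, the pattern these hunks introduce is: `export_tables_serialized` claims one unit of the optional `progress_builder` per table, and each table then gets a child progress sized to its row count, so per-row increments (tagged with the `exporting-table-<name>` state) roll up into the overall job percentage. Below is a minimal sketch of that nesting, assuming a configured Baserow backend environment; the `FakeTable` objects are a made-up stand-in for real table models and are not part of Baserow.

    from dataclasses import dataclass, field
    from typing import List, Optional

    from baserow.contrib.database.constants import EXPORT_SERIALIZED_EXPORTING_TABLE
    from baserow.core.utils import ChildProgressBuilder


    @dataclass
    class FakeTable:
        # Hypothetical stand-in for a Baserow table and its rows.
        name: str
        rows: List[dict] = field(default_factory=list)


    def export_tables_sketch(
        tables: List[FakeTable],
        progress_builder: Optional[ChildProgressBuilder] = None,
    ) -> List[dict]:
        # One unit of the parent progress is consumed per exported table.
        progress = ChildProgressBuilder.build(progress_builder, child_total=len(tables))
        serialized = []
        for table in tables:
            # The child's total is the row count; finishing it releases exactly
            # one unit to the parent progress.
            row_progress = progress.create_child(1, len(table.rows))
            for row in table.rows:
                serialized.append({"table": table.name, "row": row})
                row_progress.increment(
                    state=EXPORT_SERIALIZED_EXPORTING_TABLE + table.name
                )
        return serialized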
@@ -1,2 +1,5 @@
  IMPORT_SERIALIZED_IMPORTING = "importing"
  IMPORT_SERIALIZED_IMPORTING_TABLE = "importing-table-"
+ EXPORT_SERIALIZED_EXPORTING = "exporting"
+ EXPORT_SERIALIZED_EXPORTING_TABLE = "exporting-table-"
+ EXPORT_WORKSPACE_CREATE_ARCHIVE = "create-archive"
@@ -30,6 +30,7 @@ from loguru import logger
  from opentelemetry import trace

  from baserow.config.settings.base import BASEROW_DEFAULT_ZIP_COMPRESS_LEVEL
+ from baserow.contrib.database.constants import EXPORT_WORKSPACE_CREATE_ARCHIVE
  from baserow.core.handler import CoreHandler
  from baserow.core.import_export.exceptions import (
  ImportExportResourceDoesNotExist,

@@ -196,7 +197,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  zip_file: ExportZipFile,
  import_export_config: ImportExportConfig,
  storage: Storage,
- progress: Progress,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ) -> Dict:
  """
  Exports a single application (structure, content and assets) to a zip file.

@@ -206,7 +207,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  :param import_export_config: provides configuration options for the
  import/export process to customize how it works.
  :param storage: The storage where the export will be stored.
- :param progress: Progress instance that allows tracking of the export progress.
+ :param progress_builder: A progress builder that allows for publishing progress.
  :return: The exported and serialized application.
  """

@@ -218,7 +219,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):

  with application_type.export_safe_transaction_context(application):
  exported_application = application_type.export_serialized(
- application, import_export_config, zip_file, storage
+ application, import_export_config, zip_file, storage, progress_builder
  )

  data_file_content = json.dumps(exported_application, indent=INDENT)

@@ -234,10 +235,6 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  "total_files": 1,
  "files": {"schema": base_schema_path},
  }

- # TODO: Pass the progress instance to the export_serialized method of the
- # application type and handle a more granular progress tracking there.
- progress.increment()
  return application_data

  def export_multiple_applications(

@@ -246,7 +243,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  zip_file: ExportZipFile,
  import_export_config: ImportExportConfig,
  storage: Storage,
- progress: Progress,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ) -> List[Dict]:
  """
  Exports multiple applications (structure, content, and assets) to a zip file.

@@ -257,15 +254,17 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  :param import_export_config: Configuration options for the import/export
  process.
  :param storage: The storage instance where the export will be stored.
- :param progress: Progress instance to track the export progress.
+ :param progress_builder: A progress builder that allows for publishing progress.
  :return: A list of dictionaries representing the exported applications.
  """

  exported_applications = []
+ progress = ChildProgressBuilder.build(progress_builder, len(applications))

  for app in applications:
+ child_builder = progress.create_child_builder(represents_progress=1)
  exported_application = self.export_application(
- app, zip_file, import_export_config, storage, progress
+ app, zip_file, import_export_config, storage, child_builder
  )
  exported_applications.append(exported_application)
  return exported_applications

@@ -483,9 +482,6 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  storage = storage or get_default_storage()
  applications = applications or []

- progress = ChildProgressBuilder.build(progress_builder, child_total=100)
- export_app_progress = progress.create_child(90, len(applications))

  export_file_path = self.get_export_storage_path(file_name)

  zip_file = ExportZipFile(

@@ -493,12 +489,13 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  compress_type=zipstream.ZIP_DEFLATED,
  )

+ progress = ChildProgressBuilder.build(progress_builder, child_total=100)
  exported_applications = self.export_multiple_applications(
  applications,
  zip_file,
  import_export_config,
  storage,
- export_app_progress,
+ progress.create_child_builder(represents_progress=90),
  )

  manifest_data = self.create_manifest(

@@ -507,14 +504,13 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):

  self.create_manifest_signature(manifest_data, zip_file)

+ progress.set_progress(90, state=EXPORT_WORKSPACE_CREATE_ARCHIVE)
  with _create_storage_dir_if_missing_and_open(
  export_file_path, storage
  ) as files_buffer:
  for chunk in zip_file:
  files_buffer.write(chunk)

- progress.increment(by=8)

  with storage.open(export_file_path, "rb") as zip_file_handle:
  with ZipFile(zip_file_handle, "r") as zip_file:
  self.validate_manifest(zip_file)

@@ -523,7 +519,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  resource.is_valid = True
  resource.save()

- progress.increment(by=2)
+ progress.set_progress(100)
  return resource

  def list_exports(self, performed_by: AbstractUser, workspace_id: int) -> QuerySet:

@@ -819,8 +815,8 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  id_mapping,
  zip_file,
  storage,
  progress_builder=progress.create_child_builder(represents_progress=1),
  )
  progress.increment()
  return imported_application

  def import_multiple_applications(

@@ -965,6 +961,8 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  if storage.exists(import_tmp_path):
  self.clean_storage(import_tmp_path, storage)

+ progress.set_progress(2)

  if not storage.exists(import_file_path):
  raise ImportExportResourceDoesNotExist(
  f"The file {import_file_path} does not exist."

@@ -975,7 +973,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  f"The file {import_file_path} is invalid or corrupted."
  )

- progress.increment(by=5)
+ progress.set_progress(5)

  with storage.open(import_file_path, "rb") as zip_file_handle:
  with ZipFile(zip_file_handle, "r") as zip_file:

@@ -987,6 +985,8 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):

  self.extract_files_from_zip(import_tmp_path, zip_file, storage)

+ progress.set_progress(15)

  try:
  self.validate_checksums(manifest_data, import_tmp_path, storage)
  except Exception as e: # noqa

@@ -1026,9 +1026,10 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
  type_name=application_type.type,
  )

+ progress.set_progress(95)
  self.clean_storage(import_tmp_path, storage)
  self.clean_storage(import_file_path, storage)
- progress.increment(by=95)
+ progress.set_progress(100)

  return imported_applications
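Taken together, the handler changes replace the old one-increment-per-application scheme with a budgeted progress tree: the workspace export builds a root progress of 100, hands 90 of it to `export_multiple_applications` (which splits that into one child unit per application via `create_child_builder`), and spends the remainder on writing and validating the archive. The sketch below is a rough, simplified illustration of that budget, assuming the backend package is importable; the `on_update` callback and the hard-coded three applications are made up for the example, not part of the handler.

    from baserow.core.utils import ChildProgressBuilder, Progress


    def on_update(percentage, state):
        # Illustrative callback; the real job updates the export job row instead.
        print(f"{percentage}% {state or ''}".rstrip())


    root = Progress(total=100)
    root.register_updated_event(on_update)

    # Roughly 90% of the job is the applications themselves, one unit per app.
    apps_progress = ChildProgressBuilder.build(
        root.create_child_builder(represents_progress=90), child_total=3
    )
    for _ in range(3):
        apps_progress.increment(state="exporting")

    # The remainder covers writing the archive and validating the result.
    root.set_progress(90, state="create-archive")
    root.set_progress(100)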
@@ -404,6 +404,7 @@ class ApplicationType(
  import_export_config: ImportExportConfig,
  files_zip: Optional[ExportZipFile] = None,
  storage: Optional[Storage] = None,
+ progress_builder: Optional[ChildProgressBuilder] = None,
  ):
  """
  Exports the application to a serialized dict that can be imported by the

@@ -418,10 +419,14 @@ class ApplicationType(
  :type storage: Storage or None
  :param import_export_config: provides configuration options for the
  import/export process to customize how it works.
+ :param progress_builder: If provided will be used to build a child progress bar
+ and report on this methods progress to the parent of the progress_builder.
  :return: The exported and serialized application.
  :rtype: dict
  """

+ progress = ChildProgressBuilder.build(progress_builder, child_total=1)

  structure = CoreExportSerializedStructure.application(
  id=application.id,
  name=application.name,

@@ -432,6 +437,7 @@ class ApplicationType(
  structure = self.export_serialized_structure_with_registry(
  application.get_root(), application, structure, import_export_config
  )
+ progress.increment()
  return structure

  def import_serialized(
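The contract for application types, as extended here, is: accept an optional `progress_builder`, build a child progress for the work the type performs, and increment it as serialization proceeds (the base class now does this with a `child_total` of 1). The following is a hedged sketch of how a custom application type could follow the same pattern; the `MyThingApplicationType` class, its `things` relation and the `baserow.core.registries` import path are assumptions for illustration, not code from this merge request.

    from typing import Optional

    from baserow.core.registries import ApplicationType  # assumed base class location
    from baserow.core.storage import ExportZipFile
    from baserow.core.utils import ChildProgressBuilder


    class MyThingApplicationType(ApplicationType):
        type = "my_thing"  # hypothetical type name

        def export_serialized(
            self,
            application,
            import_export_config,
            files_zip: Optional[ExportZipFile] = None,
            storage=None,
            progress_builder: Optional[ChildProgressBuilder] = None,
        ):
            things = list(application.things.all())  # hypothetical related objects
            # One unit per thing, plus one for the base structure.
            progress = ChildProgressBuilder.build(
                progress_builder, child_total=len(things) + 1
            )
            serialized = super().export_serialized(
                application, import_export_config, files_zip=files_zip, storage=storage
            )
            progress.increment()
            serialized["things"] = [
                {"id": thing.id} for thing in things  # hypothetical serialization
            ]
            # Report one unit per serialized thing so parents see granular updates.
            for _ in things:
                progress.increment(state="exporting")
            return serialized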
@@ -741,14 +741,20 @@ class Progress:
  """

  self.total = total
- self.progress = 0
+ self._progress = 0
+ self._last_progress = 0
+ self._last_state = None
  self.updated_events = []
  self.parent = parent
  self.represents_progress = represents_progress
  self.last_parent_progress = 0

+ @property
+ def progress(self):
+ return math.ceil(self._progress)

  def reset_with_total(self, total):
- self.progress = 0
+ self._progress = self._last_progress = 0
  self.total = total

  def register_updated_event(self, event):

@@ -773,7 +779,7 @@ class Progress:
  "Downloading files."
  """

- self.set_progress(self.progress + by, state)
+ self.set_progress(self._progress + by, state)

  def set_progress(self, progress: int, state: Optional[str] = None):
  """

@@ -785,23 +791,35 @@ class Progress:
  "Downloading files."
  """

- self.progress = progress
  if self.total == 0:
  return

+ new_progress = min(progress, self.total)
+ new_progress_ratio = Decimal(new_progress) / self.total
+ new_progress_perc = math.ceil(new_progress_ratio * 100)

+ last_progress_ratio = Decimal(self._progress) / self.total
+ last_progress_perc = math.ceil(last_progress_ratio * 100)

+ last_progress = self._progress
+ last_state = self._last_state

+ self._progress = new_progress

  if self.parent is not None:
  if self.progress >= self.total:
  new_parent_progress = self.represents_progress
  else:
- new_parent_progress = math.ceil(
- (Decimal(self.progress) / self.total) * self.represents_progress
- )
- diff = new_parent_progress - self.last_parent_progress
- self.last_parent_progress = new_parent_progress
- if diff != 0:
+ last_parent_progress = self.last_parent_progress
+ new_parent_progress = new_progress_ratio * self.represents_progress
+ diff = new_parent_progress - last_parent_progress
+ if diff > 0 or state != last_state:
+ self.last_parent_progress = new_parent_progress
  self.parent.increment(diff, state)

- percentage = math.ceil(Decimal(self.progress) / self.total * 100)
- for event in self.updated_events:
- event(percentage, state)
+ # Run all the callbacks only if something has changed.
+ if new_progress_perc > last_progress_perc or state != last_state:
+ self._last_progress = last_progress
+ self._last_state = state
+ for event in self.updated_events:
+ event(new_progress_perc, state)

  def create_child(self, represents_progress: int, total: int):
  """

@@ -820,7 +838,9 @@ class Progress:
  parent=self, represents_progress=represents_progress, total=total
  )

- if child_progress.progress >= child_progress.total:
+ # If the total is 0, we can just increment the parent progress by the
+ # represents progress because the child progress will never increment.
+ if total == 0:
  self.increment(represents_progress)

  return child_progress
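The practical effect of the reworked `set_progress` is that registered callbacks no longer fire on every increment, only when the rounded percentage or the state actually changes; parents are likewise only incremented on a positive difference or a state change. A small sketch of that behaviour, assuming the backend package is importable:

    from baserow.core.utils import Progress

    updates = []
    progress = Progress(total=1000)
    progress.register_updated_event(
        lambda percentage, state: updates.append((percentage, state))
    )

    for _ in range(1000):
        progress.increment()

    # Only one callback per whole percentage point: 100 updates instead of 1000.
    print(len(updates), updates[-1])  # -> 100 (100, None)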
@@ -269,62 +269,77 @@ def test_nested_progress():
  assert args[0][0] == 20
  assert args[0][1] is None

- sub_progress_2 = progress.create_child(20, 5 * 120)
+ sub_progress_2 = progress.create_child(20, 5 * 100)
  for i in range(0, 5):
- for i2 in range(0, 100):
+ for i2 in range(0, 75):
  sub_progress_2.increment()
- sub_progress_2.increment(by=20, state="Sub progress 2 second")
+ sub_progress_2.increment(by=25, state="Sub progress 2 second")

+ assert mock_event.call_count == 21
  args = mock_event.call_args
+ # called only once everytime the percentange or the state change
+ assert [arg[0][0] for arg in mock_event.call_args_list] == list(range(20, 41))
  assert args[0][0] == 40
- assert args[0][1] is None
+ assert args[0][1] == "Sub progress 2 second"

  sub_progress_3_builder = progress.create_child_builder(40)
  sub_progress_3 = ChildProgressBuilder.build(sub_progress_3_builder, 100)

+ # 10% of 40% -> 4%
  sub_progress_3_1 = sub_progress_3.create_child(10, 4)
  sub_progress_3_1.increment(by=2)
  sub_progress_3_1.increment()
  sub_progress_3_1.increment()

+ assert mock_event.call_count == 24
+ assert [arg[0][0] for arg in mock_event.call_args_list[-3:]] == [42, 43, 44]
  args = mock_event.call_args
  assert args[0][0] == 44
  assert args[0][1] is None

+ # 10% of 40% -> 4%
  sub_progress_3_2 = sub_progress_3.create_child(10, 11)
  for i in range(0, 11):
  sub_progress_3_2.increment()

  args = mock_event.call_args
+ assert mock_event.call_count == 28
+ assert [arg[0][0] for arg in mock_event.call_args_list[-4:]] == [45, 46, 47, 48]
  assert args[0][0] == 48
  assert args[0][1] is None

+ # 10% of 40% -> 4%
  sub_progress_3.create_child(10, 0)
+ assert mock_event.call_count == 29
  args = mock_event.call_args
  assert args[0][0] == 52
  assert args[0][1] is None

+ # 10% of 40% -> 4%
  sub_progress_3_4_builder = sub_progress_3.create_child_builder(10)
  ChildProgressBuilder.build(sub_progress_3_4_builder, 0)
+ assert mock_event.call_count == 30
  args = mock_event.call_args
  assert args[0][0] == 56
  assert args[0][1] is None

- sub_progress_3_5 = sub_progress_3.create_child(55, 5 * 120)
+ # 55% of 40% -> 22%
+ sub_progress_3_5 = sub_progress_3.create_child(55, 5 * 100)
  for i in range(0, 5):
- sub_progress_3_5_1 = sub_progress_3_5.create_child(100, 100)
+ sub_progress_3_5_1 = sub_progress_3_5.create_child(75, 100)
  for i2 in range(0, 100):
  sub_progress_3_5_1.increment()
- sub_progress_3_5.increment(20)
+ sub_progress_3_5.increment(25)

  args = mock_event.call_args
  assert args[0][0] == 78
  assert args[0][1] is None

+ # 5% of 40% -> 2%
  sub_progress_3_6 = sub_progress_3.create_child(5, 1)
  sub_progress_3_6.increment()

- assert mock_event.call_count == 52
+ assert mock_event.call_count == 53
  args = mock_event.call_args
  assert args[0][0] == 80
  assert args[0][1] is None
@@ -0,0 +1,7 @@
+ {
+ "type": "feature",
+ "message": "Improve the export progress update",
+ "issue_number": 3184,
+ "bullet_points": [],
+ "created_at": "2024-11-20"
+ }
@@ -48,11 +48,11 @@
  :fixed-items="true"
  >
  <DropdownItem
- v-for="outputType in outputTypes"
- :key="outputType.getType()"
- :name="outputType.getName()"
- :value="outputType.getType()"
- :description="outputType.getDescription()"
+ v-for="outputTypeItem in outputTypes"
+ :key="outputTypeItem.getType()"
+ :name="outputTypeItem.getName()"
+ :value="outputTypeItem.getType()"
+ :description="outputTypeItem.getDescription()"
  />
  </Dropdown>
  <template v-if="changedOutputType" #warning>
@@ -71,6 +71,11 @@ import job from '@baserow/modules/core/mixins/job'
  import ExportWorkspaceForm from '@baserow/modules/core/components/export/ExportWorkspaceForm'
  import { ExportApplicationsJobType } from '@baserow/modules/core/jobTypes'
  import ExportWorkspaceListItem from '@baserow/modules/core/components/export/ExportWorkspaceListItem.vue'
+ import {
+ EXPORT_SERIALIZED_EXPORTING,
+ EXPORT_SERIALIZED_EXPORTING_TABLE,
+ EXPORT_WORKSPACE_CREATE_ARCHIVE,
+ } from '@baserow/modules/core/constants'

  const WORKSPACE_EXPORTS_LIMIT = 5

@@ -182,8 +187,16 @@ export default {
  },

  getCustomHumanReadableJobState(jobState) {
- if (jobState.startsWith('importing')) {
- return this.$t('exportWorkspaceModal.importingState')
+ if (jobState === EXPORT_SERIALIZED_EXPORTING) {
+ return this.$t('exportWorkspaceModal.exportingState')
  }
+ if (jobState.startsWith(EXPORT_SERIALIZED_EXPORTING_TABLE)) {
+ return this.$t('exportWorkspaceModal.exportingTableState', {
+ table: jobState.replace(EXPORT_SERIALIZED_EXPORTING_TABLE, ''),
+ })
+ }
+ if (jobState === EXPORT_WORKSPACE_CREATE_ARCHIVE) {
+ return this.$t('exportWorkspaceModal.exportingCreateArchiveState')
+ }
  return ''
  },
@@ -111,6 +111,7 @@ import ImportWorkspaceForm from '@baserow/modules/core/components/import/ImportW
  import { notifyIf } from '@baserow/modules/core/utils/error'
  import { ImportApplicationsJobType } from '@baserow/modules/core/jobTypes'
  import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
+ import { IMPORT_SERIALIZED_IMPORTING } from '@baserow/modules/core/constants'

  const STAGES = {
  UPLOAD: 'upload',

@@ -290,6 +291,13 @@ export default {
  this.handleError(error)
  }
  },

+ getCustomHumanReadableJobState(jobState) {
+ if (jobState.startsWith(IMPORT_SERIALIZED_IMPORTING)) {
+ return this.$t('importWorkspaceModal.importingState')
+ }
+ return ''
+ },
  },
  }
  </script>
web-frontend/modules/core/constants.js (new file, 4 lines)

@@ -0,0 +1,4 @@
+ export const IMPORT_SERIALIZED_IMPORTING = 'importing'
+ export const EXPORT_SERIALIZED_EXPORTING = 'exporting'
+ export const EXPORT_SERIALIZED_EXPORTING_TABLE = 'exporting-table-'
+ export const EXPORT_WORKSPACE_CREATE_ARCHIVE = 'create-archive'
@@ -135,7 +135,9 @@
  "reset": "Start new",
  "cancel": "Cancel",
  "download": "Download",
- "importingState": "Importing",
+ "exportingState": "Exporting",
+ "exportingTableState": "Exporting table: {table}",
+ "exportingCreateArchiveState": "Creating archive",
  "noExports": "No exports for this workspace yet.",
  "created": "created"
  },

@@ -166,7 +168,8 @@
  "invalidResourceTitle": "Invalid resource",
  "invalidResourceMessage": "The provided file is not a valid Baserow export.",
  "untrustedPublicKeyTitle": "Untrusted signature",
- "untrustedPublicKeyMessage": "The provided file is signed with an untrusted public key. Ask your administrator to add the public key to the list of trusted keys or disable the signature verification to be able to import this file."
+ "untrustedPublicKeyMessage": "The provided file is signed with an untrusted public key. Ask your administrator to add the public key to the list of trusted keys or disable the signature verification to be able to import this file.",
+ "importingState": "Importing"
  },
  "dashboardWorkspace": {
  "createApplication": "Create new"