
Fix import progress bar percentage

Davide Silvestri 2025-03-10 09:38:02 +01:00
parent e93ca5dd67
commit b84e9494e9
9 changed files with 107 additions and 42 deletions
backend/src/baserow/contrib/database
backend/src/baserow/core/import_export
backend/src/baserow/core/jobs
changelog/entries/unreleased/bug
web-frontend/modules/core
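
All of the hunks below lean on the same parent/child progress pattern: a Progress tracks units out of a total, and a ChildProgressBuilder carves out a fixed share of a parent for a callee to fill. The sketch below is a simplified stand-in for baserow.core.utils.Progress and ChildProgressBuilder; only the call shapes (build, create_child_builder, increment) are taken from the diff, and the internals are assumptions for illustration.

# Simplified stand-ins for baserow.core.utils.Progress and
# ChildProgressBuilder; internals are assumptions for illustration only.


class Progress:
    """Counts work units out of a total and mirrors them into a parent's share."""

    def __init__(self, total, parent=None, represents_progress=0):
        self.total = total
        self.progress = 0
        self.state = None
        self._parent = parent
        self._represents = represents_progress
        self._reported = 0  # units already pushed up to the parent

    def increment(self, by=1, state=None):
        self.progress = min(self.progress + by, self.total)
        if state is not None:
            self.state = state
        if self._parent is not None:
            # Scale local progress into the slice of the parent we represent.
            owed = round(self.progress / self.total * self._represents)
            if owed > self._reported:
                self._parent.increment(owed - self._reported, state=self.state)
                self._reported = owed

    @property
    def percentage(self):
        return round(self.progress / self.total * 100)

    def create_child_builder(self, represents_progress):
        return ChildProgressBuilder(self, represents_progress)


class ChildProgressBuilder:
    def __init__(self, parent, represents_progress):
        self.parent = parent
        self.represents_progress = represents_progress

    @staticmethod
    def build(progress_builder, child_total):
        # Mirrors the call sites below: a child Progress whose child_total
        # units map onto the share its builder represents in the parent.
        if progress_builder is None:
            return Progress(child_total)
        return Progress(
            child_total,
            parent=progress_builder.parent,
            represents_progress=progress_builder.represents_progress,
        )


# The essence of the fix: size child_total by the real unit count
# (one unit per table plus one for the database) instead of a fixed 99.
root = Progress(total=100)
tables = ["customers", "orders"]
child = ChildProgressBuilder.build(
    root.create_child_builder(represents_progress=80), child_total=1 + len(tables)
)
for table_name in tables:
    child.increment(state=f"importing-table-structure-{table_name}")
child.increment()  # the database unit
print(root.percentage)  # 80: the import consumed exactly its 80% share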

View file

@@ -33,12 +33,12 @@ from baserow.core.registries import (
 )
 from baserow.core.storage import ExportZipFile
 from baserow.core.trash.handler import TrashHandler
-from baserow.core.utils import ChildProgressBuilder, grouper
+from baserow.core.utils import ChildProgressBuilder, Progress, grouper
 from .constants import (
     EXPORT_SERIALIZED_EXPORTING_TABLE,
     IMPORT_SERIALIZED_IMPORTING,
-    IMPORT_SERIALIZED_IMPORTING_TABLE,
+    IMPORT_SERIALIZED_IMPORTING_TABLE_DATA,
+    IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE,
 )
 from .data_sync.registries import data_sync_type_registry
 from .db.atomic import read_repeatable_single_database_atomic_transaction
@@ -293,7 +293,7 @@ class DatabaseApplicationType(ApplicationType):
         import_export_config: ImportExportConfig,
         external_table_fields_to_import: List[Tuple[Table, Dict[str, Any]]],
         deferred_fk_update_collector: DeferredForeignKeyUpdater,
-        progress: ChildProgressBuilder,
+        progress: Progress,
     ) -> ImportedFields:
         """
         Import the fields from the serialized data in the correct order based on their
@@ -308,6 +308,10 @@ class DatabaseApplicationType(ApplicationType):
             also be imported. These fields will be imported into the existing table
             provided in the first item in the tuple, the second being the serialized
             field to import.
+        :param deferred_fk_update_collector: A collector that collects all the foreign
+            keys to update them later when the model with all the fields is created.
+        :param progress: A progress used to report progress of the import.
+        :return: The imported fields.
         """

         field_cache = FieldCache()
@@ -348,7 +352,10 @@ class DatabaseApplicationType(ApplicationType):
            if table_instance not in table_fields_by_name:
                table_fields_by_name[table_instance] = {}
            table_fields_by_name[table_instance][field_instance.name] = field_instance
-           progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+           table_name = serialized_table["name"]
+           progress.increment(
+               state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
+           )
            return field_instance

         fields_without_dependencies: List[Field] = []
@@ -404,7 +411,7 @@
                 deferred_fk_update_collector,
             )
             SearchHandler.after_field_created(external_field)
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+            progress.increment()

         deferred_fk_update_collector.run_deferred_fk_updates(
             id_mapping, "database_fields"
@@ -537,7 +544,9 @@
             self._create_table_schema(
                 serialized_table, already_created_through_table_names
             )
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+            progress.increment(
+                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{serialized_table['name']}"
+            )

         # Now that everything is in place we can start filling the table with the rows
         # in an efficient matter by using the bulk_create functionality.
@@ -594,9 +603,9 @@
         user_email_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredForeignKeyUpdater,
         id_mapping: Dict[str, Any],
-        files_zip: Optional[ZipFile] = None,
-        storage: Optional[Storage] = None,
-        progress: Optional[ChildProgressBuilder] = None,
+        files_zip: ZipFile | None,
+        storage: Storage | None,
+        progress: Progress,
     ):
         """
         Imports the rows of a table from the serialized data in an efficient manner.
@@ -610,7 +619,7 @@
             imported files from
         :param storage: An optional place to persist any user files if importing files
             from a the above file_zip.
-        :param progress: A progress builder used to report progress of the import.
+        :param progress: A progress used to report progress of the import.
         """

         table_cache: Dict[str, Any] = {}
@@ -668,7 +677,7 @@
                 rows_to_be_inserted.append(row_instance)
                 progress.increment(
-                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE}{serialized_table['id']}"
+                    state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_DATA}{serialized_table['name']}"
                 )

         # We want to insert the rows in bulk because there could potentially be
@@ -678,7 +687,7 @@
             table_model.objects.bulk_create(chunk, batch_size=512)
             progress.increment(
                 len(chunk),
-                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE}{serialized_table['id']}",
+                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_DATA}{serialized_table['name']}",
             )

         # Every row import can have additional objects that must be inserted,
@@ -811,7 +820,7 @@
             field_type.after_rows_imported(
                 field, field_cache=imported_fields.field_cache
             )
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+            progress.increment()

     def _create_table_schema(
         self, serialized_table, already_created_through_table_names
@@ -869,10 +878,13 @@
         """

         table = serialized_table["_object"]
+        table_name = serialized_table["name"]
         for serialized_view in serialized_table["views"]:
             view_type = view_type_registry.get(serialized_view["type"])
             view_type.import_serialized(table, serialized_view, id_mapping, files_zip)
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+            progress.increment(
+                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
+            )

     def _import_tables(
         self,
@@ -906,7 +918,10 @@
             serialized_table["_object"] = table_instance
             serialized_table["field_instances"] = []
             imported_tables.append(table_instance)
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING)
+            table_name = serialized_table["name"]
+            progress.increment(
+                state=f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
+            )

         return imported_tables
@@ -924,7 +939,7 @@
         Imports a database application exported by the `export_serialized` method.
         """

-        database_progress, table_progress = 1, 99
+        database_progress, table_progress = 1, len(serialized_values["tables"])
         progress = ChildProgressBuilder.build(
             progress_builder, child_total=database_progress + table_progress
         )
@@ -940,10 +955,7 @@
         )
         database = application.specific

-        if not serialized_values["tables"]:
-            progress.increment(state=IMPORT_SERIALIZED_IMPORTING, by=table_progress)
-        else:
+        if serialized_values["tables"]:
             self.import_tables_serialized(
                 database,
                 serialized_values["tables"],
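
Why the old fixed split misbehaved: import_serialized always claimed 1 + 99 units, while the actual increments tracked real objects (tables, fields, views, row chunks), so the reported percentage bore little relation to the work done. The arithmetic below is illustrative only, assuming one increment per table; the old code's exact unit accounting is not fully visible in this hunk.

tables = ["customers", "orders"]

old_total = 1 + 99            # database_progress, table_progress = 1, 99
old_done = 1 + len(tables)    # assumed: one unit per table actually arrives
print(round(old_done / old_total * 100))   # 3, the bar sticks near zero

new_total = 1 + len(tables)   # total now matches the real unit count
new_done = 1 + len(tables)
print(round(new_done / new_total * 100))   # 100, the bar completes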

View file

@@ -1,5 +1,6 @@
 IMPORT_SERIALIZED_IMPORTING = "importing"
-IMPORT_SERIALIZED_IMPORTING_TABLE = "importing-table-"
+IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE = "importing-table-structure-"
+IMPORT_SERIALIZED_IMPORTING_TABLE_DATA = "importing-table-data-"
 EXPORT_SERIALIZED_EXPORTING = "exporting"
 EXPORT_SERIALIZED_EXPORTING_TABLE = "exporting-table-"
 EXPORT_WORKSPACE_CREATE_ARCHIVE = "create-archive"
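
These constants now act as state prefixes rather than fixed states: the backend appends the table name, so a table called orders produces the strings below, and the frontend strips the prefix back off for display. A quick Python illustration, with the constants copied from this file:

IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE = "importing-table-structure-"
IMPORT_SERIALIZED_IMPORTING_TABLE_DATA = "importing-table-data-"

table_name = "orders"

# What progress.increment(state=...) receives in the hunks above:
structure_state = f"{IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE}{table_name}"
data_state = f"{IMPORT_SERIALIZED_IMPORTING_TABLE_DATA}{table_name}"

print(structure_state)  # importing-table-structure-orders
print(data_state)       # importing-table-data-orders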

View file

@@ -63,7 +63,7 @@ from baserow.core.user_files.exceptions import (
     InvalidFileStreamError,
 )
 from baserow.core.user_files.handler import UserFileHandler
-from baserow.core.utils import ChildProgressBuilder, Progress, grouper, stream_size
+from baserow.core.utils import ChildProgressBuilder, grouper, stream_size
 from baserow.version import VERSION

 tracer = trace.get_tracer(__name__)
@@ -780,7 +780,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
         import_export_config: ImportExportConfig,
         zip_file: ZipFile,
         storage: Storage,
-        progress: Progress,
+        progress_builder: Optional[ChildProgressBuilder] = None,
     ) -> Application:
         """
         Imports a single application into a workspace from the provided data.
@@ -815,7 +815,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
             id_mapping,
             zip_file,
             storage,
-            progress_builder=progress.create_child_builder(represents_progress=1),
+            progress_builder=progress_builder,
         )

         return imported_application
@@ -828,7 +828,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
         import_export_config: ImportExportConfig,
         zip_file: ZipFile,
         storage: Storage,
-        progress: Progress,
+        progress_builder: Optional[ChildProgressBuilder] = None,
     ) -> List[Application]:
         """
         Imports multiple applications into a workspace from the provided application
@@ -866,6 +866,14 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
             manifest["applications"].keys(), key=application_priority_sort, reverse=True
         )

+        application_count = sum(
+            len(manifest["applications"][application_type]["items"])
+            for application_type in prioritized_applications
+        )
+        progress = ChildProgressBuilder.build(
+            progress_builder, child_total=application_count
+        )
+
         for application_type in prioritized_applications:
             for application_manifest in manifest["applications"][application_type][
                 "items"
@@ -880,7 +888,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
                         import_export_config,
                         zip_file,
                         storage,
-                        progress,
+                        progress.create_child_builder(represents_progress=1),
                     )
                 except Exception as exc:  # noqa
                     # Trash the already imported applications so the user won't see
@@ -898,7 +906,11 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
         return imported_applications

     def extract_files_from_zip(
-        self, tmp_import_path: str, zip_file: ZipFile, storage: Storage
+        self,
+        tmp_import_path: str,
+        zip_file: ZipFile,
+        storage: Storage,
+        progress_builder: Optional[ChildProgressBuilder] = None,
     ):
         """
         Extracts files from a zip archive to a specified temporary import path.
@@ -910,13 +922,20 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
             extracted.
         :param zip_file: The ZipFile instance containing the files to be extracted.
         :param storage: The storage instance used to save the extracted files.
+        :param progress_builder: A progress builder that allows for publishing progress.
         """

-        for file_info in zip_file.infolist():
+        file_list = zip_file.infolist()
+        progress = ChildProgressBuilder.build(
+            progress_builder, child_total=len(file_list)
+        )
+
+        for file_info in file_list:
             extracted_file_path = join(tmp_import_path, file_info.filename)
             with zip_file.open(file_info) as extracted_file:
                 file_content = extracted_file.read()
                 storage.save(extracted_file_path, ContentFile(file_content))
+            progress.increment()
@@ -983,9 +1002,12 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
                 self.mark_resource_invalid(resource)
                 raise

-        self.extract_files_from_zip(import_tmp_path, zip_file, storage)
-        progress.set_progress(15)
+        self.extract_files_from_zip(
+            import_tmp_path,
+            zip_file,
+            storage,
+            progress.create_child_builder(represents_progress=10),
+        )

         try:
             self.validate_checksums(manifest_data, import_tmp_path, storage)
@@ -1011,7 +1033,7 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
             import_export_config,
             zip_file,
             storage,
-            progress,
+            progress.create_child_builder(represents_progress=80),
         )

         for application in imported_applications:
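
The recurring shape in this file after the change: callers stop sharing a single Progress and instead hand a ChildProgressBuilder down, and every callee builds its own child sized by its actual work count. A hypothetical helper in that style, reusing the sketch classes from the top of this page (process_items is illustrative, not a Baserow function):

from typing import Optional


def process_items(items, progress_builder: Optional[ChildProgressBuilder] = None):
    # Size the child by the real work count, as extract_files_from_zip
    # and import_applications now do.
    progress = ChildProgressBuilder.build(progress_builder, child_total=len(items))
    for item in items:
        ...  # do the actual work for one item
        progress.increment()


# Wiring, mirroring the fixed 10/80 phase split in the workspace import:
root = Progress(total=100)
process_items(["a.csv", "b.csv"], root.create_child_builder(represents_progress=10))
print(root.percentage)  # 10: this phase consumed exactly its share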

View file

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Any

 from django.contrib.auth import get_user_model
@@ -113,7 +113,7 @@ class Job(CreatedAndUpdatedOnMixin, PolymorphicContentTypeMixin, models.Model):
         progress = {
             "progress_percentage": self.progress_percentage,
             "state": self.state,
-            "updated_on": datetime.now(),
+            "updated_on": datetime.now(tz=timezone.utc),
         }
         cache.set(job_progress_key(self.id), progress, timeout=None)
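
The model change swaps a naive timestamp for an explicit UTC one, presumably so the cached progress payload stays unambiguous when read back in a timezone-aware context (Django projects with USE_TZ expect aware datetimes). A minimal standalone repro of the difference:

from datetime import datetime, timezone

naive = datetime.now()                   # wall-clock time, no tzinfo attached
aware = datetime.now(tz=timezone.utc)    # an unambiguous instant in UTC

assert naive.tzinfo is None
assert aware.tzinfo is timezone.utc

# Mixing the two raises, which is how naive values bite later:
try:
    aware - naive
except TypeError as exc:
    print(exc)  # can't subtract offset-naive and offset-aware datetimes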

View file

@@ -0,0 +1,8 @@
+{
+    "type": "bug",
+    "message": "Fix the progress bar percentage when importing applications into the workspace.",
+    "domain": "database",
+    "issue_number": null,
+    "bullet_points": [],
+    "created_at": "2025-03-07"
+}

View file

@@ -186,9 +186,6 @@
     },
     getCustomHumanReadableJobState(jobState) {
-      if (jobState === EXPORT_SERIALIZED_EXPORTING) {
-        return this.$t('exportWorkspaceModal.exportingState')
-      }
       if (jobState.startsWith(EXPORT_SERIALIZED_EXPORTING_TABLE)) {
         return this.$t('exportWorkspaceModal.exportingTableState', {
           table: jobState.replace(EXPORT_SERIALIZED_EXPORTING_TABLE, ''),
@@ -197,6 +194,9 @@
       if (jobState === EXPORT_WORKSPACE_CREATE_ARCHIVE) {
         return this.$t('exportWorkspaceModal.exportingCreateArchiveState')
       }
+      if (jobState === EXPORT_SERIALIZED_EXPORTING) {
+        return this.$t('exportWorkspaceModal.exportingState')
+      }
       return ''
     },
   },

View file

@@ -111,7 +111,11 @@ import ImportWorkspaceForm from '@baserow/modules/core/components/import/ImportW
 import { notifyIf } from '@baserow/modules/core/utils/error'
 import { ImportApplicationsJobType } from '@baserow/modules/core/jobTypes'
 import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
-import { IMPORT_SERIALIZED_IMPORTING } from '@baserow/modules/core/constants'
+import {
+  IMPORT_SERIALIZED_IMPORTING,
+  IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE,
+  IMPORT_SERIALIZED_IMPORTING_TABLE_DATA,
+} from '@baserow/modules/core/constants'

 const STAGES = {
   UPLOAD: 'upload',
@@ -292,6 +296,19 @@
     },
     getCustomHumanReadableJobState(jobState) {
+      if (jobState.startsWith(IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE)) {
+        return this.$t('importWorkspaceModal.importingTableStructure', {
+          table: jobState.replace(
+            IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE,
+            ''
+          ),
+        })
+      }
+      if (jobState.startsWith(IMPORT_SERIALIZED_IMPORTING_TABLE_DATA)) {
+        return this.$t('importWorkspaceModal.importingTableData', {
+          table: jobState.replace(IMPORT_SERIALIZED_IMPORTING_TABLE_DATA, ''),
+        })
+      }
       if (jobState.startsWith(IMPORT_SERIALIZED_IMPORTING)) {
         return this.$t('importWorkspaceModal.importingState')
       }
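
Note the order of the checks above: IMPORT_SERIALIZED_IMPORTING ('importing') is a string prefix of both new table states, so the generic startsWith fallback must run last or it would swallow the specific states. A condensed Python demonstration of that shadowing, using the constants from this commit:

IMPORT_SERIALIZED_IMPORTING = "importing"
IMPORT_SERIALIZED_IMPORTING_TABLE_DATA = "importing-table-data-"

state = IMPORT_SERIALIZED_IMPORTING_TABLE_DATA + "orders"

# The generic prefix matches too, so checked first it would hide the table:
assert state.startswith(IMPORT_SERIALIZED_IMPORTING)

# Specific prefixes before the generic fallback keep the table name:
for prefix, label in [
    (IMPORT_SERIALIZED_IMPORTING_TABLE_DATA, "Importing data"),
    (IMPORT_SERIALIZED_IMPORTING, "Importing..."),
]:
    if state.startswith(prefix):
        suffix = state.removeprefix(prefix)
        print(f"{label}: {suffix}" if suffix else label)  # Importing data: orders
        break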

View file

@@ -1,4 +1,7 @@
 export const IMPORT_SERIALIZED_IMPORTING = 'importing'
+export const IMPORT_SERIALIZED_IMPORTING_TABLE_STRUCTURE =
+  'importing-table-structure-'
+export const IMPORT_SERIALIZED_IMPORTING_TABLE_DATA = 'importing-table-data-'
 export const EXPORT_SERIALIZED_EXPORTING = 'exporting'
 export const EXPORT_SERIALIZED_EXPORTING_TABLE = 'exporting-table-'
 export const EXPORT_WORKSPACE_CREATE_ARCHIVE = 'create-archive'

View file

@@ -135,8 +135,8 @@
     "reset": "Start new",
     "cancel": "Cancel",
     "download": "Download",
-    "exportingState": "Exporting",
-    "exportingTableState": "Exporting table: {table}",
+    "exportingState": "Exporting...",
+    "exportingTableState": "Exporting: {table}",
     "exportingCreateArchiveState": "Creating archive",
     "noExports": "No exports for this workspace yet.",
     "created": "created"
@@ -169,7 +169,9 @@
     "invalidResourceMessage": "The provided file is not a valid Baserow export.",
     "untrustedPublicKeyTitle": "Untrusted signature",
     "untrustedPublicKeyMessage": "The provided file is signed with an untrusted public key. Ask your administrator to add the public key to the list of trusted keys or disable the signature verification to be able to import this file.",
-    "importingState": "Importing"
+    "importingState": "Importing...",
+    "importingTableStructure": "Creating: {table}",
+    "importingTableData": "Importing data: {table}"
   },
   "dashboardWorkspace": {
     "createApplication": "Create new"