Mirror of https://gitlab.com/bramw/baserow.git

Resolve "Add new limit setting for import/export workspace"

Przemyslaw Kukulski 2024-11-12 18:02:45 +00:00 committed by Davide Silvestri
parent 4e29d1afba
commit f5dbec08bf
9 changed files with 96 additions and 3 deletions
.env.example
backend/src/baserow/config/settings
backend/src/baserow/contrib/database
backend/src/baserow/core/import_export
backend/tests/baserow/contrib/database/import_export
changelog/entries/unreleased/feature
docker-compose.local-build.yml
docker-compose.no-caddy.yml
docker-compose.yml

.env.example

@@ -110,6 +110,7 @@ DATABASE_NAME=baserow
 # BASEROW_USER_LOG_ENTRY_RETENTION_DAYS=
 # BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES=
 # BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS=
+# BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT=
 # BASEROW_MAX_ROW_REPORT_ERROR_COUNT=
 # BASEROW_JOB_EXPIRATION_TIME_LIMIT=
 # BASEROW_JOBS_FRONTEND_POLLING_TIMEOUT_MS=
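Note: to activate the cap, uncomment the new line and give it a value. For example, BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT=1000 (an arbitrary illustrative value) would export at most 1,000 rows per table, while leaving it unset or at 0 keeps exports unlimited.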

backend/src/baserow/config/settings

@@ -1107,6 +1107,12 @@ BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS = int(
     os.getenv("BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS", 5)
 )
+
+# The maximum number of rows that will be exported when exporting a table.
+# If `0` then all rows will be exported.
+BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT = int(
+    os.getenv("BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT", 0)
+)
 
 PERMISSION_MANAGERS = [
     "view_ownership",
     "core",

backend/src/baserow/contrib/database

@@ -5,6 +5,7 @@ from functools import partial
 from typing import Any, Dict, List, Optional, Set, Tuple
 from zipfile import ZipFile
 
+from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.core.files.storage import Storage
 from django.core.management.color import no_style
@@ -122,9 +123,10 @@ class DatabaseApplicationType(ApplicationType):
         )
         serialized_rows = []
+        row_count_limit = settings.BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT
 
         if not import_export_config.only_structure:
             model = table.get_model(fields=fields, add_dependencies=False)
-            row_queryset = model.objects.all()
+            row_queryset = model.objects.all()[: row_count_limit or None]
             if table.created_by_column_added:
                 row_queryset = row_queryset.select_related("created_by")
             if table.last_modified_by_column_added:
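The slice deserves a note: Python's "x or None" turns the falsy limit of 0 into None, and slicing with a stop of None applies no bound, so the queryset only gets a SQL LIMIT when the setting is positive. A minimal sketch with a plain list standing in for the queryset:

# Stand-in for model.objects.all(); Django querysets slice the same way,
# compiling the stop value into a SQL LIMIT clause.
rows = list(range(5))
assert rows[:3 or None] == [0, 1, 2]          # limit 3 -> first three rows
assert rows[:0 or None] == [0, 1, 2, 3, 4]    # limit 0 -> no limit at all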

backend/src/baserow/core/import_export

@@ -898,8 +898,23 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
         id_mapping: Dict[str, Any] = {}
         next_application_order_value = Application.get_last_order(workspace)
 
-        for applications in manifest["applications"].values():
-            for application_manifest in applications["items"]:
+        # Sort the serialized applications so that we import:
+        # Database first
+        # Applications second
+        # Everything else after that.
+        def application_priority_sort(application_to_sort):
+            return application_type_registry.get(
+                application_to_sort
+            ).import_application_priority
+
+        prioritized_applications = sorted(
+            manifest["applications"].keys(), key=application_priority_sort, reverse=True
+        )
+
+        for application_type in prioritized_applications:
+            for application_manifest in manifest["applications"][application_type][
+                "items"
+            ]:
                 imported_application = self.import_application(
                     workspace,
                     id_mapping,
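In other words, application types are now imported in descending import_application_priority order, so databases, which other application types can reference, are created first. A standalone sketch of the ordering with hypothetical priority values (the real ones live on each ApplicationType subclass as import_application_priority):

# Hypothetical priorities for illustration only.
priorities = {"database": 2, "builder": 1, "dashboard": 0}

prioritized = sorted(priorities, key=priorities.get, reverse=True)
assert prioritized == ["database", "builder", "dashboard"]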

backend/tests/baserow/contrib/database/import_export

@@ -1,6 +1,8 @@
 import json
 import zipfile
 
+from django.test.utils import override_settings
+
 import pytest
 
 from baserow.contrib.database.rows.handler import RowHandler
@@ -268,3 +270,60 @@ def test_exported_files_checksum(
     calculated_checksum = handler.compute_checksum(file_path, storage)
 
     assert database_file_checksum == calculated_checksum
+
+
+@pytest.mark.import_export_workspace
+@pytest.mark.django_db(transaction=True)
+@override_settings(BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT=1)
+def test_export_with_rows_limit(
+    data_fixture,
+    api_client,
+    tmpdir,
+    settings,
+    use_tmp_media_root,
+):
+    user = data_fixture.create_user()
+    table = data_fixture.create_database_table(user=user)
+    text_field = data_fixture.create_text_field(
+        table=table, name="text_field", order=0
+    )
+
+    row_handler = RowHandler()
+    row_handler.create_row(
+        user=user,
+        table=table,
+        values={
+            text_field.id: "row #1",
+        },
+    )
+    row_handler.create_row(
+        user=user,
+        table=table,
+        values={
+            text_field.id: "row #2",
+        },
+    )
+
+    resource = ImportExportHandler().export_workspace_applications(
+        user=user,
+        applications=[table.database],
+        import_export_config=ImportExportConfig(
+            include_permission_data=False,
+            reduce_disk_space_usage=True,
+            only_structure=False,
+        ),
+    )
+
+    file_path = tmpdir.join(
+        settings.EXPORT_FILES_DIRECTORY, resource.get_archive_name()
+    )
+    assert file_path.isfile()
+
+    with zipfile.ZipFile(file_path, "r") as zip_ref:
+        with zip_ref.open(MANIFEST_NAME) as json_file:
+            json_data = json.load(json_file)
+            database_export = json_data["applications"]["database"]["items"][0]
+            db_export_path = database_export["files"]["data"]["file"]
+            with zip_ref.open(db_export_path) as db_data_file:
+                db_data = json.loads(db_data_file.read())
+                assert len(db_data["tables"][0]["rows"]) == 1

changelog/entries/unreleased/feature

@@ -0,0 +1,7 @@
+{
+    "type": "feature",
+    "message": "Add new limit settings for import export workspace applications",
+    "issue_number": 3180,
+    "bullet_points": [],
+    "created_at": "2024-11-12"
+}

docker-compose.local-build.yml

@@ -109,6 +109,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:

docker-compose.no-caddy.yml

@@ -128,6 +128,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:

docker-compose.yml

@@ -125,6 +125,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:
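The empty-valued mappings above pass the variable through from the host environment into the backend containers. To confirm a running deployment picked the value up, a minimal check, assuming a configured Baserow Django environment (e.g. python manage.py shell inside the backend container):

from django.conf import settings

# 0 means unlimited; any positive integer caps rows per exported table.
print(settings.BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT)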