diff --git a/.env.example b/.env.example
index 84fee4894..b2c94bdae 100644
--- a/.env.example
+++ b/.env.example
@@ -110,6 +110,7 @@ DATABASE_NAME=baserow
 # BASEROW_USER_LOG_ENTRY_RETENTION_DAYS=
 # BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES=
 # BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS=
+# BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT=
 # BASEROW_MAX_ROW_REPORT_ERROR_COUNT=
 # BASEROW_JOB_EXPIRATION_TIME_LIMIT=
 # BASEROW_JOBS_FRONTEND_POLLING_TIMEOUT_MS=
diff --git a/backend/src/baserow/config/settings/base.py b/backend/src/baserow/config/settings/base.py
index 9f18984b9..4f0513572 100644
--- a/backend/src/baserow/config/settings/base.py
+++ b/backend/src/baserow/config/settings/base.py
@@ -1107,6 +1107,12 @@ BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS = int(
     os.getenv("BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS", 5)
 )
 
+# The maximum number of rows that will be exported when exporting a table.
+# If `0` then all rows will be exported.
+BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT = int(
+    os.getenv("BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT", 0)
+)
+
 PERMISSION_MANAGERS = [
     "view_ownership",
     "core",
diff --git a/backend/src/baserow/contrib/database/application_types.py b/backend/src/baserow/contrib/database/application_types.py
index a3650216c..2481e98b1 100755
--- a/backend/src/baserow/contrib/database/application_types.py
+++ b/backend/src/baserow/contrib/database/application_types.py
@@ -5,6 +5,7 @@ from functools import partial
 from typing import Any, Dict, List, Optional, Set, Tuple
 from zipfile import ZipFile
 
+from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.core.files.storage import Storage
 from django.core.management.color import no_style
@@ -122,9 +123,10 @@ class DatabaseApplicationType(ApplicationType):
                 )
 
             serialized_rows = []
+            row_count_limit = settings.BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT
             if not import_export_config.only_structure:
                 model = table.get_model(fields=fields, add_dependencies=False)
-                row_queryset = model.objects.all()
+                row_queryset = model.objects.all()[: row_count_limit or None]
                 if table.created_by_column_added:
                     row_queryset = row_queryset.select_related("created_by")
                 if table.last_modified_by_column_added:
diff --git a/backend/src/baserow/core/import_export/handler.py b/backend/src/baserow/core/import_export/handler.py
index 60ae9832b..254c2ce2b 100644
--- a/backend/src/baserow/core/import_export/handler.py
+++ b/backend/src/baserow/core/import_export/handler.py
@@ -898,8 +898,23 @@ class ImportExportHandler(metaclass=baserow_trace_methods(tracer)):
         id_mapping: Dict[str, Any] = {}
         next_application_order_value = Application.get_last_order(workspace)
 
-        for applications in manifest["applications"].values():
-            for application_manifest in applications["items"]:
+        # Import application types in descending order of their
+        # `import_application_priority`, so that types other types may
+        # depend on (e.g. databases, whose tables can be referenced by
+        # other applications) are imported before everything else.
+        def application_priority_sort(application_to_sort):
+            return application_type_registry.get(
+                application_to_sort
+            ).import_application_priority
+
+        prioritized_applications = sorted(
+            manifest["applications"].keys(), key=application_priority_sort, reverse=True
+        )
+
+        for application_type in prioritized_applications:
+            for application_manifest in manifest["applications"][application_type][
+                "items"
+            ]:
                 imported_application = self.import_application(
                     workspace,
                     id_mapping,
diff --git a/backend/tests/baserow/contrib/database/import_export/test_export_applications.py b/backend/tests/baserow/contrib/database/import_export/test_export_applications.py
index 5035c241a..028fb7d88 100644
--- a/backend/tests/baserow/contrib/database/import_export/test_export_applications.py
+++ b/backend/tests/baserow/contrib/database/import_export/test_export_applications.py
@@ -1,6 +1,8 @@
 import json
 import zipfile
 
+from django.test.utils import override_settings
+
 import pytest
 
 from baserow.contrib.database.rows.handler import RowHandler
@@ -268,3 +270,60 @@ def test_exported_files_checksum(
 
         calculated_checksum = handler.compute_checksum(file_path, storage)
         assert database_file_checksum == calculated_checksum
+
+
+@pytest.mark.import_export_workspace
+@pytest.mark.django_db(transaction=True)
+@override_settings(BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT=1)
+def test_export_with_rows_limit(
+    data_fixture,
+    api_client,
+    tmpdir,
+    settings,
+    use_tmp_media_root,
+):
+    user = data_fixture.create_user()
+    table = data_fixture.create_database_table(user=user)
+    text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
+
+    row_handler = RowHandler()
+    row_handler.create_row(
+        user=user,
+        table=table,
+        values={
+            text_field.id: "row #1",
+        },
+    )
+    row_handler.create_row(
+        user=user,
+        table=table,
+        values={
+            text_field.id: "row #2",
+        },
+    )
+
+    resource = ImportExportHandler().export_workspace_applications(
+        user=user,
+        applications=[table.database],
+        import_export_config=ImportExportConfig(
+            include_permission_data=False,
+            reduce_disk_space_usage=True,
+            only_structure=False,
+        ),
+    )
+
+    file_path = tmpdir.join(
+        settings.EXPORT_FILES_DIRECTORY, resource.get_archive_name()
+    )
+    assert file_path.isfile()
+
+    with zipfile.ZipFile(file_path, "r") as zip_ref:
+        with zip_ref.open(MANIFEST_NAME) as json_file:
+            json_data = json.load(json_file)
+            database_export = json_data["applications"]["database"]["items"][0]
+
+            db_export_path = database_export["files"]["data"]["file"]
+            with zip_ref.open(db_export_path) as db_data_file:
+                db_data = json.loads(db_data_file.read())
+
+            assert len(db_data["tables"][0]["rows"]) == 1
diff --git a/changelog/entries/unreleased/feature/3180_add_new_limit_settings_for_import_export_workspace.json b/changelog/entries/unreleased/feature/3180_add_new_limit_settings_for_import_export_workspace.json
new file mode 100644
index 000000000..06fbc55a7
--- /dev/null
+++ b/changelog/entries/unreleased/feature/3180_add_new_limit_settings_for_import_export_workspace.json
@@ -0,0 +1,7 @@
+{
+    "type": "feature",
+    "message": "Add a new setting to limit the number of table rows exported per table when exporting workspace applications",
+    "issue_number": 3180,
+    "bullet_points": [],
+    "created_at": "2024-11-12"
+}
diff --git a/docker-compose.local-build.yml b/docker-compose.local-build.yml
index 31a91f1e0..17908e672 100644
--- a/docker-compose.local-build.yml
+++ b/docker-compose.local-build.yml
@@ -109,6 +109,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:
diff --git a/docker-compose.no-caddy.yml b/docker-compose.no-caddy.yml
index 432747af6..c602f1894 100644
--- a/docker-compose.no-caddy.yml
+++ b/docker-compose.no-caddy.yml
@@ -128,6 +128,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:
diff --git a/docker-compose.yml b/docker-compose.yml
index d0d6998f8..364004375 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -125,6 +125,7 @@ x-backend-variables: &backend-variables
   BASEROW_USER_LOG_ENTRY_RETENTION_DAYS:
   BASEROW_IMPORT_EXPORT_RESOURCE_CLEANUP_INTERVAL_MINUTES:
   BASEROW_IMPORT_EXPORT_RESOURCE_REMOVAL_AFTER_DAYS:
+  BASEROW_IMPORT_EXPORT_TABLE_ROWS_COUNT_LIMIT:
   BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
   BASEROW_JOB_SOFT_TIME_LIMIT:
   BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS: