diff --git a/backend/src/baserow/config/settings/base.py b/backend/src/baserow/config/settings/base.py
index 4f0f0e6ae..1044e39ca 100644
--- a/backend/src/baserow/config/settings/base.py
+++ b/backend/src/baserow/config/settings/base.py
@@ -110,6 +110,7 @@ MIDDLEWARE = [
     "django.contrib.sessions.middleware.SessionMiddleware",
     "django.middleware.common.CommonMiddleware",
     "django.middleware.csrf.CsrfViewMiddleware",
+    "baserow.core.cache.LocalCacheMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "baserow.api.user_sources.middleware.AddUserSourceUserMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
@@ -1344,3 +1345,5 @@ BASEROW_DEFAULT_ZIP_COMPRESS_LEVEL = 5
 BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE = int(
     os.getenv("BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE", "") or 10
 )
+
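+# Enables the request-scoped in-memory cache provided by
+# `baserow.core.cache.local_cache`.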
+BASEROW_USE_LOCAL_CACHE = str_to_bool(os.getenv("BASEROW_USE_LOCAL_CACHE", "true"))
diff --git a/backend/src/baserow/contrib/builder/api/data_sources/views.py b/backend/src/baserow/contrib/builder/api/data_sources/views.py
index ecccf828c..1c83d424f 100644
--- a/backend/src/baserow/contrib/builder/api/data_sources/views.py
+++ b/backend/src/baserow/contrib/builder/api/data_sources/views.py
@@ -186,7 +186,11 @@ class DataSourcesView(APIView):
 
         page = PageHandler().get_page(page_id)
 
-        before = DataSourceHandler().get_data_source(before_id) if before_id else None
+        before = (
+            DataSourceHandler().get_data_source(before_id, specific=False)
+            if before_id
+            else None
+        )
 
         service_type = service_type_registry.get(type_name) if type_name else None
 
@@ -423,7 +427,7 @@ class MoveDataSourceView(APIView):
 
         before = None
         if before_id:
-            before = DataSourceHandler().get_data_source(before_id)
+            before = DataSourceHandler().get_data_source(before_id, specific=False)
 
         moved_data_source = DataSourceService().move_data_source(
             request.user, data_source, before
diff --git a/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py b/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py
index 400baa3b4..06bdef853 100644
--- a/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py
+++ b/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py
@@ -201,7 +201,9 @@ class DataSourceDataProviderType(DataProviderType):
             return {}
 
         try:
-            data_source = DataSourceHandler().get_data_source(data_source_id)
+            data_source = DataSourceHandler().get_data_source(
+                data_source_id, with_cache=True
+            )
         except DataSourceDoesNotExist as exc:
             # The data source has probably been deleted
             raise InvalidBaserowFormula() from exc
@@ -272,7 +274,9 @@ class DataSourceContextDataProviderType(DataProviderType):
             return {}
 
         try:
-            data_source = DataSourceHandler().get_data_source(data_source_id)
+            data_source = DataSourceHandler().get_data_source(
+                data_source_id, with_cache=True
+            )
         except DataSourceDoesNotExist as exc:
             # The data source has probably been deleted
             raise InvalidBaserowFormula() from exc
@@ -376,7 +380,9 @@ class CurrentRecordDataProviderType(DataProviderType):
             return {}
 
         try:
-            data_source = DataSourceHandler().get_data_source(data_source_id)
+            data_source = DataSourceHandler().get_data_source(
+                data_source_id, with_cache=True
+            )
         except DataSourceDoesNotExist as exc:
             # The data source is probably not accessible so we raise an invalid formula
             raise InvalidBaserowFormula() from exc
diff --git a/backend/src/baserow/contrib/builder/data_sources/builder_dispatch_context.py b/backend/src/baserow/contrib/builder/data_sources/builder_dispatch_context.py
index 1b30f0bc7..d2f3906b0 100644
--- a/backend/src/baserow/contrib/builder/data_sources/builder_dispatch_context.py
+++ b/backend/src/baserow/contrib/builder/data_sources/builder_dispatch_context.py
@@ -24,6 +24,8 @@ CACHE_KEY_PREFIX = "used_properties_for_page"
 
 User = get_user_model()
 
+SENTINEL = "__no_results__"
+
 
 class BuilderDispatchContext(DispatchContext):
     own_properties = [
diff --git a/backend/src/baserow/contrib/builder/data_sources/handler.py b/backend/src/baserow/contrib/builder/data_sources/handler.py
index 1f8154ad1..c0513149c 100644
--- a/backend/src/baserow/contrib/builder/data_sources/handler.py
+++ b/backend/src/baserow/contrib/builder/data_sources/handler.py
@@ -17,6 +17,7 @@ from baserow.contrib.builder.data_sources.models import DataSource
 from baserow.contrib.builder.formula_importer import import_formula
 from baserow.contrib.builder.pages.models import Page
 from baserow.contrib.builder.types import DataSourceDict
+from baserow.core.cache import local_cache
 from baserow.core.integrations.models import Integration
 from baserow.core.integrations.registries import integration_type_registry
 from baserow.core.services.handler import ServiceHandler
@@ -36,13 +37,50 @@ class DataSourceHandler:
         self.service_handler = ServiceHandler()
 
     def get_data_source(
-        self, data_source_id: int, base_queryset: Optional[QuerySet] = None, cache=None
+        self,
+        data_source_id: int,
+        base_queryset: Optional[QuerySet] = None,
+        specific=True,
+        with_cache=False,
     ) -> DataSource:
         """
         Returns a data_source instance from the database.
 
         :param data_source_id: The ID of the data_source.
         :param base_queryset: The base queryset to use to build the query.
+        :param specific: Return the specific version of related objects like the
+          service and the integration.
+        :param with_cache: Whether this method should use the short
+          cache for data_sources.
+        :raises DataSourceDoesNotExist: If the data_source can't be found.
+        :return: The data_source instance.
+        """
+
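+        # The cache is only used when no base_queryset is given, because the
+        # result would otherwise depend on that queryset.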
+        if with_cache and not base_queryset:
+            return local_cache.get(
+                f"ab_data_source_{data_source_id}_{specific}",
+                lambda: self._get_data_source(
+                    data_source_id, base_queryset, specific=specific
+                ),
+            )
+        else:
+            return self._get_data_source(
+                data_source_id, base_queryset, specific=specific
+            )
+
+    def _get_data_source(
+        self,
+        data_source_id: int,
+        base_queryset: Optional[QuerySet] = None,
+        specific=True,
+    ) -> DataSource:
+        """
+        Base version of `get_data_source` without the caching capabilities.
+
+        :param data_source_id: The ID of the data_source.
+        :param base_queryset: The base queryset to use to build the query.
+        :param specific: Return the specific version of related objects like the
+          service and the integration.
         :raises DataSourceDoesNotExist: If the data_source can't be found.
         :return: The data_source instance.
         """
@@ -51,12 +89,24 @@ class DataSourceHandler:
             base_queryset if base_queryset is not None else DataSource.objects.all()
         )
 
+        queryset = queryset.select_related("page__builder__workspace")
+
         try:
-            data_source = queryset.select_related(
-                "page", "page__builder", "page__builder__workspace", "service"
-            ).get(id=data_source_id)
-        except DataSource.DoesNotExist:
-            raise DataSourceDoesNotExist()
+            if specific:
+                data_source = queryset.get(id=data_source_id)
+                if data_source.service_id:
+                    specific_service = ServiceHandler().get_service(
+                        data_source.service_id, specific=True
+                    )
+                    data_source.__class__.service.field.set_cached_value(
+                        data_source, specific_service
+                    )
+            else:
+                data_source = queryset.select_related("service__integration").get(
+                    id=data_source_id
+                )
+        except DataSource.DoesNotExist as exc:
+            raise DataSourceDoesNotExist() from exc
 
         return data_source
 
@@ -83,26 +133,23 @@ class DataSourceHandler:
             base_queryset=queryset,
         )
 
-    def _query_data_sources(self, base_queryset: QuerySet, specific=True):
+    def _query_data_sources(
+        self, base_queryset: QuerySet, specific=True, with_cache=False
+    ):
         """
         Query data sources from the base queryset.
 
         :param base_queryset: The base QuerySet to query from.
         :param specific: A boolean flag indicating whether to include specific service
           instance.
+        :param with_cache: Whether this method should populate the short
+          cache for data_sources.
         :return: A list of queried data sources.
         """
 
-        data_source_queryset = base_queryset.select_related(
-            "service",
-            "page__builder__workspace",
-            "service__integration__application",
-        )
+        data_source_queryset = base_queryset.select_related("page__builder__workspace")
 
         if specific:
-            data_source_queryset = data_source_queryset.select_related(
-                "service__content_type"
-            )
             data_sources = list(data_source_queryset.all())
 
             # Get all service ids to get them from DB in one query
@@ -124,9 +171,19 @@ class DataSourceHandler:
                 if data_source.service_id:
                     data_source.service = specific_services_map[data_source.service_id]
 
-            return data_sources
         else:
-            return data_source_queryset.all()
+            data_source_queryset = data_source_queryset.select_related(
+                "service__integration__application",
+            )
+            data_sources = data_source_queryset.all()
+
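+        # Pre-populate the local cache so that subsequent calls to
+        # `get_data_source(..., with_cache=True)` reuse these instances.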
+        if with_cache:
+            for ds in data_sources:
+                local_cache.get(
+                    f"ab_data_source_{ds.id}_{specific}",
+                    ds,
+                )
+        return data_sources
 
     def get_data_sources(
         self,
@@ -134,6 +191,7 @@ class DataSourceHandler:
         base_queryset: Optional[QuerySet] = None,
         with_shared: Optional[bool] = False,
         specific: Optional[bool] = True,
+        with_cache=False,
     ) -> Union[QuerySet[DataSource], Iterable[DataSource]]:
         """
         Gets all the specific data_sources of a given page.
@@ -144,6 +202,8 @@ class DataSourceHandler:
           on the same builder.
         :param specific: If True, return the specific version of the service related
           to the data source
+        :param with_cache: Whether this method should populate the short
+          cache for data_sources.
         :return: The data_sources of that page.
         """
 
@@ -159,13 +219,18 @@ class DataSourceHandler:
         else:
             data_source_queryset = data_source_queryset.filter(page=page)
 
-        return self._query_data_sources(data_source_queryset, specific=specific)
+        return self._query_data_sources(
+            data_source_queryset,
+            specific=specific,
+            with_cache=with_cache,
+        )
 
     def get_builder_data_sources(
         self,
         builder: "Builder",
         base_queryset: Optional[QuerySet] = None,
         specific: Optional[bool] = True,
+        with_cache=False,
     ) -> Union[QuerySet[DataSource], Iterable[DataSource]]:
         """
         Gets all the specific data_sources of a given builder.
@@ -174,6 +239,8 @@ class DataSourceHandler:
         :param base_queryset: The base queryset to use to build the query.
         :param specific: If True, return the specific version of the service related
           to the data source
+        :param with_cache: Whether this method should populate the short
+          cache for data_sources.
         :return: The data_sources of that builder.
         """
 
@@ -183,7 +250,11 @@ class DataSourceHandler:
 
         data_source_queryset = data_source_queryset.filter(page__builder=builder)
 
-        return self._query_data_sources(data_source_queryset, specific=specific)
+        return self._query_data_sources(
+            data_source_queryset,
+            specific=specific,
+            with_cache=with_cache,
+        )
 
     def get_data_sources_with_cache(
         self,
@@ -192,26 +263,25 @@ class DataSourceHandler:
         specific: bool = True,
     ):
         """
-        Gets all the specific data_sources of a given page. This version cache the
-        data sources of the page onto the page object to improve perfs.
+        Gets all the data sources of a given page. This version caches the data
+        sources of the page in the local cache to improve performance.
 
         :param page: The page that holds the data_source.
         :param base_queryset: The base queryset to use to build the query.
         :param specific: If True, return the specific version of the service related
-          to the integration
+          to the data source
         :return: The data_sources of the page.
         """
 
-        if not hasattr(page, "_data_sources"):
-            data_sources = DataSourceHandler().get_data_sources(
+        return local_cache.get(
+            f"ab_data_sources_{page.id}_{specific}",
+            lambda: DataSourceHandler().get_data_sources(
                 page,
                 base_queryset=base_queryset,
                 specific=specific,
                 with_shared=True,
-            )
-            setattr(page, "_data_sources", data_sources)
-
-        return getattr(page, "_data_sources")
+            ),
+        )
 
     def get_data_source_with_cache(
         self,
diff --git a/backend/src/baserow/contrib/builder/data_sources/service.py b/backend/src/baserow/contrib/builder/data_sources/service.py
index 366390e25..4e04b6889 100644
--- a/backend/src/baserow/contrib/builder/data_sources/service.py
+++ b/backend/src/baserow/contrib/builder/data_sources/service.py
@@ -89,13 +89,15 @@ class DataSourceService:
         return self.handler.get_data_sources(page, base_queryset=user_data_sources)
 
     def get_builder_data_sources(
-        self, user: AbstractUser, builder: "Builder"
+        self, user: AbstractUser, builder: "Builder", with_cache=False
     ) -> List[DataSource]:
         """
         Gets all the data_sources of a given builder visible to the given user.
 
         :param user: The user trying to get the data_sources.
         :param page: The builder that holds the data_sources.
+        :param with_cache: Whether this method should populate the short
+          cache for data_sources.
         :return: The data_sources of that builder.
         """
 
@@ -107,7 +109,9 @@ class DataSourceService:
         )
 
         return self.handler.get_builder_data_sources(
-            builder, base_queryset=user_data_sources
+            builder,
+            base_queryset=user_data_sources,
+            with_cache=with_cache,
         )
 
     def create_data_source(
diff --git a/backend/src/baserow/contrib/builder/domains/handler.py b/backend/src/baserow/contrib/builder/domains/handler.py
index 7f256e8f1..08feea365 100644
--- a/backend/src/baserow/contrib/builder/domains/handler.py
+++ b/backend/src/baserow/contrib/builder/domains/handler.py
@@ -57,7 +57,7 @@ class DomainHandler:
         """
 
         if base_queryset is None:
-            base_queryset = Domain.objects
+            base_queryset = Domain.objects.all()
 
         return specific_iterator(base_queryset.filter(builder=builder))
 
@@ -73,7 +73,7 @@ class DomainHandler:
         try:
             domain = (
                 Domain.objects.exclude(published_to=None)
-                .select_related("published_to", "builder")
+                .select_related("published_to", "builder__workspace")
                 .only("published_to", "builder")
                 .get(domain_name=domain_name)
             )
diff --git a/backend/src/baserow/contrib/builder/domains/permission_manager.py b/backend/src/baserow/contrib/builder/domains/permission_manager.py
index 2d50f2f9a..2ea4827c1 100755
--- a/backend/src/baserow/contrib/builder/domains/permission_manager.py
+++ b/backend/src/baserow/contrib/builder/domains/permission_manager.py
@@ -1,3 +1,5 @@
+import functools
+
 from django.contrib.auth import get_user_model
 
 from baserow.contrib.builder.data_sources.operations import (
@@ -11,6 +13,7 @@ from baserow.contrib.builder.workflow_actions.operations import (
     DispatchBuilderWorkflowActionOperationType,
     ListBuilderWorkflowActionsPageOperationType,
 )
+from baserow.core.cache import local_cache
 from baserow.core.operations import ReadApplicationOperationType
 from baserow.core.registries import PermissionManagerType, operation_type_registry
 from baserow.core.subjects import AnonymousUserSubjectType, UserSubjectType
@@ -56,25 +59,54 @@ class AllowPublicBuilderManagerType(PermissionManagerType):
         ListUserSourcesApplicationOperationType.type,
     ]
 
+    def get_builder_from_id(self, builder_id):
+        """
+        Returns the builder for the given id. Can be a cached version.
+        """
+
+        def get_builder_if_exists():
+            try:
+                return Builder.objects.get(id=builder_id)
+            except Builder.DoesNotExist:
+                return None
+
+        return local_cache.get(f"ab_builder_{builder_id}", get_builder_if_exists)
+
+    def get_builder_from_instance(self, instance, property_name):
+        """
+        Returns the builder referenced by the given property of the instance. Uses
+        the relation already loaded on the instance when available, otherwise falls
+        back to the cached lookup by id.
+        """
+
+        prop_id_name = f"{property_name}_id"
+
+        if getattr(instance.__class__, property_name).is_cached(instance):
+            return local_cache.get(
+                f"ab_builder_{getattr(instance, prop_id_name)}",
+                lambda: getattr(instance, property_name),
+            )
+        else:
+            return self.get_builder_from_id(getattr(instance, prop_id_name))
+
     def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
         result = {}
 
         for check in checks:
             operation_type = operation_type_registry.get(check.operation_name)
             if operation_type.type in self.page_level_operations:
-                builder = check.context.builder
+                builder = self.get_builder_from_instance(check.context, "builder")
             elif operation_type.type in self.sub_page_level_operations:
-                builder = check.context.page.builder
+                builder = self.get_builder_from_instance(check.context.page, "builder")
             elif (
                 operation_type.type in self.sub_application_level_operations
-                and isinstance(check.context.application.specific, Builder)
+                and self.get_builder_from_id(check.context.application_id)
             ):
-                builder = check.context.application.specific
+                builder = self.get_builder_from_id(check.context.application_id)
             elif (
                 operation_type.type in self.application_level_operations
-                and isinstance(check.context.specific, Builder)
+                and self.get_builder_from_id(check.context.id)
             ):
-                builder = check.context.specific
+                builder = self.get_builder_from_id(check.context.id)
             else:
                 continue
 
@@ -100,7 +132,18 @@ class AllowPublicBuilderManagerType(PermissionManagerType):
                     # give access to specific data.
                     continue
 
-            if DomainHandler().get_domain_for_builder(builder) is not None:
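+            # A builder is considered public when it has been published: it then
+            # has no workspace and is reachable through a domain.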
+            def is_public_callback(b):
+                return (
+                    b.workspace is None
+                    and DomainHandler().get_domain_for_builder(b) is not None
+                )
+
+            is_public = local_cache.get(
+                f"ab_is_public_builder_{builder.id}",
+                functools.partial(is_public_callback, builder),
+            )
+
+            if is_public:
                 # it's a public builder, we allow it.
                 result[check] = True
 
diff --git a/backend/src/baserow/contrib/builder/elements/handler.py b/backend/src/baserow/contrib/builder/elements/handler.py
index f4612662f..648337003 100644
--- a/backend/src/baserow/contrib/builder/elements/handler.py
+++ b/backend/src/baserow/contrib/builder/elements/handler.py
@@ -124,9 +124,7 @@ class ElementHandler:
 
         try:
             element = (
-                queryset.select_related(
-                    "page", "page__builder", "page__builder__workspace"
-                )
+                queryset.select_related("page__builder__workspace")
                 .get(id=element_id)
                 .specific
             )
@@ -239,8 +237,7 @@ class ElementHandler:
         """
 
         if specific:
-            queryset = base_queryset.select_related("content_type")
-            elements = specific_iterator(queryset)
+            elements = specific_iterator(base_queryset)
         else:
             elements = base_queryset
 
diff --git a/backend/src/baserow/contrib/builder/elements/mixins.py b/backend/src/baserow/contrib/builder/elements/mixins.py
index 2bc98ca0c..321b1c887 100644
--- a/backend/src/baserow/contrib/builder/elements/mixins.py
+++ b/backend/src/baserow/contrib/builder/elements/mixins.py
@@ -502,7 +502,7 @@ class CollectionElementTypeMixin:
         # current instance
         data_source_id = instance.data_source_id or kwargs.get("data_source_id", None)
         data_source = (
-            DataSourceHandler().get_data_source(data_source_id)
+            DataSourceHandler().get_data_source(data_source_id, with_cache=True)
             if data_source_id
             else None
         )
diff --git a/backend/src/baserow/contrib/builder/formula_property_extractor.py b/backend/src/baserow/contrib/builder/formula_property_extractor.py
index f9167d740..5c9631239 100644
--- a/backend/src/baserow/contrib/builder/formula_property_extractor.py
+++ b/backend/src/baserow/contrib/builder/formula_property_extractor.py
@@ -204,19 +204,23 @@ def get_builder_used_property_names(
         BuilderWorkflowActionService,
     )
 
+    # We query the data sources first to populate the local data source cache
+    # used by the data providers when resolving the formulas below.
+    data_sources = DataSourceService().get_builder_data_sources(
+        user, builder, with_cache=True
+    )
+
     elements = list(ElementService().get_builder_elements(user, builder))
     element_map = {e.id: e for e in elements}
 
     element_results = get_element_property_names(elements, element_map)
 
+    ds_results = get_data_source_property_names(data_sources)
+
     workflow_actions = BuilderWorkflowActionService().get_builder_workflow_actions(
         user, builder
     )
     wa_results = get_workflow_action_property_names(workflow_actions, element_map)
 
-    data_sources = DataSourceService().get_builder_data_sources(user, builder)
-    ds_results = get_data_source_property_names(data_sources)
-
     results = {
         "internal": merge_dicts_no_duplicates(
             wa_results["internal"], ds_results["internal"]
diff --git a/backend/src/baserow/contrib/builder/handler.py b/backend/src/baserow/contrib/builder/handler.py
index bac5e96c7..c37fb9282 100644
--- a/backend/src/baserow/contrib/builder/handler.py
+++ b/backend/src/baserow/contrib/builder/handler.py
@@ -17,6 +17,9 @@ CACHE_KEY_PREFIX = "used_properties_for_page"
 BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS = 60
 
 
+SENTINEL = "__no_results__"
+
+
 class BuilderHandler:
     def get_builder(self, builder_id: int) -> Builder:
         """
@@ -87,11 +90,17 @@ class BuilderHandler:
         (required only by the backend).
         """
 
-        return safe_get_or_set_cache(
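+        # A sentinel is cached instead of `None`, because `None` could not be
+        # told apart from a cache miss; it is translated back to `None` below.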
+        def compute_properties():
+            properties = get_builder_used_property_names(user, builder)
+            return SENTINEL if properties is None else properties
+
+        result = safe_get_or_set_cache(
             self.get_builder_used_properties_cache_key(user, builder),
             self._get_builder_version_cache(builder),
-            default=lambda: get_builder_used_property_names(user, builder),
+            default=compute_properties,
             timeout=settings.BUILDER_PUBLICLY_USED_PROPERTIES_CACHE_TTL_SECONDS
             if builder.workspace_id
             else BUILDER_PREVIEW_USED_PROPERTIES_CACHE_TTL_SECONDS,
         )
+
+        return result if result != SENTINEL else None
diff --git a/backend/src/baserow/contrib/builder/migrations/0051_alter_builderworkflowaction_options.py b/backend/src/baserow/contrib/builder/migrations/0051_alter_builderworkflowaction_options.py
new file mode 100644
index 000000000..a9d9a3692
--- /dev/null
+++ b/backend/src/baserow/contrib/builder/migrations/0051_alter_builderworkflowaction_options.py
@@ -0,0 +1,16 @@
+# Generated by Django 5.0.9 on 2024-09-30 16:30
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("builder", "0050_page_query_params"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="builderworkflowaction",
+            options={"ordering": ("order", "id")},
+        ),
+    ]
diff --git a/backend/src/baserow/contrib/builder/workflow_actions/models.py b/backend/src/baserow/contrib/builder/workflow_actions/models.py
index 7b787898f..c5da0f79a 100644
--- a/backend/src/baserow/contrib/builder/workflow_actions/models.py
+++ b/backend/src/baserow/contrib/builder/workflow_actions/models.py
@@ -57,6 +57,9 @@ class BuilderWorkflowAction(
         queryset = BuilderWorkflowAction.objects.filter(page=page, element=None)
         return cls.get_highest_order_of_queryset(queryset) + 1
 
+    class Meta:
+        ordering = ("order", "id")
+
 
 class NotificationWorkflowAction(BuilderWorkflowAction):
     title = FormulaField(default="")
diff --git a/backend/src/baserow/contrib/integrations/local_baserow/mixins.py b/backend/src/baserow/contrib/integrations/local_baserow/mixins.py
index 181fed8f7..343dbe33e 100644
--- a/backend/src/baserow/contrib/integrations/local_baserow/mixins.py
+++ b/backend/src/baserow/contrib/integrations/local_baserow/mixins.py
@@ -214,7 +214,7 @@ class LocalBaserowTableServiceFilterableMixin:
                     service_filter.value = new_formula
                     yield service_filter
 
-    def get_queryset(
+    def get_table_queryset(
         self,
         service: ServiceSubClass,
         table: "Table",
@@ -233,7 +233,7 @@ class LocalBaserowTableServiceFilterableMixin:
         :return: the queryset with filters applied.
         """
 
-        queryset = super().get_queryset(service, table, dispatch_context, model)
+        queryset = super().get_table_queryset(service, table, dispatch_context, model)
         queryset = self.get_dispatch_filters(service, queryset, model, dispatch_context)
         dispatch_filters = dispatch_context.filters()
         if dispatch_filters is not None and dispatch_context.is_publicly_filterable:
@@ -254,15 +254,6 @@ class LocalBaserowTableServiceFilterableMixin:
             queryset = adhoc_filters.apply_to_queryset(model, queryset)
         return queryset
 
-    def enhance_queryset(self, queryset):
-        return (
-            super()
-            .enhance_queryset(queryset)
-            .prefetch_related(
-                "service_filters",
-            )
-        )
-
 
 class LocalBaserowTableServiceSortableMixin:
     """
@@ -356,7 +347,7 @@ class LocalBaserowTableServiceSortableMixin:
 
         return sort_ordering, queryset
 
-    def get_queryset(
+    def get_table_queryset(
         self,
         service: ServiceSubClass,
         table: "Table",
@@ -375,7 +366,7 @@ class LocalBaserowTableServiceSortableMixin:
         :return: the queryset with sortings applied.
         """
 
-        queryset = super().get_queryset(service, table, dispatch_context, model)
+        queryset = super().get_table_queryset(service, table, dispatch_context, model)
 
         adhoc_sort = dispatch_context.sortings()
         if adhoc_sort and dispatch_context.is_publicly_sortable:
@@ -426,7 +417,7 @@ class LocalBaserowTableServiceSearchableMixin:
         )
 
         if isinstance(used_fields_from_parent, list) and service.search_query:
-            fields = [fo["field"] for fo in self.get_table_field_objects(service)]
+            fields = [fo["field"] for fo in self.get_table_field_objects(service) or []]
             return used_fields_from_parent + [
                 f.tsv_db_column if SearchHandler.full_text_enabled() else f.db_column
                 for f in fields
@@ -459,7 +450,7 @@ class LocalBaserowTableServiceSearchableMixin:
                 f"The `search_query` formula can't be resolved: {exc}"
             ) from exc
 
-    def get_queryset(
+    def get_table_queryset(
         self,
         service: ServiceSubClass,
         table: "Table",
@@ -478,7 +469,7 @@ class LocalBaserowTableServiceSearchableMixin:
         :return: the queryset with the search query applied.
         """
 
-        queryset = super().get_queryset(service, table, dispatch_context, model)
+        queryset = super().get_table_queryset(service, table, dispatch_context, model)
         search_mode = SearchHandler.get_default_search_mode_for_table(table)
         service_search_query = self.get_dispatch_search(service, dispatch_context)
         if service_search_query:
diff --git a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
index a3ca3fe34..5a758ab12 100644
--- a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
+++ b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
@@ -90,6 +90,7 @@ from baserow.contrib.integrations.local_baserow.utils import (
     guess_cast_function_from_response_serializer_field,
     guess_json_type_from_response_serializer_field,
 )
+from baserow.core.cache import local_cache
 from baserow.core.formula import resolve_formula
 from baserow.core.formula.registries import formula_runtime_function_registry
 from baserow.core.handler import CoreHandler
@@ -203,11 +204,10 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
         if not model:
             model = self.get_table_model(service)
 
-        queryset = self.get_queryset(service, table, dispatch_context, model)
-
+        queryset = self.get_table_queryset(service, table, dispatch_context, model)
         return queryset
 
-    def get_queryset(
+    def get_table_queryset(
         self,
         service: ServiceSubClass,
         table: "Table",
@@ -225,7 +225,7 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
     def enhance_queryset(self, queryset):
         return queryset.select_related(
             "table__database__workspace",
-        ).prefetch_related("table__field_set")
+        )
 
     def resolve_service_formulas(
         self,
@@ -482,8 +482,9 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
         :return: A schema dictionary, or None if no `Table` has been applied.
         """
 
-        table = service.table
-        if not table:
+        field_objects = self.get_table_field_objects(service)
+
+        if field_objects is None:
             return None
 
         properties = {
@@ -495,8 +496,7 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
                 "searchable": False,
             }
         }
-
-        for field_object in self.get_table_field_objects(service):
+        for field_object in field_objects:
             # When a schema is being generated, we will exclude properties that the
             # Application creator did not actively configure. A configured property
             # is one that the Application is using in a formula, configuration
@@ -506,10 +506,9 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
                 and field_object["name"] not in allowed_fields
             ):
                 continue
-
             field_type = field_object["type"]
-            field = field_object["field"]
             # Only `TextField` has a default value at the moment.
+            field = field_object["field"]
             default_value = getattr(field, "text_default", None)
             field_serializer = field_type.get_serializer(field, FieldSerializer)
             properties[field.db_column] = {
@@ -566,37 +565,41 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
         Returns the model for the table associated with the given service.
         """
 
-        if getattr(service, "_table_model", None) is None:
-            table = service.table
+        if not service.table_id:
+            return None
 
-            if not table:
-                return None
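+        # Keyed by table_id so all services pointing at the same table share one
+        # generated model within the request-scoped cache.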
+        return local_cache.get(
+            f"integration_service_{service.table_id}_table_model",
+            service.table.get_model,
+        )
 
-            setattr(service, "_table_model", table.get_model())
-
-        return getattr(service, "_table_model")
-
-    def get_table_field_objects(self, service: LocalBaserowTableService) -> List[Dict]:
+    def get_table_field_objects(
+        self, service: LocalBaserowTableService
+    ) -> List[Dict] | None:
         """
-        Returns the fields of the table associated with the given service.
+        Returns the field objects of the table of the given service.
+
+        :param service: The service we want the fields for.
+        :returns: The field objects from the table model.
         """
 
         model = self.get_table_model(service)
 
         if model is None:
-            return []
+            return None
 
         return model.get_field_objects()
 
     def get_context_data(
         self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
     ) -> Dict[str, Any]:
-        table = service.table
-        if not table:
+        field_objects = self.get_table_field_objects(service)
+
+        if field_objects is None:
             return None
 
         ret = {}
-        for field_object in self.get_table_field_objects(service):
+        for field_object in field_objects:
             # When a context_data is being generated, we will exclude properties that
             # the Application creator did not actively configure. A configured property
             # is one that the Application is using in a formula, configuration
@@ -619,22 +622,22 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
     def get_context_data_schema(
         self, service: ServiceSubClass, allowed_fields: Optional[List[str]] = None
     ) -> Optional[Dict[str, Any]]:
-        table = service.table
-        if not table:
+        field_objects = self.get_table_field_objects(service)
+
+        if field_objects is None:
             return None
 
         properties = {}
-        fields = FieldHandler().get_fields(table, specific=True)
-
-        for field in fields:
-            if allowed_fields is not None and (field.db_column not in allowed_fields):
+        for field_object in field_objects:
+            if allowed_fields is not None and (
+                field_object["name"] not in allowed_fields
+            ):
                 continue
 
-            field_type = field_type_registry.get_by_model(field)
-            if field_type.can_have_select_options:
-                properties[field.db_column] = {
+            if field_object["type"].can_have_select_options:
+                properties[field_object["name"]] = {
                     "type": "array",
-                    "title": field.name,
+                    "title": field_object["field"].name,
                     "default": None,
                     "items": {
                         "type": "object",
@@ -703,7 +706,7 @@ class LocalBaserowViewServiceType(LocalBaserowTableServiceType):
         return (
             super()
             .enhance_queryset(queryset)
-            .select_related("view")
+            .select_related("view__content_type")
             .prefetch_related(
                 "view__viewfilter_set",
                 "view__filter_groups",
@@ -1076,7 +1079,8 @@ class LocalBaserowListRowsUserServiceType(
             # Maybe some fields were deleted in the meantime
             # Let's check we still have them
             available_fields = set(
-                [fo["name"] for fo in self.get_table_field_objects(service)] + ["id"]
+                [fo["name"] for fo in (self.get_table_field_objects(service) or [])]
+                + ["id"]
             )
 
             # Ensure that only used fields are fetched from the database.
diff --git a/backend/src/baserow/core/app_auth_providers/handler.py b/backend/src/baserow/core/app_auth_providers/handler.py
index 38422429b..c3b1aa1c4 100644
--- a/backend/src/baserow/core/app_auth_providers/handler.py
+++ b/backend/src/baserow/core/app_auth_providers/handler.py
@@ -30,8 +30,6 @@ class AppAuthProviderHandler(BaseAuthProviderHandler):
         base_queryset = cls.base_class.objects.filter(
             user_source=user_source
         ).select_related(
-            "user_source",
-            "user_source__application",
             "user_source__application__workspace",
         )
 
diff --git a/backend/src/baserow/core/cache.py b/backend/src/baserow/core/cache.py
new file mode 100644
index 000000000..04d919522
--- /dev/null
+++ b/backend/src/baserow/core/cache.py
@@ -0,0 +1,91 @@
+from contextlib import contextmanager
+from typing import Callable, TypeVar
+
+from django.conf import settings
+
+from asgiref.local import Local
+
+T = TypeVar("T")
+
+
+class LocalCache:
+    """
+    A thread-safe and async-safe local cache implementation using asgiref.Local.
+
+    This cache provides request-scoped storage in Django applications via the
+    LocalCacheMiddleware. It ensures that data is isolated between different requests
+    and is automatically cleaned up after each request cycle, preventing data leakage
+    and maintaining proper cache lifecycle management.
+    """
+
+    def __init__(self):
+        self._local = Local()
+
+    def get(self, key: str, default: T | Callable[[], T] = None) -> T:
+        """
+        Get a value from the cache. If the key is not present:
+        - If the default is callable, call it to get the value.
+        - Otherwise, use the plain default value.
+        The computed or plain default value is stored in the cache.
+        As the cache is shared, ensure the key is unique to prevent collision.
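+
+        Illustrative usage (the key and `expensive_lookup` are placeholders):
+
+            value = local_cache.get(
+                "my_unique_key", lambda: expensive_lookup()
+            )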
+        """
+
+        if not settings.BASEROW_USE_LOCAL_CACHE:
+            return default() if callable(default) else default
+
+        if not hasattr(self._local, "cache"):
+            self._local.cache = {}
+
+        cache = self._local.cache
+
+        if key not in cache:
+            value = default() if callable(default) else default
+            cache[key] = value
+
+        return cache[key]
+
+    def clear(self):
+        """
+        Clear all data from the cache.
+        """
+
+        if hasattr(self._local, "cache"):
+            del self._local.cache
+
+    @contextmanager
+    def context(self):
+        """
+        Context manager for automatic cache lifecycle management. Clears the cache
+        before entering the context and ensures cleanup after exiting, even if an
+        exception occurs.
+        """
+
+        self.clear()
+        try:
+            yield self
+        finally:
+            self.clear()
+
+
+local_cache = LocalCache()
+
+
+class LocalCacheMiddleware:
+    """
+    Django middleware for managing the lifecycle of LocalCache.
+
+    This middleware ensures that the cache is cleared before and after
+    each request, preventing data leakage between requests and maintaining
+    proper cleanup.
+
+    Usage:
+        Add to MIDDLEWARE in Django settings:
+        'baserow.core.cache.LocalCacheMiddleware'
+    """
+
+    def __init__(self, get_response):
+        self.get_response = get_response
+
+    def __call__(self, request):
+        with local_cache.context():
+            return self.get_response(request)
diff --git a/backend/src/baserow/core/db.py b/backend/src/baserow/core/db.py
index f27ef9cb6..fbb673143 100644
--- a/backend/src/baserow/core/db.py
+++ b/backend/src/baserow/core/db.py
@@ -81,7 +81,7 @@ def specific_iterator(
     objects with the least amount of queries. If a queryset is provided respects the
     annotations, select related and prefetch related of the provided query. This
     function is only compatible with models having the `PolymorphicContentTypeMixin`
-    and `content_type property.`
+    and `content_type` property.
 
     Can be used like:
 
@@ -153,8 +153,8 @@ def specific_iterator(
         if per_content_type_queryset_hook is not None:
             objects = per_content_type_queryset_hook(model, objects)
 
-        for object in objects:
-            specific_objects[object.id] = object
+        for obj in objects:
+            specific_objects[obj.id] = obj
 
     # Create an array with specific objects in the right order.
     ordered_specific_objects = []
diff --git a/backend/src/baserow/core/integrations/handler.py b/backend/src/baserow/core/integrations/handler.py
index cf931314f..ec63e2d67 100644
--- a/backend/src/baserow/core/integrations/handler.py
+++ b/backend/src/baserow/core/integrations/handler.py
@@ -23,27 +23,37 @@ from .types import IntegrationForUpdate
 
 class IntegrationHandler:
     def get_integration(
-        self, integration_id: int, base_queryset: Optional[QuerySet] = None
+        self,
+        integration_id: int,
+        base_queryset: Optional[QuerySet] = None,
+        specific=True,
     ) -> Integration:
         """
         Returns an integration instance from the database.
 
         :param integration_id: The ID of the integration.
         :param base_queryset: The base queryset use to build the query if provided.
+        :param specific: Whether we want the specific instance or not.
         :raises IntegrationDoesNotExist: If the integration can't be found.
-        :return: The specific integration instance.
+        :return: The integration instance.
         """
 
         queryset = (
             base_queryset if base_queryset is not None else Integration.objects.all()
         )
 
+        queryset = queryset.select_related("application__workspace")
+
         try:
-            integration = (
-                queryset.select_related("application", "application__workspace")
-                .get(id=integration_id)
-                .specific
-            )
+            if specific:
+                integration = queryset.get(id=integration_id)
+                # We use the enhanced version of the queryset to get the related
+                # fields. This is also responsible for returning the specific instance.
+                integration = (
+                    integration.get_type().get_queryset().get(id=integration_id)
+                )
+            else:
+                integration = queryset.get(id=integration_id)
         except Integration.DoesNotExist:
             raise IntegrationDoesNotExist()
 
@@ -101,9 +111,7 @@ class IntegrationHandler:
                 integration_type = integration_type_registry.get_by_model(model)
                 return integration_type.enhance_queryset(queryset)
 
-            queryset = queryset.select_related(
-                "content_type", "application", "application__workspace"
-            )
+            queryset = queryset.select_related("application__workspace")
 
             return specific_iterator(
                 queryset, per_content_type_queryset_hook=per_content_type_queryset_hook
diff --git a/backend/src/baserow/core/jobs/tasks.py b/backend/src/baserow/core/jobs/tasks.py
index 65caedaf1..b8ce994db 100644
--- a/backend/src/baserow/core/jobs/tasks.py
+++ b/backend/src/baserow/core/jobs/tasks.py
@@ -3,6 +3,7 @@ from datetime import timedelta
 from django.conf import settings
 
 from baserow.config.celery import app
+from baserow.core.cache import local_cache
 from baserow.core.jobs.exceptions import JobCancelled
 from baserow.core.jobs.registries import job_type_registry
 from baserow.core.sentry import setup_user_in_sentry
@@ -14,6 +15,7 @@ from baserow.core.telemetry.utils import setup_user_in_baggage_and_spans
     queue="export",
     soft_time_limit=settings.BASEROW_JOB_SOFT_TIME_LIMIT,
 )
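+# Celery tasks don't go through the Django middleware, so the task is wrapped in
+# its own local cache scope to get per-invocation isolation.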
+@local_cache.context()
 def run_async_job(self, job_id: int):
     """Run the job task asynchronously"""
 
diff --git a/backend/src/baserow/core/mixins.py b/backend/src/baserow/core/mixins.py
index c45f99468..58fd8efc0 100755
--- a/backend/src/baserow/core/mixins.py
+++ b/backend/src/baserow/core/mixins.py
@@ -223,13 +223,13 @@ class PolymorphicContentTypeMixin:
         """Returns this instance in its most specific subclassed form."""
 
         self._ensure_content_type_is_set()
-        content_type = ContentType.objects.get_for_id(self.content_type_id)
         model_class = self.specific_class
         if model_class is None:
             return self
         elif isinstance(self, model_class):
             return self
         else:
+            content_type = ContentType.objects.get_for_id(self.content_type_id)
             return content_type.get_object_for_this_type(id=self.id)
 
     @cached_property
diff --git a/backend/src/baserow/core/permission_manager.py b/backend/src/baserow/core/permission_manager.py
index 0526981a1..b0b68fb30 100755
--- a/backend/src/baserow/core/permission_manager.py
+++ b/backend/src/baserow/core/permission_manager.py
@@ -1,7 +1,9 @@
-from typing import List
+from typing import Iterable, List
 
 from django.contrib.auth import get_user_model
+from django.contrib.auth.models import AbstractUser
 
+from baserow.core.cache import local_cache
 from baserow.core.handler import CoreHandler
 from baserow.core.integrations.operations import (
     ListIntegrationsApplicationOperationType,
@@ -192,22 +194,48 @@ class WorkspaceMemberOnlyPermissionManagerType(PermissionManagerType):
 
         return getattr(actor, self.actor_cache_key, {}).get(workspace.id, False)
 
+    def get_user_ids_map_actually_in_workspace(
+        self,
+        workspace: Workspace,
+        users_to_query: Iterable[AbstractUser],
+        include_trash: bool = False,
+    ):
+        """
+        Return a `user_id -> is in the workspace` map. The result is cached for
+        the duration of the request to prevent duplicate queries.
+        """
+
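+        # The dict stored in the cache is mutated in place, so results accumulate
+        # across calls within the same request.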
+        cached = local_cache.get(f"workspace_user_{workspace.id}", dict)
+
+        missing = []
+        for user in users_to_query:
+            if user.id not in cached:
+                missing.append(user)
+
+        if missing:
+            user_ids_in_workspace = set(
+                CoreHandler()
+                .get_workspace_users(workspace, missing, include_trash=include_trash)
+                .values_list("user_id", flat=True)
+            )
+
+            for missing_user in missing:
+                cached[missing_user.id] = missing_user.id in user_ids_in_workspace
+
+        return cached
+
     def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
         if workspace is None:
             return {}
 
-        users_to_query = {c.actor for c in checks}
-
-        user_ids_in_workspace = set(
-            CoreHandler()
-            .get_workspace_users(workspace, users_to_query, include_trash=include_trash)
-            .values_list("user_id", flat=True)
+        user_id_map_in_workspace = self.get_user_ids_map_actually_in_workspace(
+            workspace, {c.actor for c in checks}, include_trash=include_trash
         )
 
         permission_by_check = {}
 
         def check_workspace(actor):
-            return lambda: actor.id in user_ids_in_workspace
+            return lambda: user_id_map_in_workspace[actor.id]
 
         for check in checks:
             if self.is_actor_in_workspace(
@@ -361,10 +389,9 @@ class StaffOnlySettingOperationPermissionManagerType(PermissionManagerType):
         return always_allowed_operations, staff_only_operations
 
     def check_multiple_permissions(self, checks, workspace=None, include_trash=False):
-        (
-            always_allowed_ops,
-            staff_only_ops,
-        ) = self.get_permitted_operations_for_settings()
+        (always_allowed_ops, staff_only_ops) = local_cache.get(
+            "settings", self.get_permitted_operations_for_settings
+        )
 
         result = {}
         for check in checks:
diff --git a/backend/src/baserow/core/registry.py b/backend/src/baserow/core/registry.py
index c9c2153ab..1d271d525 100644
--- a/backend/src/baserow/core/registry.py
+++ b/backend/src/baserow/core/registry.py
@@ -275,6 +275,23 @@ class CustomFieldsInstanceMixin:
 
         return None
 
+    def get_queryset(self):
+        """
+        Returns the base queryset for the model of this class, potentially enhanced
+        by the `.enhance_queryset` method. Requires the `ModelInstance` mixin.
+        """
+
+        return self.enhance_queryset(self.model_class.objects.all())
+
+    def enhance_queryset(self, queryset):
+        """
+        A hook to enhance the queryset for this type, for instance by adding
+        `select_related` or `prefetch_related` clauses. Called by `.get_queryset`
+        to build the base queryset to use for this type.
+        """
+
+        return queryset
+
 
 class PublicCustomFieldsInstanceMixin(CustomFieldsInstanceMixin):
     """
diff --git a/backend/src/baserow/core/services/handler.py b/backend/src/baserow/core/services/handler.py
index 8713a4a76..003237619 100644
--- a/backend/src/baserow/core/services/handler.py
+++ b/backend/src/baserow/core/services/handler.py
@@ -23,7 +23,7 @@ from .types import DispatchResult, ServiceForUpdate, UpdatedService
 
 class ServiceHandler:
     def get_service(
-        self, service_id: int, base_queryset: QuerySet[Service] = None
+        self, service_id: int, base_queryset: QuerySet[Service] = None, specific=True
     ) -> Service:
         """
         Returns an service instance from the database.
@@ -37,15 +37,23 @@ class ServiceHandler:
         queryset = base_queryset if base_queryset is not None else Service.objects.all()
 
         try:
-            service = (
-                queryset.select_related(
-                    "integration",
-                    "integration__application",
+            if specific:
+                service = queryset.get(id=service_id).specific
+                # We use the enhanced version of the queryset to get the related
+                # fields.
+                service = service.get_type().get_queryset().get(id=service_id)
+
+                if service.integration_id:
+                    specific_integration = IntegrationHandler().get_integration(
+                        service.integration_id, specific=True
+                    )
+                    service.__class__.integration.field.set_cached_value(
+                        service, specific_integration
+                    )
+            else:
+                service = queryset.select_related(
                     "integration__application__workspace",
-                )
-                .get(id=service_id)
-                .specific
-            )
+                ).get(id=service_id)
         except Service.DoesNotExist:
             raise ServiceDoesNotExist()
 
@@ -94,8 +102,6 @@ class ServiceHandler:
             queryset = queryset.filter(integration=integration)
 
         if specific:
-            queryset = queryset.select_related("content_type")
-
             # Apply the type specific queryset enhancement for performance.
             def per_content_type_queryset_hook(model, queryset):
                 service_type = service_type_registry.get_by_model(model)
@@ -125,8 +131,8 @@ class ServiceHandler:
                     ]
 
             return specific_services
-
         else:
+            queryset = queryset.select_related("integration__application")
             return queryset
 
     def create_service(self, service_type: ServiceType, **kwargs) -> Service:
diff --git a/backend/src/baserow/core/user_sources/handler.py b/backend/src/baserow/core/user_sources/handler.py
index 954923f1a..75650b946 100644
--- a/backend/src/baserow/core/user_sources/handler.py
+++ b/backend/src/baserow/core/user_sources/handler.py
@@ -11,6 +11,7 @@ from loguru import logger
 
 from baserow.core.db import specific_iterator
 from baserow.core.exceptions import ApplicationOperationNotSupported
+from baserow.core.integrations.handler import IntegrationHandler
 from baserow.core.models import Application
 from baserow.core.registries import application_type_registry
 from baserow.core.storage import ExportZipFile
@@ -29,13 +30,17 @@ class UserSourceHandler:
     allowed_fields_update = ["name", "integration"]
 
     def get_user_source(
-        self, user_source_id: int, base_queryset: Optional[QuerySet] = None
+        self,
+        user_source_id: int,
+        base_queryset: Optional[QuerySet] = None,
+        specific=True,
     ) -> UserSource:
         """
         Returns a user_source instance from the database.
 
         :param user_source_id: The ID of the user_source.
         :param base_queryset: The base queryset use to build the query if provided.
+        :param specific: Whether to return the specific instance.
         :raises UserSourceDoesNotExist: If the user_source can't be found.
         :return: The specific user_source instance.
         """
@@ -44,12 +49,23 @@ class UserSourceHandler:
             base_queryset if base_queryset is not None else UserSource.objects.all()
         )
 
+        queryset = queryset.select_related("application__workspace")
+
         try:
-            user_source = (
-                queryset.select_related("application", "application__workspace")
-                .get(id=user_source_id)
-                .specific
-            )
+            if specific:
+                user_source = queryset.get(id=user_source_id).specific
+                if user_source.integration_id:
+                    specific_integration = IntegrationHandler().get_integration(
+                        user_source.integration_id, specific=True
+                    )
+                    user_source.__class__.integration.field.set_cached_value(
+                        user_source, specific_integration
+                    )
+            else:
+                user_source = queryset.select_related("integration").get(
+                    id=user_source_id
+                )
+
         except UserSource.DoesNotExist as exc:
             raise UserSourceDoesNotExist() from exc
 
@@ -59,12 +75,14 @@ class UserSourceHandler:
         self,
         user_source_uid: str,
         base_queryset: Optional[QuerySet] = None,
+        specific=True,
     ) -> UserSource:
         """
         Returns a user_source instance from the database.
 
         :param user_source_uid: The uid of the user_source.
         :param base_queryset: The base queryset use to build the query if provided.
+        :param specific: Whether to return the specific instance.
         :raises UserSourceDoesNotExist: If the user_source can't be found.
         :return: The specific user_source instance.
         """
@@ -73,12 +91,23 @@ class UserSourceHandler:
             base_queryset if base_queryset is not None else UserSource.objects.all()
         )
 
+        queryset = queryset.select_related("application__workspace")
+
         try:
-            user_source = (
-                queryset.select_related("application", "application__workspace")
-                .get(uid=user_source_uid)
-                .specific
-            )
+            if specific:
+                user_source = queryset.get(uid=user_source_uid).specific
+                if user_source.integration_id:
+                    specific_integration = IntegrationHandler().get_integration(
+                        user_source.integration_id, specific=True
+                    )
+                    user_source.__class__.integration.field.set_cached_value(
+                        user_source, specific_integration
+                    )
+            else:
+                user_source = queryset.select_related("integration").get(
+                    uid=user_source_uid
+                )
+
         except UserSource.DoesNotExist as exc:
             raise UserSourceDoesNotExist() from exc
 
@@ -136,9 +165,7 @@ class UserSourceHandler:
                 user_source_type = user_source_type_registry.get_by_model(model)
                 return user_source_type.enhance_queryset(queryset)
 
-            queryset = queryset.select_related(
-                "content_type", "application", "application__workspace"
-            )
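+            # Select the integration as well so iterating the user sources doesn't
+            # trigger one extra query per row.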
+            queryset = queryset.select_related("application__workspace", "integration")
 
             return specific_iterator(
                 queryset, per_content_type_queryset_hook=per_content_type_queryset_hook
diff --git a/backend/src/baserow/test_utils/fixtures/user_source.py b/backend/src/baserow/test_utils/fixtures/user_source.py
index 460b391a1..6b7e44f5d 100644
--- a/backend/src/baserow/test_utils/fixtures/user_source.py
+++ b/backend/src/baserow/test_utils/fixtures/user_source.py
@@ -40,3 +40,36 @@ class UserSourceFixtures:
             )
             user_sources.append(user_source)
         return user_sources
+
+    def create_user_table_and_role(self, user, builder, user_role, integration=None):
+        """Helper to create a User table with a particular user role."""
+
+        # Create the user table for the user_source
+        user_table, user_fields, user_rows = self.build_table(
+            user=user,
+            columns=[
+                ("Email", "text"),
+                ("Name", "text"),
+                ("Password", "text"),
+                ("Role", "text"),
+            ],
+            rows=[
+                ["foo@bar.com", "Foo User", "secret", user_role],
+            ],
+        )
+        email_field, name_field, password_field, role_field = user_fields
+
+        integration = integration or self.create_local_baserow_integration(
+            user=user, application=builder
+        )
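+        # Create a local_baserow user source wired to the user table fields above.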
+        user_source = self.create_user_source(
+            user_source_type_registry.get("local_baserow").model_class,
+            application=builder,
+            integration=integration,
+            table=user_table,
+            email_field=email_field,
+            name_field=name_field,
+            role_field=role_field,
+        )
+
+        return user_source, integration
diff --git a/backend/src/baserow/test_utils/pytest_conftest.py b/backend/src/baserow/test_utils/pytest_conftest.py
index 3d060e039..de6725dc5 100755
--- a/backend/src/baserow/test_utils/pytest_conftest.py
+++ b/backend/src/baserow/test_utils/pytest_conftest.py
@@ -28,6 +28,7 @@ from rest_framework.test import APIRequestFactory
 from sqlparse import format
 
 from baserow.contrib.database.application_types import DatabaseApplicationType
+from baserow.core.cache import local_cache
 from baserow.core.context import clear_current_workspace_id
 from baserow.core.exceptions import PermissionDenied
 from baserow.core.jobs.registries import job_type_registry
@@ -93,6 +94,14 @@ def api_request_factory():
     return APIRequestFactory()
 
 
+@pytest.fixture(autouse=True)
+def reset_cache():
+    """Automatically reset the short cache before each test."""
+
+    with local_cache.context():
+        yield
+
+
 @pytest.fixture
 def reset_schema(django_db_blocker):
     yield
diff --git a/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py b/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py
index f82ebac6a..3bd5e4d42 100644
--- a/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py
+++ b/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py
@@ -2,6 +2,7 @@ import json
 from unittest.mock import ANY, MagicMock, patch
 
 from django.db import transaction
+from django.test import override_settings
 from django.urls import reverse
 
 import pytest
@@ -18,10 +19,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.models import SORT_ORDER_ASC
 from baserow.core.services.models import Service
 from baserow.core.user_sources.user_source_user import UserSourceUser
-from baserow.test_utils.helpers import AnyInt, AnyStr
-from tests.baserow.contrib.builder.api.user_sources.helpers import (
-    create_user_table_and_role,
-)
+from baserow.test_utils.helpers import AnyInt, AnyStr, setup_interesting_test_table
 
 
 @pytest.fixture
@@ -48,8 +46,7 @@ def data_source_fixture(data_fixture):
     )
     page = data_fixture.create_builder_page(user=user, builder=builder)
 
-    user_source, _ = create_user_table_and_role(
-        data_fixture,
+    user_source, _ = data_fixture.create_user_table_and_role(
         user,
         builder,
         "foo_user_role",
@@ -841,6 +838,7 @@ def test_dispatch_data_source_with_refinements_referencing_trashed_field(
     service_filter = data_fixture.create_local_baserow_table_service_filter(
         service=data_source.service, field=trashed_field, value="abc", order=0
     )
+
     url = reverse(
         "api:builder:data_source:dispatch", kwargs={"data_source_id": data_source.id}
     )
@@ -872,6 +870,75 @@ def test_dispatch_data_source_with_refinements_referencing_trashed_field(
     }
 
 
+@pytest.mark.django_db
+@pytest.mark.disabled_in_ci
+# You must add --run-disabled-in-ci -s to pytest to run this test. In IntelliJ you can
+# do this by editing the run config for this test and adding --run-disabled-in-ci -s
+# to the additional args, or run it directly with:
+# pytest -k "test_dispatch_data_sources_perf" -s --run-disabled-in-ci
+@override_settings(TESTS=False)
+def test_dispatch_data_sources_perf(api_client, data_fixture, profiler):
+    user, token = data_fixture.create_user_and_token()
+    table1, _, _, _, _ = setup_interesting_test_table(data_fixture, user)
+    table2, _, _ = data_fixture.build_table(
+        user=user,
+        columns=[
+            ("Name", "text"),
+            ("My Color", "text"),
+        ],
+        rows=[["2CV", "Green"]] * 40,
+    )
+    table3, _, _ = data_fixture.build_table(
+        user=user,
+        columns=[
+            ("Name", "text"),
+            ("My Color", "text"),
+        ],
+        rows=[["Twingo", "White"]] * 40,
+    )
+
+    view = data_fixture.create_grid_view(user, table=table1)
+    builder = data_fixture.create_builder_application(user=user)
+    integration = data_fixture.create_local_baserow_integration(
+        user=user, application=builder
+    )
+    page = data_fixture.create_builder_page(user=user, builder=builder)
+    data_source1 = data_fixture.create_builder_local_baserow_list_rows_data_source(
+        user=user,
+        page=page,
+        integration=integration,
+        view=view,
+        table=table1,
+    )
+    data_source2 = data_fixture.create_builder_local_baserow_list_rows_data_source(
+        user=user,
+        page=page,
+        integration=integration,
+        table=table2,
+    )
+    data_source3 = data_fixture.create_builder_local_baserow_list_rows_data_source(
+        user=user,
+        page=page,
+        integration=integration,
+        table=table3,
+    )
+
+    url = reverse("api:builder:data_source:dispatch-all", kwargs={"page_id": page.id})
+
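+    # Repeatedly dispatch every data source of the page to profile the endpoint.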
+    with profiler(html_report_name="data_sources_dispatch_perf"):
+        for _ in range(30):
+            api_client.post(
+                url,
+                {},
+                format="json",
+                HTTP_AUTHORIZATION=f"JWT {token}",
+            )
+
+
 @pytest.mark.django_db
 def test_dispatch_data_source_with_adhoc_filters(api_client, data_fixture):
     user, token = data_fixture.create_user_and_token()
diff --git a/backend/tests/baserow/contrib/builder/api/data_sources/test_public_domain_views.py b/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py
similarity index 94%
rename from backend/tests/baserow/contrib/builder/api/data_sources/test_public_domain_views.py
rename to backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py
index 9fa768cec..0a42fbba4 100644
--- a/backend/tests/baserow/contrib/builder/api/data_sources/test_public_domain_views.py
+++ b/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py
@@ -5,7 +5,6 @@ from rest_framework.status import HTTP_200_OK
 
 from baserow.contrib.builder.elements.models import Element
 from baserow.contrib.builder.pages.models import Page
-from baserow.core.user_sources.registries import user_source_type_registry
 from baserow.core.user_sources.user_source_user import UserSourceUser
 
 
@@ -90,44 +89,6 @@ def data_source_element_roles_fixture(data_fixture):
     }
 
 
-def create_user_table_and_role(
-    data_fixture, user, builder, user_role, integration=None
-):
-    """Helper to create a User table with a particular user role."""
-
-    # Create the user table for the user_source
-    user_table, user_fields, user_rows = data_fixture.build_table(
-        user=user,
-        columns=[
-            ("Email", "text"),
-            ("Name", "text"),
-            ("Password", "text"),
-            ("Role", "text"),
-        ],
-        rows=[
-            ["foo@bar.com", "Foo User", "secret", user_role],
-        ],
-    )
-    email_field, name_field, password_field, role_field = user_fields
-
-    if integration is None:
-        integration = data_fixture.create_local_baserow_integration(
-            user=user, application=builder
-        )
-
-    user_source = data_fixture.create_user_source(
-        user_source_type_registry.get("local_baserow").model_class,
-        application=builder,
-        integration=integration,
-        table=user_table,
-        email_field=email_field,
-        name_field=name_field,
-        role_field=role_field,
-    )
-
-    return user_source, integration
-
-
 @pytest.mark.django_db
 def test_dispatch_data_sources_list_rows_no_elements(
     api_client, data_fixture, data_source_fixture
@@ -483,8 +444,7 @@ def test_dispatch_data_sources_list_rows_with_elements_and_role(
 
     page = data_source_element_roles_fixture["page"]
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -633,8 +593,7 @@ def test_dispatch_data_sources_page_visibility_logged_in_allow_all_returns_eleme
 
     integration = data_source_fixture["integration"]
     user = data_source_fixture["user"]
-    user_source, _ = create_user_table_and_role(
-        data_fixture,
+    user_source, _ = data_fixture.create_user_table_and_role(
         user,
         data_source_fixture["builder_to"],
         "foo_role",
@@ -818,8 +777,7 @@ def test_dispatch_data_sources_page_visibility_logged_in_allow_all_except(
 
     integration = data_source_fixture["integration"]
     user = data_source_fixture["user"]
-    user_source, _ = create_user_table_and_role(
-        data_fixture,
+    user_source, _ = data_fixture.create_user_table_and_role(
         user,
         data_source_fixture["builder_to"],
         user_role,
diff --git a/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py b/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py
index d5a008cd8..07d6b7e2e 100644
--- a/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py
+++ b/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py
@@ -23,9 +23,6 @@ from baserow.contrib.database.views.models import SORT_ORDER_ASC
 from baserow.core.exceptions import PermissionException
 from baserow.core.services.exceptions import DoesNotExist, ServiceImproperlyConfigured
 from baserow.core.user_sources.user_source_user import UserSourceUser
-from tests.baserow.contrib.builder.api.user_sources.helpers import (
-    create_user_table_and_role,
-)
 
 
 @pytest.fixture
@@ -768,8 +765,7 @@ def user_source_user_fixture(data_fixture):
     )
     page = data_fixture.create_builder_page(user=user, builder=builder)
 
-    user_source, _ = create_user_table_and_role(
-        data_fixture,
+    user_source, _ = data_fixture.create_user_table_and_role(
         user,
         builder,
         "foo_user_role",
@@ -1086,8 +1082,7 @@ def test_public_dispatch_data_sources_list_rows_with_elements_and_role(
 
     page = data_source_element_roles_fixture["page"]
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -1270,8 +1265,7 @@ def test_public_dispatch_data_sources_list_rows_with_page_visibility_all(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -1451,8 +1445,7 @@ def test_public_dispatch_data_sources_get_row_with_page_visibility_all(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -1586,8 +1579,7 @@ def test_public_dispatch_data_sources_list_rows_with_page_visibility_logged_in(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -1743,8 +1735,7 @@ def test_public_dispatch_data_sources_get_row_with_page_visibility_logged_in(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -1839,8 +1830,7 @@ def test_list_elements_with_page_visibility_all(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
@@ -2013,8 +2003,7 @@ def test_list_elements_with_page_visibility_logged_in(
     page.roles = page_roles
     page.save()
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         data_source_element_roles_fixture["user"],
         data_source_element_roles_fixture["builder_to"],
         user_role,
diff --git a/backend/tests/baserow/contrib/builder/api/user_sources/helpers.py b/backend/tests/baserow/contrib/builder/api/user_sources/helpers.py
deleted file mode 100644
index b5b3a30d2..000000000
--- a/backend/tests/baserow/contrib/builder/api/user_sources/helpers.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from baserow.core.user_sources.registries import user_source_type_registry
-
-
-def create_user_table_and_role(
-    data_fixture, user, builder, user_role, integration=None
-):
-    """Helper to create a User table with a particular user role."""
-
-    # Create the user table for the user_source
-    user_table, user_fields, user_rows = data_fixture.build_table(
-        user=user,
-        columns=[
-            ("Email", "text"),
-            ("Name", "text"),
-            ("Password", "text"),
-            ("Role", "text"),
-        ],
-        rows=[
-            ["foo@bar.com", "Foo User", "secret", user_role],
-        ],
-    )
-    email_field, name_field, password_field, role_field = user_fields
-
-    integration = integration or data_fixture.create_local_baserow_integration(
-        user=user, application=builder
-    )
-    user_source = data_fixture.create_user_source(
-        user_source_type_registry.get("local_baserow").model_class,
-        application=builder,
-        integration=integration,
-        table=user_table,
-        email_field=email_field,
-        name_field=name_field,
-        role_field=role_field,
-    )
-
-    return user_source, integration
diff --git a/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py b/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py
index 504bdb915..4dcf8b314 100644
--- a/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py
+++ b/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py
@@ -1338,7 +1338,7 @@ def test_data_source_data_extract_properties_calls_correct_service_type(
     result = DataSourceDataProviderType().extract_properties(path)
 
     assert result == {mocked_data_source.service_id: expected}
-    mocked_get_data_source.assert_called_once_with(int(data_source_id))
+    mocked_get_data_source.assert_called_once_with(int(data_source_id), with_cache=True)
     mocked_service_type.extract_properties.assert_called_once_with([expected])
 
 
@@ -1434,7 +1434,7 @@ def test_data_source_context_extract_properties_calls_correct_service_type(
     result = DataSourceContextDataProviderType().extract_properties(path)
 
     assert result == {mocked_data_source.service_id: expected}
-    mocked_get_data_source.assert_called_once_with(int(data_source_id))
+    mocked_get_data_source.assert_called_once_with(int(data_source_id), with_cache=True)
     mocked_service_type.extract_properties.assert_called_once_with([expected])
 
 
@@ -1512,7 +1512,7 @@ def test_data_source_context_data_provider_extract_properties_raises_if_data_sou
     with pytest.raises(InvalidBaserowFormula):
         DataSourceContextDataProviderType().extract_properties(path)
 
-    mock_get_data_source.assert_called_once_with(int(path[0]))
+    mock_get_data_source.assert_called_once_with(int(path[0]), with_cache=True)
 
 
 @pytest.mark.parametrize(
@@ -1539,7 +1539,7 @@ def test_data_source_data_provider_extract_properties_raises_if_data_source_does
     with pytest.raises(InvalidBaserowFormula):
         DataSourceDataProviderType().extract_properties(path)
 
-    mock_get_data_source.assert_called_once_with(int(path[0]))
+    mock_get_data_source.assert_called_once_with(int(path[0]), with_cache=True)
 
 
 @pytest.mark.parametrize("path", ([], [""], ["foo"]))
@@ -1581,7 +1581,9 @@ def test_current_record_extract_properties_raises_if_data_source_doesnt_exist(
     with pytest.raises(InvalidBaserowFormula):
         CurrentRecordDataProviderType().extract_properties(path, invalid_data_source_id)
 
-    mock_get_data_source.assert_called_once_with(invalid_data_source_id)
+    mock_get_data_source.assert_called_once_with(
+        invalid_data_source_id, with_cache=True
+    )
 
 
 @pytest.mark.django_db
@@ -1617,7 +1619,7 @@ def test_current_record_extract_properties_calls_correct_service_type(
     result = CurrentRecordDataProviderType().extract_properties(path, fake_element_id)
 
     assert result == {mocked_data_source.service_id: expected_field}
-    mock_get_data_source.assert_called_once_with(fake_element_id)
+    mock_get_data_source.assert_called_once_with(fake_element_id, with_cache=True)
     mocked_service_type.extract_properties.assert_called_once_with(
         ["0", expected_field]
     )
@@ -1676,7 +1678,7 @@ def test_current_record_extract_properties_called_with_correct_path(
         schema_property,
     )
 
-    mock_get_data_source.assert_called_once_with(data_source_id)
+    mock_get_data_source.assert_called_once_with(data_source_id, with_cache=True)
 
     if returns_list:
         if schema_property:
diff --git a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py
index c8dc452ba..05add367d 100644
--- a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py
+++ b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py
@@ -20,9 +20,6 @@ from baserow.core.exceptions import CannotCalculateIntermediateOrder
 from baserow.core.services.registries import service_type_registry
 from baserow.core.user_sources.user_source_user import UserSourceUser
 from baserow.test_utils.helpers import AnyStr
-from tests.baserow.contrib.builder.api.user_sources.helpers import (
-    create_user_table_and_role,
-)
 
 
 @pytest.mark.django_db
@@ -525,8 +522,7 @@ def test_dispatch_data_source_doesnt_return_formula_field_names(
     )
     page = data_fixture.create_builder_page(user=user, builder=builder)
 
-    user_source, _ = create_user_table_and_role(
-        data_fixture,
+    user_source, _ = data_fixture.create_user_table_and_role(
         user,
         builder,
         "foo_user_role",
diff --git a/backend/tests/baserow/contrib/builder/data_sources/test_dispatch_context.py b/backend/tests/baserow/contrib/builder/data_sources/test_dispatch_context.py
index ffdff3bb9..ac06d5e53 100644
--- a/backend/tests/baserow/contrib/builder/data_sources/test_dispatch_context.py
+++ b/backend/tests/baserow/contrib/builder/data_sources/test_dispatch_context.py
@@ -14,9 +14,6 @@ from baserow.contrib.builder.data_sources.exceptions import (
 from baserow.contrib.builder.elements.element_types import collection_element_types
 from baserow.core.services.utils import ServiceAdhocRefinements
 from baserow.core.user_sources.user_source_user import UserSourceUser
-from tests.baserow.contrib.builder.api.user_sources.helpers import (
-    create_user_table_and_role,
-)
 
 
 def test_dispatch_context_page_range():
@@ -48,8 +45,7 @@ def test_dispatch_context_page_from_context(mock_get_field_names, data_fixture):
     user = data_fixture.create_user()
     page = data_fixture.create_builder_page(user=user)
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         user,
         page.builder,
         "foo_user_role",
@@ -351,8 +347,7 @@ def test_builder_dispatch_context_public_allowed_properties_is_cached(
     )
     builder = data_fixture.create_builder_application(user=user)
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         user,
         builder,
         "foo_user_role",
@@ -393,7 +388,7 @@ def test_builder_dispatch_context_public_allowed_properties_is_cached(
     }
 
     # Initially calling the property should cause a bunch of DB queries.
-    with django_assert_num_queries(12):
+    with django_assert_num_queries(9):
         result = dispatch_context.public_allowed_properties
         assert result == expected_results
 
diff --git a/backend/tests/baserow/contrib/builder/elements/test_element_handler.py b/backend/tests/baserow/contrib/builder/elements/test_element_handler.py
index 9cbd964f9..1e2a412c5 100644
--- a/backend/tests/baserow/contrib/builder/elements/test_element_handler.py
+++ b/backend/tests/baserow/contrib/builder/elements/test_element_handler.py
@@ -557,11 +557,12 @@ def test_duplicate_element_with_workflow_action(data_fixture):
 def test_get_element_workflow_actions(data_fixture):
     page = data_fixture.create_builder_page()
     element = data_fixture.create_builder_button_element()
-    workflow_action_one = data_fixture.create_notification_workflow_action(
-        page=page, element=element
-    )
+    # Order is intentionally switched to check that the result is ordered correctly
     workflow_action_two = data_fixture.create_notification_workflow_action(
-        page=page, element=element
+        page=page, element=element, order=2
+    )
+    workflow_action_one = data_fixture.create_notification_workflow_action(
+        page=page, element=element, order=1
     )
 
     [
diff --git a/backend/tests/baserow/contrib/builder/test_builder_handler.py b/backend/tests/baserow/contrib/builder/test_builder_handler.py
index e782b06a3..e4b7f123c 100644
--- a/backend/tests/baserow/contrib/builder/test_builder_handler.py
+++ b/backend/tests/baserow/contrib/builder/test_builder_handler.py
@@ -7,9 +7,6 @@ import pytest
 from baserow.contrib.builder.handler import CACHE_KEY_PREFIX, BuilderHandler
 from baserow.core.exceptions import ApplicationDoesNotExist
 from baserow.core.user_sources.user_source_user import UserSourceUser
-from tests.baserow.contrib.builder.api.user_sources.helpers import (
-    create_user_table_and_role,
-)
 
 User = get_user_model()
 
@@ -111,8 +108,7 @@ def test_public_allowed_properties_is_cached(data_fixture, django_assert_num_que
     )
     builder = data_fixture.create_builder_application(user=user)
 
-    user_source, integration = create_user_table_and_role(
-        data_fixture,
+    user_source, integration = data_fixture.create_user_table_and_role(
         user,
         builder,
         "foo_user_role",
@@ -146,7 +142,7 @@ def test_public_allowed_properties_is_cached(data_fixture, django_assert_num_que
     }
 
     # Initially calling the property should cause a bunch of DB queries.
-    with django_assert_num_queries(12):
+    with django_assert_num_queries(9):
         result = handler.get_builder_public_properties(user_source_user, builder)
         assert result == expected_results
 
diff --git a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
index 8f5bc9464..34c5a28d5 100644
--- a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
@@ -45,6 +45,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.table.handler import TableHandler
 from baserow.contrib.database.table.models import GeneratedTableModel, Table
 from baserow.contrib.database.views.handler import ViewHandler
+from baserow.core.cache import local_cache
 from baserow.core.handler import CoreHandler
 from baserow.core.models import TrashEntry, WorkspaceUser
 from baserow.core.registries import ImportExportConfig
@@ -2206,10 +2207,10 @@ def test_clear_link_row_limit_selection_view_when_view_is_deleted(
 
     view_handler = ViewHandler()
 
-    with CaptureQueriesContext(connection) as queries_request_1:
+    with CaptureQueriesContext(connection) as queries_request_1, local_cache.context():
         view_handler.delete_view(user, view_3)
 
-    with CaptureQueriesContext(connection) as queries_request_2:
+    with CaptureQueriesContext(connection) as queries_request_2, local_cache.context():
         view_handler.delete_view(user, view)
 
     assert len(queries_request_1.captured_queries) + 1 == len(
diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py b/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
index fd3400ff7..e6e4db7e1 100644
--- a/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
+++ b/backend/tests/baserow/contrib/integrations/local_baserow/test_mixins.py
@@ -34,7 +34,7 @@ def get_test_service_type(mixin_class):
 
 
 @pytest.mark.django_db
-def test_local_baserow_table_service_filterable_mixin_get_queryset(
+def test_local_baserow_table_service_filterable_mixin_get_table_queryset(
     data_fixture,
 ):
     service_type = get_test_service_type(LocalBaserowTableServiceFilterableMixin)
@@ -60,7 +60,7 @@ def test_local_baserow_table_service_filterable_mixin_get_queryset(
     # No filters of any kind.
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alessia.id, alex.id, alastair.id, alexandra.id]
@@ -77,7 +77,7 @@ def test_local_baserow_table_service_filterable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alessia.id, alex.id, alexandra.id]
@@ -100,7 +100,7 @@ def test_local_baserow_table_service_filterable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alexandra.id]
@@ -244,7 +244,7 @@ def test_local_baserow_table_service_filterable_mixin_get_dispatch_filters_raise
 
 
 @pytest.mark.django_db
-def test_local_baserow_table_service_sortable_mixin_get_queryset(
+def test_local_baserow_table_service_sortable_mixin_get_table_queryset(
     data_fixture,
 ):
     service_type = get_test_service_type(LocalBaserowTableServiceSortableMixin)
@@ -270,7 +270,7 @@ def test_local_baserow_table_service_sortable_mixin_get_queryset(
     # No sorts of any kind.
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [aardvark.id, badger.id, crow.id, dragonfly.id]
@@ -282,7 +282,7 @@ def test_local_baserow_table_service_sortable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [dragonfly.id, crow.id, badger.id, aardvark.id]
@@ -294,7 +294,7 @@ def test_local_baserow_table_service_sortable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [aardvark.id, badger.id, crow.id, dragonfly.id]
@@ -347,7 +347,7 @@ def test_local_baserow_table_service_sortable_mixin_get_dispatch_sorts_raises_ex
 
 
 @pytest.mark.django_db(transaction=True)
-def test_local_baserow_table_service_searchable_mixin_get_queryset(
+def test_local_baserow_table_service_searchable_mixin_get_table_queryset(
     data_fixture,
 ):
     service_type = get_test_service_type(LocalBaserowTableServiceSearchableMixin)
@@ -374,7 +374,7 @@ def test_local_baserow_table_service_searchable_mixin_get_queryset(
     # No search query of any kind.
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alessia.id, alex.id, alastair.id, alexandra.id]
@@ -384,7 +384,7 @@ def test_local_baserow_table_service_searchable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alessia.id, alex.id, alexandra.id]
@@ -396,7 +396,7 @@ def test_local_baserow_table_service_searchable_mixin_get_queryset(
 
     assert [
         row.id
-        for row in service_type.get_queryset(
+        for row in service_type.get_table_queryset(
             service, table, dispatch_context, table_model
         )
     ] == [alexandra.id]
diff --git a/backend/tests/baserow/core/test_basic_permissions.py b/backend/tests/baserow/core/test_basic_permissions.py
index 1bf40306f..ab0c3d517 100755
--- a/backend/tests/baserow/core/test_basic_permissions.py
+++ b/backend/tests/baserow/core/test_basic_permissions.py
@@ -9,6 +9,7 @@ import pytest
 
 from baserow.contrib.database.models import Database
 from baserow.contrib.database.operations import ListTablesDatabaseTableOperationType
+from baserow.core.cache import local_cache
 from baserow.core.exceptions import (
     PermissionDenied,
     UserInvalidWorkspacePermissionsError,
@@ -1141,7 +1142,7 @@ def test_allow_if_template_permission_manager_filter_queryset(data_fixture):
         "member",
         "token",
         "basic",
-    ]
+    ],
 )
 def test_allow_if_template_permission_manager_query_count(data_fixture):
     buser = data_fixture.create_user(username="Auth user")
@@ -1161,7 +1162,9 @@ def test_allow_if_template_permission_manager_query_count(data_fixture):
             workspace=workspace_1,
         )
 
-    with CaptureQueriesContext(connection) as query_not_for_template:
+    with CaptureQueriesContext(
+        connection
+    ) as query_not_for_template, local_cache.context():
         CoreHandler().check_permissions(
             buser,
             UpdateIntegrationOperationType.type,
diff --git a/backend/tests/baserow/core/test_cache.py b/backend/tests/baserow/core/test_cache.py
new file mode 100644
index 000000000..aa192825b
--- /dev/null
+++ b/backend/tests/baserow/core/test_cache.py
@@ -0,0 +1,80 @@
+from django.http import HttpRequest
+from django.test import override_settings
+
+import pytest
+
+from baserow.core.cache import LocalCacheMiddleware, local_cache
+
+
+# Simulate a Django view
+def mock_view(request):
+    user_profile = local_cache.get(
+        "user_profile", lambda: {"id": 1, "name": "Test User"}
+    )
+    return user_profile
+
+
+@pytest.fixture
+def middleware():
+    """Provide an instance of the middleware."""
+
+    return LocalCacheMiddleware(get_response=mock_view)
+
+
+def test_cache_storage(middleware):
+    """Test that the cache stores and retrieves values correctly."""
+
+    request = HttpRequest()
+    response = middleware(request)
+
+    assert response == {"id": 1, "name": "Test User"}
+
+    # The middleware clears the cache after the request, so the value is gone
+    cached_value = local_cache.get("user_profile")
+    assert cached_value is None
+
+
+def test_callable_default():
+    """Test that callable defaults are executed and cached."""
+
+    # Check if the callable default was executed
+    assert local_cache.get("user_profile", lambda: "test") == "test"
+
+
+def test_cache_isolation(middleware):
+    """Test that the cache is isolated between simulated requests."""
+
+    local_cache.get("user_profile", "before")
+
+    request1 = HttpRequest()
+    result = middleware(request1)
+
+    assert result == {"id": 1, "name": "Test User"}
+    assert local_cache.get("user_profile", "No Cache") == "No Cache"
+
+    # Simulate a new request and ensure the cache is isolated
+    request2 = HttpRequest()
+    middleware(request2)
+
+    # Ensure the second request starts with an empty cache
+    assert local_cache.get("user_profile", "No Cache") == "No Cache"
+
+
+def test_cache_cleanup_after_request(middleware):
+    """Test that the cache is cleared after the request lifecycle."""
+
+    request = HttpRequest()
+    middleware(request)
+
+    # After the request, the cache should have been cleaned up
+    assert local_cache.get("user_profile", "after") == "after"
+
+
+@override_settings(BASEROW_USE_LOCAL_CACHE=False)
+def test_cache_disabled():
+    """
+    Test that the cache does not store values when BASEROW_USE_LOCAL_CACHE is False.
+    """
+
+    assert local_cache.get("user_profile", lambda: "disabled") == "disabled"
+    assert local_cache.get("user_profile") is None
diff --git a/changelog/entries/unreleased/refactor/2978_builder_improve_data_dispatching_performances.json b/changelog/entries/unreleased/refactor/2978_builder_improve_data_dispatching_performances.json
new file mode 100644
index 000000000..32dcf771c
--- /dev/null
+++ b/changelog/entries/unreleased/refactor/2978_builder_improve_data_dispatching_performances.json
@@ -0,0 +1,7 @@
+{
+    "type": "refactor",
+    "message": "[Builder] Improve data dispatching performances",
+    "issue_number": 2978,
+    "bullet_points": [],
+    "created_at": "2024-09-26"
+}
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 17fe6e8ad..a4b895d96 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -199,7 +199,7 @@ x-backend-variables: &backend-variables
   BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE:
   BASEROW_ENTERPRISE_PERIODIC_DATA_SYNC_CHECK_INTERVAL_MINUTES:
   BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS:
-
+  BASEROW_USE_LOCAL_CACHE:
 
 services:
   # A caddy reverse proxy sitting in-front of all the services. Responsible for routing
diff --git a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/auth_provider_types.py b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/auth_provider_types.py
index 6a86d5ad9..4d28f2898 100644
--- a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/auth_provider_types.py
+++ b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/auth_provider_types.py
@@ -199,6 +199,17 @@ class LocalBaserowPasswordAppAuthProviderType(AppAuthProviderType):
             **kwargs,
         )
 
+    def get_user_model_field_ids(self, auth_provider: AuthProviderModelSubClass):
+        """
+        Returns the list of field ids used by this provider so they can be selected
+        on the table model to save queries. This method is specific to local_baserow
+        app_auth_providers.
+        """
+
+        return (
+            [auth_provider.password_field_id] if auth_provider.password_field_id else []
+        )
+
     def authenticate(
         self,
         auth_provider: AuthProviderModelSubClass,
@@ -213,8 +224,9 @@ class LocalBaserowPasswordAppAuthProviderType(AppAuthProviderType):
 
         user = user_source.get_type().get_user(user_source, email=email)
 
-        password_field = auth_provider.password_field
-        encoded_password = getattr(user.original_user, password_field.db_column)
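+        # Read the password column by field id so the password field instance doesn't
+        # have to be loaded from the database.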
+        encoded_password = getattr(
+            user.original_user, f"field_{auth_provider.password_field_id}"
+        )
 
         if encoded_password and check_password(password, encoded_password):
             return user
diff --git a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py
index 0e7945457..0d420c34f 100644
--- a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py
+++ b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py
@@ -446,7 +446,30 @@ class LocalBaserowUserSourceType(UserSourceType):
 
         integration = user_source.integration.specific
 
-        model = table.get_model()
+        app_auth_providers = (
+            AppAuthProviderHandler.list_app_auth_providers_for_user_source(user_source)
+        )
+
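+        # Collect the extra field ids each auth provider needs (e.g. the password
+        # field) so they are included in the generated table model.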
+        providers_fields = [
+            f
+            for ap in app_auth_providers
+            if hasattr(ap.get_type(), "get_user_model_field_ids")
+            for f in ap.get_type().get_user_model_field_ids(ap)
+        ]
+
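+        # Only generate the model with the fields actually used by the user source to
+        # keep the generated model and the queries as small as possible.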
+        model = table.get_model(
+            add_dependencies=False,
+            field_ids=[
+                f
+                for f in [
+                    user_source.email_field_id,
+                    user_source.name_field_id,
+                    user_source.role_field_id,
+                    *providers_fields,
+                ]
+                if f
+            ],
+        )
 
         CoreHandler().check_permissions(
             integration.authorized_user,
@@ -614,8 +637,8 @@ class LocalBaserowUserSourceType(UserSourceType):
                 user_source,
                 user,
                 user.id,
-                getattr(user, user_source.name_field.db_column),
-                getattr(user, user_source.email_field.db_column),
+                getattr(user, f"field_{user_source.name_field_id}"),
+                getattr(user, f"field_{user_source.email_field_id}"),
                 self.get_user_role(user, user_source),
             )
 
diff --git a/enterprise/backend/src/baserow_enterprise/role/permission_manager.py b/enterprise/backend/src/baserow_enterprise/role/permission_manager.py
index 750d55aa1..6aa0d1f37 100644
--- a/enterprise/backend/src/baserow_enterprise/role/permission_manager.py
+++ b/enterprise/backend/src/baserow_enterprise/role/permission_manager.py
@@ -1,5 +1,5 @@
 from collections import defaultdict
-from functools import cached_property
+from functools import cached_property, partial
 from typing import Any, Dict, List, Optional, Set, Tuple, TypedDict
 
 from django.contrib.auth import get_user_model
@@ -7,6 +7,7 @@ from django.contrib.auth.models import AbstractUser
 
 from baserow_premium.license.handler import LicenseHandler
 
+from baserow.core.cache import local_cache
 from baserow.core.exceptions import PermissionDenied
 from baserow.core.models import Workspace
 from baserow.core.registries import (
@@ -43,7 +44,10 @@ class RolePermissionManagerType(PermissionManagerType):
         :param workspace: The workspace in which we want to use this permission manager.
         """
 
-        return LicenseHandler.workspace_has_feature(RBAC, workspace)
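+        # Cache the RBAC feature check per workspace for the duration of the request.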
+        return local_cache.get(
+            f"has_rbac_permission_{workspace.id}",
+            partial(LicenseHandler.workspace_has_feature, RBAC, workspace),
+        )
 
     def get_role_operations(self, role: Role) -> List[str]:
         """
diff --git a/premium/backend/src/baserow_premium/permission_manager.py b/premium/backend/src/baserow_premium/permission_manager.py
index a63cf6e72..683a8d01b 100644
--- a/premium/backend/src/baserow_premium/permission_manager.py
+++ b/premium/backend/src/baserow_premium/permission_manager.py
@@ -1,3 +1,4 @@
+from functools import partial
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
 
 from django.contrib.auth import get_user_model
@@ -48,6 +49,7 @@ from baserow.contrib.database.views.operations import (
     UpdateViewSlugOperationType,
     UpdateViewSortOperationType,
 )
+from baserow.core.cache import local_cache
 from baserow.core.exceptions import PermissionDenied, PermissionException
 from baserow.core.handler import CoreHandler
 from baserow.core.registries import PermissionManagerType, object_scope_type_registry
@@ -172,7 +174,10 @@ class ViewOwnershipPermissionManagerType(PermissionManagerType):
             ):
                 continue
 
-            premium = LicenseHandler.user_has_feature(PREMIUM, actor, workspace)
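+            # Cache the premium feature check per actor/workspace so repeated
+            # permission checks in the same request don't hit the license handler.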
+            premium = local_cache.get(
+                f"has_premium_permission_{actor.id}_{workspace.id}",
+                partial(LicenseHandler.user_has_feature, PREMIUM, actor, workspace),
+            )
 
             view_scope_type = object_scope_type_registry.get("database_view")
             view = object_scope_type_registry.get_parent(
@@ -259,7 +264,10 @@ class ViewOwnershipPermissionManagerType(PermissionManagerType):
         if not workspace:
             return queryset
 
-        premium = LicenseHandler.user_has_feature(PREMIUM, actor, workspace)
+        premium = local_cache.get(
+            f"has_premium_permission_{actor.id}_{workspace.id}",
+            partial(LicenseHandler.user_has_feature, PREMIUM, actor, workspace),
+        )
 
         if premium:
             allowed_tables = CoreHandler().filter_queryset(
diff --git a/web-frontend/modules/builder/store/element.js b/web-frontend/modules/builder/store/element.js
index bf4bab459..bc9d9212d 100644
--- a/web-frontend/modules/builder/store/element.js
+++ b/web-frontend/modules/builder/store/element.js
@@ -10,7 +10,7 @@ const populateElement = (element, registry) => {
   element._ = {
     contentLoading: true,
     content: [],
-    hasNextPage: false,
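+    // Assume there is a next page until a fetch tells us otherwise.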
+    hasNextPage: true,
     reset: 0,
     shouldBeFocused: false,
     elementNamespacePath: null,
diff --git a/web-frontend/modules/builder/store/elementContent.js b/web-frontend/modules/builder/store/elementContent.js
index 68739d5ea..baf1f3066 100644
--- a/web-frontend/modules/builder/store/elementContent.js
+++ b/web-frontend/modules/builder/store/elementContent.js
@@ -44,7 +44,7 @@ const mutations = {
 
   CLEAR_CONTENT(state, { element }) {
     element._.content = []
-    element._.hasNextPage = false
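+    // Clearing the content means there may be more pages to fetch again.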
+    element._.hasNextPage = true
   },
   TRIGGER_RESET(state, { element }) {
     element._.reset += 1
@@ -106,7 +106,11 @@ const actions = {
      * - Root collection element (with a dataSource):
      *      - Parent collection element (this `element`!) with a schema property.
      */
-    if (dataSource === null) {
+    if (!dataSource) {
+      // Without a data source there can't be any more pages to fetch
+      commit('SET_HAS_MORE_PAGE', { element, value: false })
+      commit('SET_LOADING', { element, value: false })
+
       if (!element.schema_property) {
         // We have a collection element that supports schema properties, and
         // we have A) no data source and B) no schema property
@@ -117,8 +121,6 @@ const actions = {
         return
       }
 
-      commit('SET_LOADING', { element, value: true })
-
       // Collect all collection element ancestors, with a `data_source_id`.
       const collectionAncestors = this.app.store.getters[
         'element/getAncestors'
@@ -167,6 +169,8 @@ const actions = {
         element,
         value: elementContent,
       })
+      // No more content to fetch in this case
+      commit('SET_HAS_MORE_PAGE', { element, value: false })
       commit('SET_LOADING', { element, value: false })
       return
     }
@@ -198,7 +202,8 @@ const actions = {
           ])
 
           // Everything is already loaded we can quit now
-          if (!rangeToFetch) {
+          if (!rangeToFetch || !getters.getHasMorePage(element)) {
+            commit('SET_LOADING', { element, value: false })
             return
           }
           rangeToFetch = [rangeToFetch[0], rangeToFetch[1] - rangeToFetch[0]]
diff --git a/web-frontend/test/unit/builder/components/elements/components/__snapshots__/RecordSelectorElement.spec.js.snap b/web-frontend/test/unit/builder/components/elements/components/__snapshots__/RecordSelectorElement.spec.js.snap
index 0eecf300e..4ac501335 100644
--- a/web-frontend/test/unit/builder/components/elements/components/__snapshots__/RecordSelectorElement.spec.js.snap
+++ b/web-frontend/test/unit/builder/components/elements/components/__snapshots__/RecordSelectorElement.spec.js.snap
@@ -189,7 +189,6 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 1`] = `
                    
                   <div
                     class="infinite-scroll__loading-wrapper"
-                    style=""
                   >
                     <!---->
                   </div>
@@ -406,7 +405,6 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 2`] = `
                    
                   <div
                     class="infinite-scroll__loading-wrapper"
-                    style=""
                   >
                     <!---->
                   </div>
@@ -623,7 +621,6 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 3`] = `
                    
                   <div
                     class="infinite-scroll__loading-wrapper"
-                    style=""
                   >
                     <!---->
                   </div>