diff --git a/backend/src/baserow/api/generative_ai/serializers.py b/backend/src/baserow/api/generative_ai/serializers.py
index f8b7ead19..0ff2f9aed 100644
--- a/backend/src/baserow/api/generative_ai/serializers.py
+++ b/backend/src/baserow/api/generative_ai/serializers.py
@@ -46,3 +46,17 @@ class OllamaSettingsSerializer(GenerativeAIModelsSerializer):
         required=False,
         help_text="The host that is used to authenticate with the Ollama API.",
     )
+
+
+class OpenRouterSettingsSerializer(OpenAISettingsSerializer):
+    api_key = serializers.CharField(
+        allow_blank=True,
+        required=False,
+        help_text="The OpenRouter API key that is used to authenticate with the "
+        "OpenRouter API.",
+    )
+    organization = serializers.CharField(
+        allow_blank=True,
+        required=False,
+        help_text="The organization that the OpenRouter API key belongs to.",
+    )
diff --git a/backend/src/baserow/config/settings/base.py b/backend/src/baserow/config/settings/base.py
index 74bb89969..5be41771c 100644
--- a/backend/src/baserow/config/settings/base.py
+++ b/backend/src/baserow/config/settings/base.py
@@ -1288,6 +1288,15 @@ BASEROW_OPENAI_MODELS = (
     BASEROW_OPENAI_MODELS.split(",") if BASEROW_OPENAI_MODELS else []
 )
 
+BASEROW_OPENROUTER_API_KEY = os.getenv("BASEROW_OPENROUTER_API_KEY", None)
+BASEROW_OPENROUTER_ORGANIZATION = (
+    os.getenv("BASEROW_OPENROUTER_ORGANIZATION", "") or None
+)
+BASEROW_OPENROUTER_MODELS = os.getenv("BASEROW_OPENROUTER_MODELS", "")
+BASEROW_OPENROUTER_MODELS = (
+    BASEROW_OPENROUTER_MODELS.split(",") if BASEROW_OPENROUTER_MODELS else []
+)
+
 BASEROW_ANTHROPIC_API_KEY = os.getenv("BASEROW_ANTHROPIC_API_KEY", None)
 BASEROW_ANTHROPIC_MODELS = os.getenv("BASEROW_ANTHROPIC_MODELS", "")
 BASEROW_ANTHROPIC_MODELS = (
diff --git a/backend/src/baserow/config/settings/test.py b/backend/src/baserow/config/settings/test.py
index 41644e824..c67faffb3 100644
--- a/backend/src/baserow/config/settings/test.py
+++ b/backend/src/baserow/config/settings/test.py
@@ -71,6 +71,9 @@ if "baserow.middleware.ConcurrentUserRequestsMiddleware" in MIDDLEWARE:
 BASEROW_OPENAI_API_KEY = None
 BASEROW_OPENAI_ORGANIZATION = None
 BASEROW_OPENAI_MODELS = []
+BASEROW_OPENROUTER_API_KEY = None
+BASEROW_OPENROUTER_ORGANIZATION = None
+BASEROW_OPENROUTER_MODELS = []
 BASEROW_ANTHROPIC_API_KEY = None
 BASEROW_ANTHROPIC_MODELS = []
 BASEROW_MISTRAL_API_KEY = None
diff --git a/backend/src/baserow/core/apps.py b/backend/src/baserow/core/apps.py
index 133f3e6e5..97228353c 100755
--- a/backend/src/baserow/core/apps.py
+++ b/backend/src/baserow/core/apps.py
@@ -332,6 +332,7 @@ class CoreConfig(AppConfig):
             MistralGenerativeAIModelType,
             OllamaGenerativeAIModelType,
             OpenAIGenerativeAIModelType,
+            OpenRouterGenerativeAIModelType,
         )
         from baserow.core.generative_ai.registries import (
             generative_ai_model_type_registry,
@@ -341,6 +342,7 @@ class CoreConfig(AppConfig):
         generative_ai_model_type_registry.register(AnthropicGenerativeAIModelType())
         generative_ai_model_type_registry.register(MistralGenerativeAIModelType())
         generative_ai_model_type_registry.register(OllamaGenerativeAIModelType())
+        generative_ai_model_type_registry.register(OpenRouterGenerativeAIModelType())
 
         # Must import the Posthog signal, otherwise it won't work.
         import baserow.core.posthog  # noqa: F403, F401
diff --git a/backend/src/baserow/core/generative_ai/generative_ai_model_types.py b/backend/src/baserow/core/generative_ai/generative_ai_model_types.py
index a09cbfeb8..44bf5dd47 100644
--- a/backend/src/baserow/core/generative_ai/generative_ai_model_types.py
+++ b/backend/src/baserow/core/generative_ai/generative_ai_model_types.py
@@ -18,11 +18,7 @@ from baserow.core.generative_ai.types import FileId
 from .registries import GenerativeAIModelType, GenerativeAIWithFilesModelType
 
 
-class OpenAIGenerativeAIModelType(
-    GenerativeAIWithFilesModelType, GenerativeAIModelType
-):
-    type = "openai"
-
+class BaseOpenAIGenerativeAIModelType(GenerativeAIModelType):
     def get_api_key(self, workspace=None):
         return (
             self.get_workspace_setting(workspace, "api_key")
@@ -69,6 +65,12 @@ class OpenAIGenerativeAIModelType(
             raise GenerativeAIPromptError(str(exc)) from exc
         return chat_completion.choices[0].message.content
 
+
+class OpenAIGenerativeAIModelType(
+    GenerativeAIWithFilesModelType, BaseOpenAIGenerativeAIModelType
+):
+    type = "openai"
+
     def is_file_compatible(self, file_name: str) -> bool:
         # See supported files at:
         # https://platform.openai.com/docs/assistants/tools/file-search/supported-files
@@ -317,3 +319,44 @@ class OllamaGenerativeAIModelType(GenerativeAIModelType):
         from baserow.api.generative_ai.serializers import OllamaSettingsSerializer
 
         return OllamaSettingsSerializer
+
+
+class OpenRouterGenerativeAIModelType(BaseOpenAIGenerativeAIModelType):
+    """
+    The OpenRouter API is compatible with the OpenAI API.
+    """
+
+    type = "openrouter"
+
+    def get_api_key(self, workspace=None):
+        return (
+            self.get_workspace_setting(workspace, "api_key")
+            or settings.BASEROW_OPENROUTER_API_KEY
+        )
+
+    def get_enabled_models(self, workspace=None):
+        workspace_models = self.get_workspace_setting(workspace, "models")
+        return workspace_models or settings.BASEROW_OPENROUTER_MODELS
+
+    def get_organization(self, workspace=None):
+        return (
+            self.get_workspace_setting(workspace, "organization")
+            or settings.BASEROW_OPENROUTER_ORGANIZATION
+        )
+
+    def get_client(self, workspace=None):
+        api_key = self.get_api_key(workspace)
+        organization = self.get_organization(workspace)
+        return OpenAI(
+            api_key=api_key,
+            organization=organization,
+            base_url="https://openrouter.ai/api/v1",
+        )
+
+    def get_settings_serializer(self):
+        from baserow.api.generative_ai.serializers import OpenRouterSettingsSerializer
+
+        return OpenRouterSettingsSerializer
+
+    def is_file_compatible(self, file_name):
+        return False
diff --git a/changelog/entries/unreleased/feature/3259_open_router_generative_ai_integration.json b/changelog/entries/unreleased/feature/3259_open_router_generative_ai_integration.json
new file mode 100644
index 000000000..f2c82f948
--- /dev/null
+++ b/changelog/entries/unreleased/feature/3259_open_router_generative_ai_integration.json
@@ -0,0 +1,7 @@
+{
+  "type": "feature",
+  "message": "OpenRouter.ai generative AI integration.",
+  "issue_number": 3259,
+  "bullet_points": [],
+  "created_at": "2024-12-05"
+}
diff --git a/docker-compose.yml b/docker-compose.yml
index adccfc37a..6c9a030cc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -176,6 +176,9 @@ x-backend-variables: &backend-variables
   BASEROW_OPENAI_API_KEY:
   BASEROW_OPENAI_ORGANIZATION:
   BASEROW_OPENAI_MODELS:
+  BASEROW_OPENROUTER_API_KEY:
+  BASEROW_OPENROUTER_ORGANIZATION:
+  BASEROW_OPENROUTER_MODELS:
   BASEROW_ANTHROPIC_API_KEY:
   BASEROW_ANTHROPIC_MODELS:
   BASEROW_MISTRAL_API_KEY:
diff --git a/docs/installation/configuration.md b/docs/installation/configuration.md
index b154f8182..ec9587a4e 100644
--- a/docs/installation/configuration.md
+++ b/docs/installation/configuration.md
@@ -122,17 +122,20 @@ The installation methods referred to in the variable descriptions are:
 
 ### Generative AI configuration
 
-| Name                          | Description                                                                                                                                                                                                                                                                                                                                                 | Defaults |
-|-------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|
-| BASEROW\_OPENAI\_API\_KEY     | Provide an OpenAI API key to allow using OpenAI for the generative AI features like the AI field. (https://platform.openai.com/api-keys)                                                                                                                                                                                                                    |          |
-| BASEROW\_OPENAI\_ORGANIZATION | Optionally provide an OpenAI organization name that will be used when making an API connection.                                                                                                                                                                                                                                                             |          |
-| BASEROW\_OPENAI\_MODELS       | Provide a comma separated list of OpenAI models (https://platform.openai.com/docs/models/overview) that you would like to enable in the instance (e.g. `gpt-3.5-turbo,gpt-4-turbo-preview`). Note that this only works if an OpenAI API key is set. If this variable is not provided, the user won't be able to choose a model.                             |          |
-| BASEROW\_ANTHROPIC\_API\_KEY  | Provide an Anthropic API key to allow using Anthropic for the generative AI features like the AI field. (https://docs.anthropic.com/en/api/getting-started)                                                                                                                                                                                                 |          |
-| BASEROW\_ANTHROPIC\_MODELS    | Provide a comma separated list of Anthropic models (https://docs.anthropic.com/en/docs/about-claude/models) that you would like to enable in the instance (e.g. `claude-3-5-sonnet-20241022,claude-3-opus-20240229`). Note that this only works if an Anthropic API key is set. If this variable is not provided, the user won't be able to choose a model. |          |
-| BASEROW\_MISTRAL\_API\_KEY    | Provide a Mistral API key to allow using Mistral for the generative AI features like the AI field. (https://docs.mistral.ai/getting-started/quickstart/)                                                                                                                                                                                                      |          |
-| BASEROW\_MISTRAL\_MODELS      | Provide a comma separated list of Mistral models (https://docs.mistral.ai/getting-started/models/models_overview/) that you would like to enable in the instance (e.g. `mistral-large-latest,mistral-small-latest`). Note that this only works if an Mistral API key is set. If this variable is not provided, the user won't be able to choose a model.    |          |
-| BASEROW\_OLLAMA\_HOST         | Provide an OLLAMA host to allow using OLLAMA for generative AI features like the AI field.                                                                                                                                                                                                                                                                  |          |
-| BASEROW\_OLLAMA\_MODELS       | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model.                                                                                 |          |
+| Name                              | Description                                                                                                                                                                                                                                                                                                                                                 | Defaults |
+|-----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|
+| BASEROW\_OPENAI\_API\_KEY         | Provide an OpenAI API key to allow using OpenAI for the generative AI features like the AI field. (https://platform.openai.com/api-keys)                                                                                                                                                                                                                    |          |
+| BASEROW\_OPENAI\_ORGANIZATION     | Optionally provide an OpenAI organization name that will be used when making an API connection.                                                                                                                                                                                                                                                             |          |
+| BASEROW\_OPENAI\_MODELS           | Provide a comma separated list of OpenAI models (https://platform.openai.com/docs/models/overview) that you would like to enable in the instance (e.g. `gpt-3.5-turbo,gpt-4-turbo-preview`). Note that this only works if an OpenAI API key is set. If this variable is not provided, the user won't be able to choose a model.                             |          |
+| BASEROW\_OPENROUTER\_API\_KEY     | Provide an Open Router API key to allow using Open Router for the generative AI features like the AI field. (https://openrouter.ai/settings/keys)                                                                                                                                                                                                           |          |
+| BASEROW\_OPENROUTER\_ORGANIZATION | Optionally provide an Open Router organization name that will be used when making an API connection.                                                                                                                                                                                                                                                        |          |
+| BASEROW\_OPENROUTER\_MODELS       | Provide a comma separated list of Open Router models (https://openrouter.ai/models) that you would like to enable in the instance (e.g. `openai/gpt-4o,anthropic/claude-3-haiku`). Note that this only works if an Open Router API key is set. If this variable is not provided, the user won't be able to choose a model.                                                   |          |
+| BASEROW\_ANTHROPIC\_API\_KEY      | Provide an Anthropic API key to allow using Anthropic for the generative AI features like the AI field. (https://docs.anthropic.com/en/api/getting-started)                                                                                                                                                                                                 |          |
+| BASEROW\_ANTHROPIC\_MODELS        | Provide a comma separated list of Anthropic models (https://docs.anthropic.com/en/docs/about-claude/models) that you would like to enable in the instance (e.g. `claude-3-5-sonnet-20241022,claude-3-opus-20240229`). Note that this only works if an Anthropic API key is set. If this variable is not provided, the user won't be able to choose a model. |          |
+| BASEROW\_MISTRAL\_API\_KEY        | Provide a Mistral API key to allow using Mistral for the generative AI features like the AI field. (https://docs.mistral.ai/getting-started/quickstart/)                                                                                                                                                                                                    |          |
+| BASEROW\_MISTRAL\_MODELS          | Provide a comma separated list of Mistral models (https://docs.mistral.ai/getting-started/models/models_overview/) that you would like to enable in the instance (e.g. `mistral-large-latest,mistral-small-latest`). Note that this only works if a Mistral API key is set. If this variable is not provided, the user won't be able to choose a model.    |          |
+| BASEROW\_OLLAMA\_HOST             | Provide an OLLAMA host to allow using OLLAMA for generative AI features like the AI field.                                                                                                                                                                                                                                                                  |          |
+| BASEROW\_OLLAMA\_MODELS           | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model.                                                                                 |          |
 
 ### Backend Misc Configuration
 | Name                                                       | Description                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                        | Defaults               |
diff --git a/premium/backend/src/baserow_premium/fields/models.py b/premium/backend/src/baserow_premium/fields/models.py
index 76f7a64f0..fc877dd3d 100644
--- a/premium/backend/src/baserow_premium/fields/models.py
+++ b/premium/backend/src/baserow_premium/fields/models.py
@@ -9,7 +9,7 @@ from .registries import ai_field_output_registry
 
 class AIField(Field):
     ai_generative_ai_type = models.CharField(max_length=32, null=True)
-    ai_generative_ai_model = models.CharField(max_length=32, null=True)
+    ai_generative_ai_model = models.CharField(max_length=128, null=True)
     ai_output_type = models.CharField(
         max_length=32,
         db_default=TextAIFieldOutputType.type,
diff --git a/premium/backend/src/baserow_premium/migrations/0024_alter_aifield_ai_generative_ai_model.py b/premium/backend/src/baserow_premium/migrations/0024_alter_aifield_ai_generative_ai_model.py
new file mode 100644
index 000000000..6595f5476
--- /dev/null
+++ b/premium/backend/src/baserow_premium/migrations/0024_alter_aifield_ai_generative_ai_model.py
@@ -0,0 +1,17 @@
+# Generated by Django 5.0.9 on 2024-12-05 21:26
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("baserow_premium", "0023_aifield_ai_output_type"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="aifield",
+            name="ai_generative_ai_model",
+            field=models.CharField(max_length=128, null=True),
+        ),
+    ]
diff --git a/web-frontend/modules/core/generativeAIModelTypes.js b/web-frontend/modules/core/generativeAIModelTypes.js
index d92d2c61f..3b297310b 100644
--- a/web-frontend/modules/core/generativeAIModelTypes.js
+++ b/web-frontend/modules/core/generativeAIModelTypes.js
@@ -176,3 +176,39 @@ export class OllamaModelType extends GenerativeAIModelType {
     return 1
   }
 }
+
+export class OpenRouterModelType extends GenerativeAIModelType {
+  static getType() {
+    return 'openrouter'
+  }
+
+  getName() {
+    const { i18n } = this.app
+    return i18n.t('generativeAIModelType.openRouter')
+  }
+
+  getSettings() {
+    const { i18n } = this.app
+    return [
+      {
+        key: 'api_key',
+        label: i18n.t('generativeAIModelType.openRouterApiKeyLabel'),
+        description: i18n.t(
+          'generativeAIModelType.openRouterApiKeyDescription'
+        ),
+      },
+      {
+        key: 'organization',
+        label: i18n.t('generativeAIModelType.openRouterOrganization'),
+      },
+      modelSettings(
+        i18n.t('generativeAIModelType.openRouterModelsLabel'),
+        i18n.t('generativeAIModelType.openRouterModelsDescription')
+      ),
+    ]
+  }
+
+  getOrder() {
+    return 50
+  }
+}
diff --git a/web-frontend/modules/core/locales/en.json b/web-frontend/modules/core/locales/en.json
index 537f2aefd..74d6de64a 100644
--- a/web-frontend/modules/core/locales/en.json
+++ b/web-frontend/modules/core/locales/en.json
@@ -242,7 +242,13 @@
     "ollamaHostLabel": "Host",
     "ollamaHostDescription": "Provide the hostname to your [Ollama](https://ollama.com/) server. This typically runs locally on your own device.",
     "ollamaModelsLabel": "Enabled Models",
-    "ollamaModelsDescription": "Provide a list of comma separated [Ollama installed models](https://ollama.com/library). Note that the model must be downloaded and installed before it can be used. (e.g. `llama2,mistral`)"
+    "ollamaModelsDescription": "Provide a list of comma separated [Ollama installed models](https://ollama.com/library). Note that the model must be downloaded and installed before it can be used. (e.g. `llama2,mistral`)",
+    "openRouter": "OpenRouter",
+    "openRouterApiKeyLabel": "API Key",
+    "openRouterApiKeyDescription": "Provide an OpenRouter API key if you would like to enable the integration. You can [get an API key](https://openrouter.ai/settings/keys).",
+    "openRouterOrganization": "Organization (optional)",
+    "openRouterModelsLabel": "Enabled Models",
+    "openRouterModelsDescription": "Provide a list of comma separated [OpenRouter models](https://openrouter.ai/models) that can be used in Baserow. (e.g. `openai/gpt-4o,anthropic/claude-3-haiku`)"
   },
   "generativeAIWorkspaceSettings": {
     "title": "Generative AI settings",
diff --git a/web-frontend/modules/core/plugin.js b/web-frontend/modules/core/plugin.js
index 53e308f8f..d332b7206 100644
--- a/web-frontend/modules/core/plugin.js
+++ b/web-frontend/modules/core/plugin.js
@@ -23,6 +23,7 @@ import {
   OllamaModelType,
   AnthropicModelType,
   MistralModelType,
+  OpenRouterModelType,
 } from '@baserow/modules/core/generativeAIModelTypes'
 import {
   UploadFileUserFileUploadType,
@@ -149,6 +150,7 @@ export default (context, inject) => {
   registry.register('generativeAIModel', new AnthropicModelType(context))
   registry.register('generativeAIModel', new MistralModelType(context))
   registry.register('generativeAIModel', new OllamaModelType(context))
+  registry.register('generativeAIModel', new OpenRouterModelType(context))
 
   registry.register('permissionManager', new CorePermissionManagerType(context))
   registry.register(