1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-04 13:15:24 +00:00

Resolve "OpenRouter.ai generative AI integration"

This commit is contained in:
Bram Wiepjes 2024-12-10 20:58:43 +00:00
parent caa20c7799
commit 383927c837
13 changed files with 163 additions and 18 deletions
backend/src/baserow
api/generative_ai
config/settings
core
changelog/entries/unreleased/feature
docker-compose.yml
docs/installation
premium/backend/src/baserow_premium
web-frontend/modules/core

View file

@ -46,3 +46,17 @@ class OllamaSettingsSerializer(GenerativeAIModelsSerializer):
required=False,
help_text="The host that is used to authenticate with the Ollama API.",
)
class OpenRouterSettingsSerializer(OpenAISettingsSerializer):
    """Workspace-level settings for the OpenRouter.ai generative AI provider.

    Inherits the OpenAI settings shape because the OpenRouter API is
    OpenAI-compatible; only the help texts differ.
    """

    api_key = serializers.CharField(
        allow_blank=True,
        required=False,
        # Fixed copy/paste error: this key authenticates with the OpenRouter
        # API, not the OpenAI API.
        help_text="The OpenRouter API key that is used to authenticate with the "
        "OpenRouter API.",
    )
    organization = serializers.CharField(
        allow_blank=True,
        required=False,
        help_text="The organization that the OpenRouter API key belongs to.",
    )

View file

@ -1288,6 +1288,15 @@ BASEROW_OPENAI_MODELS = (
BASEROW_OPENAI_MODELS.split(",") if BASEROW_OPENAI_MODELS else []
)
# OpenRouter.ai credentials read from the environment; ``None`` when unset,
# which leaves the integration disabled instance-wide.
BASEROW_OPENROUTER_API_KEY = os.getenv("BASEROW_OPENROUTER_API_KEY", None)
# ``or None`` normalises an empty string to ``None`` so later truthiness
# checks treat "set to empty" the same as "not set".
BASEROW_OPENROUTER_ORGANIZATION = (
    os.getenv("BASEROW_OPENROUTER_ORGANIZATION", "") or None
)
# Comma separated list of model identifiers (e.g.
# "openai/gpt-4o,anthropic/claude-3-haiku"); parsed into a list, or [] when
# the variable is empty/unset.
BASEROW_OPENROUTER_MODELS = os.getenv("BASEROW_OPENROUTER_MODELS", "")
BASEROW_OPENROUTER_MODELS = (
    BASEROW_OPENROUTER_MODELS.split(",") if BASEROW_OPENROUTER_MODELS else []
)
BASEROW_ANTHROPIC_API_KEY = os.getenv("BASEROW_ANTHROPIC_API_KEY", None)
BASEROW_ANTHROPIC_MODELS = os.getenv("BASEROW_ANTHROPIC_MODELS", "")
BASEROW_ANTHROPIC_MODELS = (

View file

@ -71,6 +71,9 @@ if "baserow.middleware.ConcurrentUserRequestsMiddleware" in MIDDLEWARE:
# Explicitly blank out every generative AI provider credential/model list.
# NOTE(review): this hunk appears to belong to a dev/test settings module that
# disables the integrations regardless of the environment — confirm against
# the full file.
BASEROW_OPENAI_API_KEY = None
BASEROW_OPENAI_ORGANIZATION = None
BASEROW_OPENAI_MODELS = []
BASEROW_OPENROUTER_API_KEY = None
BASEROW_OPENROUTER_ORGANIZATION = None
BASEROW_OPENROUTER_MODELS = []
BASEROW_ANTHROPIC_API_KEY = None
BASEROW_ANTHROPIC_MODELS = []
BASEROW_MISTRAL_API_KEY = None

View file

@ -332,6 +332,7 @@ class CoreConfig(AppConfig):
MistralGenerativeAIModelType,
OllamaGenerativeAIModelType,
OpenAIGenerativeAIModelType,
OpenRouterGenerativeAIModelType,
)
from baserow.core.generative_ai.registries import (
generative_ai_model_type_registry,
@ -341,6 +342,7 @@ class CoreConfig(AppConfig):
generative_ai_model_type_registry.register(AnthropicGenerativeAIModelType())
generative_ai_model_type_registry.register(MistralGenerativeAIModelType())
generative_ai_model_type_registry.register(OllamaGenerativeAIModelType())
generative_ai_model_type_registry.register(OpenRouterGenerativeAIModelType())
# Must import the Posthog signal, otherwise it won't work.
import baserow.core.posthog # noqa: F403, F401

View file

@ -18,11 +18,7 @@ from baserow.core.generative_ai.types import FileId
from .registries import GenerativeAIModelType, GenerativeAIWithFilesModelType
class OpenAIGenerativeAIModelType(
GenerativeAIWithFilesModelType, GenerativeAIModelType
):
type = "openai"
class BaseOpenAIGenerativeAIModelType(GenerativeAIModelType):
def get_api_key(self, workspace=None):
return (
self.get_workspace_setting(workspace, "api_key")
@ -69,6 +65,12 @@ class OpenAIGenerativeAIModelType(
raise GenerativeAIPromptError(str(exc)) from exc
return chat_completion.choices[0].message.content
class OpenAIGenerativeAIModelType(
GenerativeAIWithFilesModelType, BaseOpenAIGenerativeAIModelType
):
type = "openai"
def is_file_compatible(self, file_name: str) -> bool:
# See supported files at:
# https://platform.openai.com/docs/assistants/tools/file-search/supported-files
@ -317,3 +319,44 @@ class OllamaGenerativeAIModelType(GenerativeAIModelType):
from baserow.api.generative_ai.serializers import OllamaSettingsSerializer
return OllamaSettingsSerializer
class OpenRouterGenerativeAIModelType(BaseOpenAIGenerativeAIModelType):
    """Generative AI model type backed by OpenRouter.ai.

    OpenRouter exposes an OpenAI-compatible API, so this type reuses the
    OpenAI client with a different base URL. Each setting falls back from the
    workspace-level value to the instance-wide Django setting.
    """

    type = "openrouter"

    def get_api_key(self, workspace=None):
        # Workspace setting wins; otherwise fall back to the instance setting.
        workspace_api_key = self.get_workspace_setting(workspace, "api_key")
        if workspace_api_key:
            return workspace_api_key
        return settings.BASEROW_OPENROUTER_API_KEY

    def get_enabled_models(self, workspace=None):
        models_for_workspace = self.get_workspace_setting(workspace, "models")
        if models_for_workspace:
            return models_for_workspace
        return settings.BASEROW_OPENROUTER_MODELS

    def get_organization(self, workspace=None):
        workspace_organization = self.get_workspace_setting(
            workspace, "organization"
        )
        if workspace_organization:
            return workspace_organization
        return settings.BASEROW_OPENROUTER_ORGANIZATION

    def get_client(self, workspace=None):
        # Same OpenAI SDK client, pointed at the OpenRouter endpoint.
        return OpenAI(
            api_key=self.get_api_key(workspace),
            organization=self.get_organization(workspace),
            base_url="https://openrouter.ai/api/v1",
        )

    def get_settings_serializer(self):
        from baserow.api.generative_ai.serializers import OpenRouterSettingsSerializer

        return OpenRouterSettingsSerializer

    def is_file_compatible(self, file_name):
        # File search / uploads are not supported through this provider.
        return False

View file

@ -0,0 +1,7 @@
{
"type": "feature",
"message": "OpenRouter.ai generative AI integration.",
"issue_number": 3259,
"bullet_points": [],
"created_at": "2024-12-05"
}

View file

@ -176,6 +176,9 @@ x-backend-variables: &backend-variables
BASEROW_OPENAI_API_KEY:
BASEROW_OPENAI_ORGANIZATION:
BASEROW_OPENAI_MODELS:
BASEROW_OPENROUTER_API_KEY:
BASEROW_OPENROUTER_ORGANIZATION:
BASEROW_OPENROUTER_MODELS:
BASEROW_ANTHROPIC_API_KEY:
BASEROW_ANTHROPIC_MODELS:
BASEROW_MISTRAL_API_KEY:

View file

@ -122,17 +122,20 @@ The installation methods referred to in the variable descriptions are:
### Generative AI configuration
| Name | Description | Defaults |
|-------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|
| BASEROW\_OPENAI\_API\_KEY | Provide an OpenAI API key to allow using OpenAI for the generative AI features like the AI field. (https://platform.openai.com/api-keys) | |
| BASEROW\_OPENAI\_ORGANIZATION | Optionally provide an OpenAI organization name that will be used when making an API connection. | |
| BASEROW\_OPENAI\_MODELS | Provide a comma separated list of OpenAI models (https://platform.openai.com/docs/models/overview) that you would like to enable in the instance (e.g. `gpt-3.5-turbo,gpt-4-turbo-preview`). Note that this only works if an OpenAI API key is set. If this variable is not provided, the user won't be able to choose a model. | |
| BASEROW\_ANTHROPIC\_API\_KEY | Provide an Anthropic API key to allow using Anthropic for the generative AI features like the AI field. (https://docs.anthropic.com/en/api/getting-started) | |
| BASEROW\_ANTHROPIC\_MODELS | Provide a comma separated list of Anthropic models (https://docs.anthropic.com/en/docs/about-claude/models) that you would like to enable in the instance (e.g. `claude-3-5-sonnet-20241022,claude-3-opus-20240229`). Note that this only works if an Anthropic API key is set. If this variable is not provided, the user won't be able to choose a model. | |
| BASEROW\_MISTRAL\_API\_KEY | Provide a Mistral API key to allow using Mistral for the generative AI features like the AI field. (https://docs.mistral.ai/getting-started/quickstart/) | |
| BASEROW\_MISTRAL\_MODELS | Provide a comma separated list of Mistral models (https://docs.mistral.ai/getting-started/models/models_overview/) that you would like to enable in the instance (e.g. `mistral-large-latest,mistral-small-latest`). Note that this only works if an Mistral API key is set. If this variable is not provided, the user won't be able to choose a model. | |
| BASEROW\_OLLAMA\_HOST | Provide an OLLAMA host to allow using OLLAMA for generative AI features like the AI field. | |
| BASEROW\_OLLAMA\_MODELS | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model. | |
| Name | Description | Defaults |
|-----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|
| BASEROW\_OPENAI\_API\_KEY | Provide an OpenAI API key to allow using OpenAI for the generative AI features like the AI field. (https://platform.openai.com/api-keys) | |
| BASEROW\_OPENAI\_ORGANIZATION | Optionally provide an OpenAI organization name that will be used when making an API connection. | |
| BASEROW\_OPENAI\_MODELS | Provide a comma separated list of OpenAI models (https://platform.openai.com/docs/models/overview) that you would like to enable in the instance (e.g. `gpt-3.5-turbo,gpt-4-turbo-preview`). Note that this only works if an OpenAI API key is set. If this variable is not provided, the user won't be able to choose a model. | |
| BASEROW\_OPENROUTER\_API\_KEY | Provide an Open Router API key to allow using Open Router for the generative AI features like the AI field. (https://openrouter.ai/settings/keys) | |
| BASEROW\_OPENROUTER\_ORGANIZATION | Optionally provide an Open Router organization name that will be used when making an API connection. | |
| BASEROW\_OPENROUTER\_MODELS       | Provide a comma separated list of Open Router models (https://openrouter.ai/models) that you would like to enable in the instance (e.g. `openai/gpt-4o,anthropic/claude-3-haiku`). Note that this only works if an Open Router API key is set. If this variable is not provided, the user won't be able to choose a model.                                     |          |
| BASEROW\_ANTHROPIC\_API\_KEY | Provide an Anthropic API key to allow using Anthropic for the generative AI features like the AI field. (https://docs.anthropic.com/en/api/getting-started) | |
| BASEROW\_ANTHROPIC\_MODELS | Provide a comma separated list of Anthropic models (https://docs.anthropic.com/en/docs/about-claude/models) that you would like to enable in the instance (e.g. `claude-3-5-sonnet-20241022,claude-3-opus-20240229`). Note that this only works if an Anthropic API key is set. If this variable is not provided, the user won't be able to choose a model. | |
| BASEROW\_MISTRAL\_API\_KEY | Provide a Mistral API key to allow using Mistral for the generative AI features like the AI field. (https://docs.mistral.ai/getting-started/quickstart/) | |
| BASEROW\_MISTRAL\_MODELS          | Provide a comma separated list of Mistral models (https://docs.mistral.ai/getting-started/models/models_overview/) that you would like to enable in the instance (e.g. `mistral-large-latest,mistral-small-latest`). Note that this only works if a Mistral API key is set. If this variable is not provided, the user won't be able to choose a model.       |          |
| BASEROW\_OLLAMA\_HOST | Provide an OLLAMA host to allow using OLLAMA for generative AI features like the AI field. | |
| BASEROW\_OLLAMA\_MODELS | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model. | |
### Backend Misc Configuration
| Name | Description | Defaults |

View file

@ -9,7 +9,7 @@ from .registries import ai_field_output_registry
class AIField(Field):
ai_generative_ai_type = models.CharField(max_length=32, null=True)
ai_generative_ai_model = models.CharField(max_length=32, null=True)
ai_generative_ai_model = models.CharField(max_length=128, null=True)
ai_output_type = models.CharField(
max_length=32,
db_default=TextAIFieldOutputType.type,

View file

@ -0,0 +1,17 @@
# Generated by Django 5.0.9 on 2024-12-05 21:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen ``AIField.ai_generative_ai_model`` from 32 to 128 characters.

    Namespaced model identifiers (e.g. ``anthropic/claude-3-haiku`` from
    OpenRouter) can exceed the previous 32-character limit.
    """

    dependencies = [
        ("baserow_premium", "0023_aifield_ai_output_type"),
    ]

    operations = [
        migrations.AlterField(
            model_name="aifield",
            name="ai_generative_ai_model",
            field=models.CharField(max_length=128, null=True),
        ),
    ]

View file

@ -176,3 +176,39 @@ export class OllamaModelType extends GenerativeAIModelType {
return 1
}
}
/**
 * Frontend registration of the OpenRouter.ai generative AI model type.
 * Exposes the settings fields (API key, optional organization, enabled
 * models) rendered in the admin/workspace settings forms.
 */
export class OpenRouterModelType extends GenerativeAIModelType {
  static getType() {
    return 'openrouter'
  }

  getName() {
    return this.app.i18n.t('generativeAIModelType.openRouter')
  }

  getSettings() {
    const { i18n } = this.app
    const apiKeySetting = {
      key: 'api_key',
      label: i18n.t('generativeAIModelType.openRouterApiKeyLabel'),
      description: i18n.t('generativeAIModelType.openRouterApiKeyDescription'),
    }
    const organizationSetting = {
      key: 'organization',
      label: i18n.t('generativeAIModelType.openRouterOrganization'),
    }
    const enabledModelsSetting = modelSettings(
      i18n.t('generativeAIModelType.openRouterModelsLabel'),
      i18n.t('generativeAIModelType.openRouterModelsDescription')
    )
    return [apiKeySetting, organizationSetting, enabledModelsSetting]
  }

  getOrder() {
    return 50
  }
}

View file

@ -242,7 +242,13 @@
"ollamaHostLabel": "Host",
"ollamaHostDescription": "Provide the hostname to your [Ollama](https://ollama.com/) server. This typically runs locally on your own device.",
"ollamaModelsLabel": "Enabled Models",
"ollamaModelsDescription": "Provide a list of comma separated [Ollama installed models](https://ollama.com/library). Note that the model must be downloaded and installed before it can be used. (e.g. `llama2,mistral`)"
"ollamaModelsDescription": "Provide a list of comma separated [Ollama installed models](https://ollama.com/library). Note that the model must be downloaded and installed before it can be used. (e.g. `llama2,mistral`)",
"openRouter": "OpenRouter",
"openRouterApiKeyLabel": "API Key",
"openRouterApiKeyDescription": "Provide an OpenRouter API key if you would like to enable the integration. [get an API key](https://openrouter.ai/settings/keys).",
"openRouterOrganization": "Organization (optional)",
"openRouterModelsLabel": "Enabled Models",
"openRouterModelsDescription": "Provide a list of comma separated [OpenRouter models](https://openrouter.ai/models) that can be used in Baserow. (e.g. `openai/gpt-4o,anthropic/claude-3-haiku`)"
},
"generativeAIWorkspaceSettings": {
"title": "Generative AI settings",

View file

@ -23,6 +23,7 @@ import {
OllamaModelType,
AnthropicModelType,
MistralModelType,
OpenRouterModelType,
} from '@baserow/modules/core/generativeAIModelTypes'
import {
UploadFileUserFileUploadType,
@ -149,6 +150,7 @@ export default (context, inject) => {
registry.register('generativeAIModel', new AnthropicModelType(context))
registry.register('generativeAIModel', new MistralModelType(context))
registry.register('generativeAIModel', new OllamaModelType(context))
registry.register('generativeAIModel', new OpenRouterModelType(context))
registry.register('permissionManager', new CorePermissionManagerType(context))
registry.register(