Mirror of https://gitlab.com/bramw/baserow.git, synced 2025-04-15 01:28:30 +00:00
Merge branch '931-allow-webhooks-to-access-private-addresses-urls' into 'develop'
Resolve "Allow Webhooks to access private addresses/urls"
Closes #1124 and #931
See merge request bramw/baserow!803
commit 5a60bb0d9a
31 changed files with 1184 additions and 105 deletions
.env.example
changelog.md
docker-compose.local-build.yml
docker-compose.no-caddy.yml
docker-compose.yml
backend
src/baserow
config/settings
contrib/database
api/webhooks
migrations
webhooks
test_utils
tests/baserow/contrib/database/webhooks
docs/installation
web-frontend
locales
modules
core
database
components/webhook
locales
test/unit/core/utils
10 .env.example
@@ -73,6 +73,16 @@ DATABASE_NAME=baserow
# PUBLIC_WEB_FRONTEND_URL=
# MEDIA_URL=
# MEDIA_ROOT=
# BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS=
# BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST=
# BASEROW_WEBHOOKS_IP_WHITELIST=
# BASEROW_WEBHOOKS_IP_BLACKLIST=
# BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS=
# BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=
# BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=
# BASEROW_WEBHOOKS_MAX_PER_TABLE=
# BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES=
# BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS=

# BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT=
# HOURS_UNTIL_TRASH_PERMANENTLY_DELETED=
@@ -1,7 +1,9 @@
import datetime
import importlib
import os
import re
from decimal import Decimal
from ipaddress import ip_network
from pathlib import Path
from urllib.parse import urljoin, urlparse

@@ -559,11 +561,40 @@ DONT_UPDATE_FORMULAS_AFTER_MIGRATION = bool(
    os.getenv("DONT_UPDATE_FORMULAS_AFTER_MIGRATION", "")
)

WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES = 8
WEBHOOKS_MAX_RETRIES_PER_CALL = 8
WEBHOOKS_MAX_PER_TABLE = 20
WEBHOOKS_MAX_CALL_LOG_ENTRIES = 10
WEBHOOKS_REQUEST_TIMEOUT_SECONDS = 5
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES = int(
    os.getenv("BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES", 8)
)
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL = int(
    os.getenv("BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL", 8)
)
BASEROW_WEBHOOKS_MAX_PER_TABLE = int(os.getenv("BASEROW_WEBHOOKS_MAX_PER_TABLE", 20))
BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES = int(
    os.getenv("BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES", 10)
)
BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS = int(
    os.getenv("BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS", 5)
)
BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS = bool(
    os.getenv("BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS", False)
)
BASEROW_WEBHOOKS_IP_BLACKLIST = [
    ip_network(ip.strip())
    for ip in os.getenv("BASEROW_WEBHOOKS_IP_BLACKLIST", "").split(",")
    if ip.strip() != ""
]
BASEROW_WEBHOOKS_IP_WHITELIST = [
    ip_network(ip.strip())
    for ip in os.getenv("BASEROW_WEBHOOKS_IP_WHITELIST", "").split(",")
    if ip.strip() != ""
]
BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST = [
    re.compile(url_regex.strip())
    for url_regex in os.getenv("BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST", "").split(",")
    if url_regex.strip() != ""
]
BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS = int(
    os.getenv("BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS", "10")
)

# ======== WARNING ========
# Please read and understand everything at:
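Editorial note: as a quick illustration of how the comma-separated list settings above are parsed, here is a minimal standalone sketch. The environment values are made-up examples, not defaults shipped with Baserow.

import os
from ipaddress import ip_network

# Hypothetical values an operator might export before starting the backend.
os.environ["BASEROW_WEBHOOKS_IP_BLACKLIST"] = "10.0.0.0/8,192.168.0.0/16"

# Same comprehension as in the settings module above: split on commas,
# ignore empty entries and turn each item into an ip_network object.
ip_blacklist = [
    ip_network(ip.strip())
    for ip in os.getenv("BASEROW_WEBHOOKS_IP_BLACKLIST", "").split(",")
    if ip.strip() != ""
]
print(ip_blacklist)  # [IPv4Network('10.0.0.0/8'), IPv4Network('192.168.0.0/16')]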
@@ -5,8 +5,8 @@ from .base import *  # noqa: F403, F401
SECRET_KEY = os.getenv("SECRET_KEY", "dev_hardcoded_secret_key")  # noqa: F405

DEBUG = True
WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES = 4
WEBHOOKS_MAX_RETRIES_PER_CALL = 4
BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES = 4
BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL = 4

INSTALLED_APPS += ["django_extensions", "silk"]  # noqa: F405
@@ -11,5 +11,5 @@ ERROR_TABLE_WEBHOOK_MAX_LIMIT_EXCEEDED = (
    "ERROR_TABLE_WEBHOOK_MAX_LIMIT_EXCEEDED",
    HTTP_400_BAD_REQUEST,
    f"The maximally allowed webhooks per table has been exceeded. You can create "
    f"a maximum of {settings.WEBHOOKS_MAX_PER_TABLE} webhooks.",
    f"a maximum of {settings.BASEROW_WEBHOOKS_MAX_PER_TABLE} webhooks.",
)
@@ -156,6 +156,11 @@ class TableWebhookTestCallRequestSerializer(serializers.ModelSerializer):
        help_text="The additional headers as an object where the key is the name and "
        "the value the value.",
    )
    url = serializers.URLField(
        max_length=2000,
        validators=[url_validation],
        help_text="The URL that must be called when the webhook is triggered.",
    )

    class Meta:
        model = TableWebhook
@@ -21,8 +21,8 @@ def url_validation(value: str) -> str:
    try:
        url_validator(value)
        return value
    except DjangoValidationError:
        raise serializers.ValidationError(detail="Not a valid url", code="invalid_url")
    except DjangoValidationError as e:
        raise serializers.ValidationError(detail=e.message, code=e.code)


def http_header_validation(headers: dict) -> dict:
@@ -0,0 +1,37 @@
# Generated by Django 3.2.13 on 2022-07-08 11:19

import django.core.validators
from django.db import migrations, models

import baserow.contrib.database.webhooks.validators


class Migration(migrations.Migration):

    dependencies = [
        ("database", "0088_multiple_collaborators_field"),
    ]

    operations = [
        migrations.AlterField(
            model_name="tablewebhook",
            name="url",
            field=models.TextField(
                help_text="The URL that must be called when the webhook is triggered.",
                validators=[
                    django.core.validators.MaxLengthValidator(2000),
                    baserow.contrib.database.webhooks.validators.url_validator,
                ],
            ),
        ),
        migrations.AlterField(
            model_name="tablewebhookcall",
            name="called_url",
            field=models.TextField(
                validators=[
                    django.core.validators.MaxLengthValidator(2000),
                    baserow.contrib.database.webhooks.validators.url_validator,
                ]
            ),
        ),
    ]
@@ -20,6 +20,7 @@ from .models import (
    TableWebhookHeader,
)
from .registries import webhook_event_type_registry
from .validators import get_webhook_request_function


class WebhookHandler:

@@ -132,7 +133,7 @@ class WebhookHandler:
        webhook_count = TableWebhook.objects.filter(table_id=table.id).count()

        if webhook_count >= settings.WEBHOOKS_MAX_PER_TABLE:
        if webhook_count >= settings.BASEROW_WEBHOOKS_MAX_PER_TABLE:
            raise TableWebhookMaxAllowedCountExceeded

        allowed_fields = [

@@ -298,17 +299,14 @@ class WebhookHandler:
        :return: The request and response as the tuple (request, response)
        """

        if settings.DEBUG is True:
            from requests import request
        else:
            from advocate import request
        request = get_webhook_request_function()

        response = request(
            method,
            url,
            headers=headers,
            json=payload,
            timeout=settings.WEBHOOKS_REQUEST_TIMEOUT_SECONDS,
            timeout=settings.BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS,
        )

        if response.history:

@@ -409,7 +407,7 @@ class WebhookHandler:
    def clean_webhook_calls(self, webhook: TableWebhook):
        """
        Cleans up oldest webhook calls and makes sure that the total amount of calls
        will never exceed the `WEBHOOKS_MAX_CALL_LOG_ENTRIES` setting.
        will never exceed the `BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES` setting.

        :param webhook: The webhook for which the calls must be cleaned up.
        """

@@ -417,7 +415,9 @@ class WebhookHandler:
        calls_to_keep = (
            TableWebhookCall.objects.filter(webhook=webhook)
            .order_by("-called_time")
            .values_list("id", flat=True)[: settings.WEBHOOKS_MAX_CALL_LOG_ENTRIES]
            .values_list("id", flat=True)[
                : settings.BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES
            ]
        )
        TableWebhookCall.objects.filter(
            ~Q(id__in=calls_to_keep), webhook=webhook
@@ -1,6 +1,6 @@
import uuid

from django.core.validators import MaxLengthValidator, URLValidator
from django.core.validators import MaxLengthValidator
from django.db import models

from baserow.contrib.database.table.models import Table

@@ -34,7 +34,7 @@ class TableWebhook(CreatedAndUpdatedOnMixin, models.Model):
    # limited `CharField`
    url = models.TextField(
        help_text="The URL that must be called when the webhook is triggered.",
        validators=[MaxLengthValidator(2000), URLValidator(), url_validator],
        validators=[MaxLengthValidator(2000), url_validator],
    )
    request_method = models.CharField(
        max_length=10,

@@ -89,9 +89,7 @@ class TableWebhookCall(models.Model):
    )
    event_type = models.CharField(max_length=50)
    called_time = models.DateTimeField(null=True)
    called_url = models.TextField(
        validators=[MaxLengthValidator(2000), URLValidator(), url_validator]
    )
    called_url = models.TextField(validators=[MaxLengthValidator(2000), url_validator])
    request = models.TextField(
        null=True, help_text="A text copy of the request headers and body."
    )
@@ -4,7 +4,11 @@ from django.db import transaction
from baserow.config.celery import app


@app.task(bind=True, max_retries=settings.WEBHOOKS_MAX_RETRIES_PER_CALL, queue="export")
@app.task(
    bind=True,
    max_retries=settings.BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL,
    queue="export",
)
def call_webhook(
    self,
    webhook_id: int,

@@ -14,7 +18,7 @@ def call_webhook(
    url: str,
    headers: dict,
    payload: dict,
    **kwargs: dict
    **kwargs: dict,
):
    """
    This task should be called asynchronously when the webhook call must be triggered.

@@ -65,7 +69,7 @@ def call_webhook(
        response = exception.response
        error = str(exception)
    except UnacceptableAddressException as exception:
        error = str(exception)
        error = f"UnacceptableAddressException: {exception}"

    TableWebhookCall.objects.update_or_create(
        id=event_id,

@@ -95,7 +99,8 @@ def call_webhook(
        webhook.failed_triggers = 0
        webhook.save()
    elif not success and (
        webhook.failed_triggers < settings.WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES
        webhook.failed_triggers
        < settings.BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES
    ):
        # If the task has reached the maximum amount of failed calls, we're going to
        # give up and increase the total failed triggers of the webhook if we're

@@ -113,7 +118,10 @@ def call_webhook(
    # This part must be outside of the transaction block, otherwise it could cause
    # the transaction to rollback when the retry exception is raised, and we don't want
    # that to happen.
    if not success and self.request.retries < settings.WEBHOOKS_MAX_RETRIES_PER_CALL:
    if (
        not success
        and self.request.retries < settings.BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL
    ):
        # If the task is still operating within the max retries per call limit,
        # then we want to retry the task with an exponential backoff.
        self.retry(countdown=2 ** self.request.retries)
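Editorial note: the `countdown=2 ** self.request.retries` call above gives the retried Celery task an exponential backoff. A purely illustrative sketch of the resulting delays:

# Delay before each retry produced by countdown=2 ** retries.
for retries in range(4):
    print(f"retry {retries} waits {2 ** retries} seconds")  # 1s, 2s, 4s, 8s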
@@ -1,15 +1,54 @@
from http.client import _is_illegal_header_value, _is_legal_header_name
from socket import gaierror, timeout
from typing import Callable
from urllib.parse import urlparse

from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

from advocate.addrvalidator import AddrValidator
from advocate import AddrValidator, RequestsAPIWrapper
from advocate.connection import (
    UnacceptableAddressException,
    validating_create_connection,
)

INVALID_URL_CODE = "invalid_url"


def get_webhook_request_function() -> Callable:
    """
    Return the appropriate request function based on production environment
    or settings.
    In production mode, the advocate library is used so that the internal
    network can't be reached. This can be disabled by changing the Django
    setting BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS.
    """

    if settings.BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS is True:
        from requests import request

        return request
    else:
        from advocate import request

        addr_validator = get_advocate_address_validator()
        baserow_advocate = RequestsAPIWrapper(addr_validator)

        return baserow_advocate.request


def get_advocate_address_validator() -> AddrValidator:
    """
    Return Advocate's AddrValidator with the user configurable white and black lists.
    """

    return AddrValidator(
        ip_blacklist=settings.BASEROW_WEBHOOKS_IP_BLACKLIST,
        ip_whitelist=settings.BASEROW_WEBHOOKS_IP_WHITELIST,
        hostname_blacklist=settings.BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST,
    )


def url_validator(value):
    """

@@ -18,20 +57,25 @@ def url_validator(value):
    use of the advocate library's own address validation.

    :param value: The URL that must be validated.
    :raises serializers.ValidationError: When the provided URL is not valid.
    :raises django.core.exceptions.ValidationError: When the provided URL is not valid.
    :return: The provided URL if valid.
    """

    # in case we run the develop server we want to allow every url.
    if settings.DEBUG is True:
    if settings.BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS is True:
        return value

    # Make sure we have a valid URL with a schema before parsing otherwise the parser can
    # return incorrect hostnames etc. For example without this the URL
    # `www.google.com` will be parsed by urlparse to have a path of `www.google.com`
    # and an empty hostname.
    URLValidator(code=INVALID_URL_CODE)(value)

    try:
        url = urlparse(value)
        # Reading an invalid port can raise a ValueError exception
        port = url.port
    except ValueError:
        raise ValidationError("Invalid URL")
    except ValueError as e:
        raise ValidationError("Invalid URL", code=INVALID_URL_CODE) from e

    # in case the user does not provide a port we assume 80 if it is a
    # http url or 443 otherwise.

@@ -41,13 +85,17 @@ def url_validator(value):
    else:
        port = 443

    addr_validator = AddrValidator()
    addr_validator = get_advocate_address_validator()

    try:
        validating_create_connection((url.hostname, port), validator=addr_validator)
        validating_create_connection(
            (url.hostname, port),
            validator=addr_validator,
            timeout=settings.BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS,
        )
        return value
    except UnacceptableAddressException:
        raise ValidationError("Invalid URL")
    except (UnacceptableAddressException, gaierror, ConnectionError, timeout) as e:
        raise ValidationError("Invalid URL", code=INVALID_URL_CODE) from e


def header_name_validator(value):
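Editorial note: a minimal sketch of how the wrapper built in get_webhook_request_function behaves when private addresses are not allowed. The target URL is only an example of a non-public address; advocate rejects it because the hostname resolves to a link-local/private IP.

from advocate import AddrValidator, RequestsAPIWrapper
from advocate.connection import UnacceptableAddressException

# A default AddrValidator already rejects private and link-local ranges.
wrapper = RequestsAPIWrapper(AddrValidator())

try:
    wrapper.request("GET", "http://169.254.169.254/", timeout=5)
except UnacceptableAddressException:
    print("blocked: the hostname resolves to a non-public address")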
@@ -1,5 +1,7 @@
from contextlib import contextmanager
from decimal import Decimal
from ipaddress import ip_network
from socket import AF_INET, AF_INET6, IPPROTO_TCP, SOCK_STREAM
from typing import Any, Dict, List, Optional, Type

from django.contrib.auth import get_user_model

@@ -469,3 +471,17 @@ def assert_serialized_rows_contain_same_values(row_1, row_2):
        assert_serialized_field_values_are_the_same(
            row_1_value, row_2_value, field_name=field_name
        )


# The httpretty stub implementation of socket.getaddrinfo is incorrect and doesn't
# return an IP causing advocate to fail, instead we patch to fix this.
def stub_getaddrinfo(host, port, family=None, socktype=None, proto=None, flags=None):
    try:
        ip_network(host)
        ip = host
    except ValueError:
        ip = "1.1.1.1"
    return [
        (AF_INET, SOCK_STREAM, IPPROTO_TCP, host, (ip, port)),
        (AF_INET6, SOCK_STREAM, IPPROTO_TCP, "", (ip, port)),
    ]
@@ -199,7 +199,7 @@ def test_get_all_table_webhooks(data_fixture, django_assert_num_queries):

@pytest.mark.django_db(transaction=True)
@override_settings(WEBHOOKS_MAX_PER_TABLE=4)
@override_settings(BASEROW_WEBHOOKS_MAX_PER_TABLE=4)
def test_create_webhook(data_fixture):
    user = data_fixture.create_user()
    user_2 = data_fixture.create_user()

@@ -419,7 +419,7 @@ def test_trigger_test_call(data_fixture):

@pytest.mark.django_db
@override_settings(WEBHOOKS_MAX_CALL_LOG_ENTRIES=2)
@override_settings(BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES=2)
def test_clean_webhook_calls(data_fixture):
    webhook = data_fixture.create_table_webhook()
    deleted_1 = data_fixture.create_table_webhook_call(webhook=webhook)  # deleted
@@ -1,18 +1,23 @@
from unittest.mock import patch

from django.db import transaction
from django.test import override_settings

import httpretty
import pytest
import responses
from celery.exceptions import Retry

from baserow.contrib.database.webhooks.models import TableWebhookCall
from baserow.contrib.database.webhooks.tasks import call_webhook
from baserow.test_utils.helpers import stub_getaddrinfo


@pytest.mark.django_db(transaction=True)
@responses.activate
@override_settings(
    WEBHOOKS_MAX_RETRIES_PER_CALL=1, WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1
    BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=1,
    BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=1,
)
def test_call_webhook(data_fixture):
    webhook = data_fixture.create_table_webhook()

@@ -137,3 +142,68 @@ def test_call_webhook(data_fixture):
    assert "{}" in created_call.response
    assert created_call.response_status == 400
    assert created_call.error == ""


@pytest.mark.django_db(transaction=True)
@override_settings(
    BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS=False,
    BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=0,
    BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=0,
)
@httpretty.activate(verbose=True, allow_net_connect=False)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_cant_call_webhook_to_localhost_when_private_addresses_not_allowed(
    patched_getaddrinfo,
    data_fixture,
):
    httpretty.register_uri(httpretty.POST, "http://127.0.0.1", status=200)
    webhook = data_fixture.create_table_webhook()

    assert webhook.active
    call_webhook.run(
        webhook_id=webhook.id,
        event_id="00000000-0000-0000-0000-000000000000",
        event_type="rows.created",
        method="POST",
        url="http://127.0.0.1",
        headers={"Baserow-header-1": "Value 1"},
        payload={"type": "rows.created"},
    )
    call = TableWebhookCall.objects.get(webhook=webhook)
    webhook.refresh_from_db()
    assert call.error == "UnacceptableAddressException: ('127.0.0.1', 80)"
    assert not webhook.active


@pytest.mark.django_db(transaction=True)
@responses.activate
@override_settings(
    BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS=True,
    BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL=0,
    BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES=0,
)
def test_can_call_webhook_to_localhost_when_private_addresses_allowed(
    data_fixture,
):
    responses.add(
        responses.POST,
        "http://127.0.0.1",
        status=201,
    )
    webhook = data_fixture.create_table_webhook()

    assert webhook.active
    call_webhook.run(
        webhook_id=webhook.id,
        event_id="00000000-0000-0000-0000-000000000000",
        event_type="rows.created",
        method="POST",
        url="http://127.0.0.1",
        headers={"Baserow-header-1": "Value 1"},
        payload={"type": "rows.created"},
    )
    call = TableWebhookCall.objects.get(webhook=webhook)
    webhook.refresh_from_db()
    assert not call.error
    assert call.response_status == 201
    assert webhook.active
@@ -0,0 +1,188 @@
import re
from ipaddress import ip_network
from unittest.mock import patch

from django.core.exceptions import ValidationError
from django.test import override_settings

import httpretty as httpretty
import pytest

from baserow.contrib.database.webhooks.validators import url_validator
from baserow.test_utils.helpers import stub_getaddrinfo

URL_BLACKLIST_ONLY_ALLOWING_GOOGLE_WEBHOOKS = re.compile(r"(?!(www\.)?google\.com).*")


@httpretty.activate(verbose=True, allow_net_connect=False)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_advocate_blocks_internal_address(mock):
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1/", status=200)
    httpretty.register_uri(httpretty.GET, "https://2.2.2.2/", status=200)
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)

    # This request should go through
    url_validator("https://1.1.1.1/")

    # This request should not go through
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("http://127.0.0.1/")


@httpretty.activate(verbose=True, allow_net_connect=False)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_advocate_blocks_invalid_urls(mock):
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1/", status=200)
    httpretty.register_uri(httpretty.GET, "https://2.2.2.2/", status=200)
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)

    # This request should go through
    url_validator("https://1.1.1.1/")

    # This request should not go through
    with pytest.raises(ValidationError) as exec_info:
        url_validator("google.com")
    assert exec_info.value.code == "invalid_url"
    with pytest.raises(ValidationError) as exec_info:
        url_validator("127.0.0.1")
    assert exec_info.value.code == "invalid_url"


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(BASEROW_WEBHOOKS_IP_WHITELIST=[ip_network("127.0.0.1/32")])
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_advocate_whitelist_rules(mock):
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)
    httpretty.register_uri(httpretty.GET, "http://10.0.0.1/", status=200)

    # This request should go through
    url_validator("http://127.0.0.1/")

    # Other private addresses should still be blocked
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("http://10.0.0.1/")
    assert exec_info.value.code == "invalid_url"


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(BASEROW_WEBHOOKS_IP_BLACKLIST=[ip_network("1.1.1.1/32")])
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_advocate_blacklist_rules(mock):
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1", status=200)
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)
    httpretty.register_uri(httpretty.GET, "https://2.2.2.2/", status=200)

    # This request should not go through
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://1.1.1.1/")
    assert exec_info.value.code == "invalid_url"

    # Private address is still blocked
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("http://127.0.0.1/")
    assert exec_info.value.code == "invalid_url"

    # This request should still go through
    url_validator("https://2.2.2.2/")


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(
    BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST=[re.compile(r"(?:www\.?)?google.com")]
)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_hostname_blacklist_rules(patched_addr_info):
    httpretty.register_uri(httpretty.GET, "https://google.com", status=200)
    httpretty.register_uri(httpretty.GET, "http://1.1.1.1", status=200)

    # The httpretty stub implementation of socket.getaddrinfo is incorrect and doesn't
    # return an IP causing advocate to fail, instead we patch to fix this.

    # This request should not go through
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://www.google.com/")
    assert exec_info.value.code == "invalid_url"

    # This request should still go through
    url_validator("https://www.otherdomain.com")


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(
    BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST=[URL_BLACKLIST_ONLY_ALLOWING_GOOGLE_WEBHOOKS]
)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_hostname_blacklist_rules_only_allow_one_host(patched_addr_info):
    httpretty.register_uri(httpretty.GET, "https://google.com", status=200)
    httpretty.register_uri(httpretty.GET, "http://google.com", status=200)
    httpretty.register_uri(httpretty.GET, "http://1.1.1.1", status=200)
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1", status=200)

    url_validator("https://www.google.com/")
    url_validator("https://google.com/")

    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://www.otherdomain.com")
    assert exec_info.value.code == "invalid_url"

    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://google2.com")
    assert exec_info.value.code == "invalid_url"


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(
    BASEROW_WEBHOOKS_IP_BLACKLIST=[ip_network("1.0.0.0/8")],
    BASEROW_WEBHOOKS_IP_WHITELIST=[ip_network("1.1.1.1/32")],
)
def test_advocate_combination_of_whitelist_blacklist_rules():
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1", status=200)
    httpretty.register_uri(httpretty.GET, "https://1.1.1.2", status=200)
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)
    httpretty.register_uri(httpretty.GET, "https://2.2.2.2/", status=200)

    url_validator("https://1.1.1.1/")

    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://1.1.1.2/")
    assert exec_info.value.code == "invalid_url"

    # Private address is still blocked
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("http://127.0.0.1/")
    assert exec_info.value.code == "invalid_url"

    # This request should still go through
    url_validator("https://2.2.2.2/")


@httpretty.activate(verbose=True, allow_net_connect=False)
@override_settings(
    BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST=[URL_BLACKLIST_ONLY_ALLOWING_GOOGLE_WEBHOOKS],
    BASEROW_WEBHOOKS_IP_BLACKLIST=[ip_network("1.0.0.0/8")],
    BASEROW_WEBHOOKS_IP_WHITELIST=[ip_network("1.1.1.1/32")],
)
@patch("socket.getaddrinfo", wraps=stub_getaddrinfo)
def test_advocate_hostname_blacklist_overrides_ip_lists(
    mock,
):
    httpretty.register_uri(httpretty.GET, "https://1.1.1.1", status=200)
    httpretty.register_uri(httpretty.GET, "https://1.1.1.2", status=200)
    httpretty.register_uri(httpretty.GET, "http://127.0.0.1/", status=200)
    httpretty.register_uri(httpretty.GET, "https://2.2.2.2/", status=200)

    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://1.1.1.1/")
    assert exec_info.value.code == "invalid_url"

    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("https://1.1.1.2/")
    assert exec_info.value.code == "invalid_url"

    # Private address is still blocked
    with pytest.raises(ValidationError, match="Invalid URL") as exec_info:
        url_validator("http://127.0.0.1/")
    assert exec_info.value.code == "invalid_url"

    # This request should still go through
    url_validator("https://www.google.com/")
@@ -44,6 +44,7 @@ For example:
* Allow not creating a reversed relationship with the link row field. [#1063](https://gitlab.com/bramw/baserow/-/issues/1063)
* Add API token authentication support to multipart and via-URL file uploads. [#255](https://gitlab.com/bramw/baserow/-/issues/255)
* Add a rich preview while importing data to an existing table. [#1120](https://gitlab.com/bramw/baserow/-/issues/1120)
* Add env vars for controlling which URLs and IPs webhooks are allowed to use. [#931](https://gitlab.com/bramw/baserow/-/issues/931)

### Bug Fixes
* Resolve circular dependency in `FieldWithFiltersAndSortsSerializer` [#1113](https://gitlab.com/bramw/baserow/-/issues/1113)
@@ -94,6 +94,16 @@ x-backend-variables: &backend-variables
  BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
  BASEROW_JOB_SOFT_TIME_LIMIT:
  BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
  BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS:
  BASEROW_WEBHOOKS_IP_BLACKLIST:
  BASEROW_WEBHOOKS_IP_WHITELIST:
  BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST:
  BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS:
  BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES:
  BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL:
  BASEROW_WEBHOOKS_MAX_PER_TABLE:
  BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES:
  BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS:

services:
  # A caddy reverse proxy sitting in-front of all the services. Responsible for routing
@@ -113,6 +113,16 @@ x-backend-variables: &backend-variables
  BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
  BASEROW_JOB_SOFT_TIME_LIMIT:
  BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
  BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS:
  BASEROW_WEBHOOKS_IP_BLACKLIST:
  BASEROW_WEBHOOKS_IP_WHITELIST:
  BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST:
  BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS:
  BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES:
  BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL:
  BASEROW_WEBHOOKS_MAX_PER_TABLE:
  BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES:
  BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS:

services:
  backend:
@@ -112,7 +112,16 @@ x-backend-variables: &backend-variables
  BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
  BASEROW_MAX_SNAPSHOTS_PER_GROUP:
  BASEROW_SNAPSHOT_EXPIRATION_TIME_DAYS:
  BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS:
  BASEROW_WEBHOOKS_IP_BLACKLIST:
  BASEROW_WEBHOOKS_IP_WHITELIST:
  BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST:
  BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS:
  BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES:
  BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL:
  BASEROW_WEBHOOKS_MAX_PER_TABLE:
  BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES:
  BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS:

services:
  # A caddy reverse proxy sitting in-front of all the services. Responsible for routing
@@ -34,6 +34,7 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_CADDY\_EXTRA\_CONF | **Not supported by standalone images.** Will be substituted into the end of the Caddyfile's server block | |
| BASEROW\_MAX\_IMPORT\_FILE\_SIZE\_MB | The maximum file size in mb you can import to create a new table. Default 512Mb. | 512 |

### Backend Configuration
| Name | Description | Defaults |
| --- | --- | --- |

@@ -91,6 +92,20 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_AMOUNT\_OF\_WORKERS | The number of concurrent celery worker processes used to process asynchronous tasks. If not set will default to the number of available cores. Each celery process uses memory, to reduce Baserow's memory footprint consider setting and reducing this variable. | 1 for the All-in-one, Heroku and Cloudron images. Defaults to empty and hence the number of available cores in the standalone images. |
| BASEROW\_RUN\_MINIMAL | When BASEROW\_AMOUNT\_OF\_WORKERS is 1 and this is set to a non empty value Baserow will not run the export-worker but instead run both the celery export and normal tasks on the normal celery worker. Set this to lower the memory usage of Baserow at the expense of performance. | |

### Webhook Configuration
| Name | Description | Defaults |
| --- | --- | --- |
| BASEROW\_WEBHOOKS\_ALLOW\_PRIVATE\_ADDRESS | If set to any non empty value allows webhooks to access all addresses. Enabling this flag is a security risk as it will allow users to send webhook requests to internal addresses on your network. Instead consider using the three variables below first to allow access to only some internal network hostnames or IPs. | |
| BASEROW\_WEBHOOKS\_URL\_REGEX\_BLACKLIST | **Disabled if BASEROW\_WEBHOOKS\_ALLOW\_PRIVATE\_ADDRESS is set.** List of comma separated regexes used to validate user configured webhook URLs, will show the user an error if any regexes match their webhook URL and prevent it from running. Applied before and so supersedes BASEROW\_WEBHOOKS\_IP\_WHITELIST and BASEROW\_WEBHOOKS\_IP\_BLACKLIST. Do not include any schema like `http://`, `https://` as regexes will only be run against the hostname/IP of the user configured URL. For example set this to `^(?!(www\.)?allowedhost\.com).*` to block all hostnames and IPs other than `allowedhost.com` or `www.allowedhost.com`. | |
| BASEROW\_WEBHOOKS\_IP\_WHITELIST | **Disabled if BASEROW\_WEBHOOKS\_ALLOW\_PRIVATE\_ADDRESS is set.** List of comma separated IP addresses or ranges that webhooks will be **allowed** to use after the webhook URL has been resolved to an IP using DNS. Only checked if the URL passes the BASEROW\_WEBHOOKS\_URL\_REGEX\_BLACKLIST. Takes precedence over BASEROW\_WEBHOOKS\_IP\_BLACKLIST meaning that a whitelisted IP will always be let through regardless of the ranges in BASEROW\_WEBHOOKS\_IP\_BLACKLIST. So use BASEROW\_WEBHOOKS\_IP\_WHITELIST to punch holes in the ranges in BASEROW\_WEBHOOKS\_IP\_BLACKLIST, and not the other way around. Accepts a string in the format: "127.0.0.1/32,192.168.1.1/32" | |
| BASEROW\_WEBHOOKS\_IP\_BLACKLIST | **Disabled if BASEROW\_WEBHOOKS\_ALLOW\_PRIVATE\_ADDRESS is set.** List of comma separated IP addresses or ranges that webhooks will be **denied** from using after the URL has been resolved to an IP using DNS. Only checked if the URL passes the BASEROW\_WEBHOOKS\_URL\_REGEX\_BLACKLIST. BASEROW\_WEBHOOKS\_IP\_WHITELIST supersedes any ranges specified in this variable. Accepts a string in the format: "127.0.0.1/32,192.168.1.1/32" | |
| BASEROW\_WEBHOOKS\_URL\_CHECK\_TIMEOUT\_SECS | **Disabled if BASEROW\_WEBHOOKS\_ALLOW\_PRIVATE\_ADDRESS is set.** How long to wait before timing out and returning an error when checking if a URL can be accessed for a webhook. | 10 seconds |
| BASEROW\_WEBHOOKS\_MAX\_CONSECUTIVE\_TRIGGER\_FAILURES | The number of consecutive trigger failures that can occur before a webhook is disabled. | 8 |
| BASEROW\_WEBHOOKS\_MAX\_RETRIES\_PER\_CALL | The max number of retries per webhook call. | 8 |
| BASEROW\_WEBHOOKS\_MAX\_PER\_TABLE | The max number of webhooks per Baserow table. | 20 |
| BASEROW\_WEBHOOKS\_MAX\_CALL\_LOG\_ENTRIES | The maximum number of call log entries stored per webhook. | 10 |
| BASEROW\_WEBHOOKS\_REQUEST\_TIMEOUT\_SECONDS | How long to wait on making the webhook request before timing out. | 5 |
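Editorial note: the whitelist-over-blacklist precedence described in the table above can be pictured with the standard-library ipaddress module. This is an illustration of the documented behaviour only, not advocate's internal implementation, and the networks are made-up examples; independently of these lists, private addresses stay blocked unless BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS is set.

from ipaddress import ip_address, ip_network

ip_whitelist = [ip_network("1.1.1.1/32")]  # BASEROW_WEBHOOKS_IP_WHITELIST
ip_blacklist = [ip_network("1.0.0.0/8")]   # BASEROW_WEBHOOKS_IP_BLACKLIST

def documented_ip_decision(resolved_ip):
    ip = ip_address(resolved_ip)
    if any(ip in network for network in ip_whitelist):
        return True  # the whitelist punches a hole in the blacklisted range
    return not any(ip in network for network in ip_blacklist)

print(documented_ip_decision("1.1.1.1"))  # True, whitelisted
print(documented_ip_decision("1.2.3.4"))  # False, still inside 1.0.0.0/8
print(documented_ip_decision("8.8.8.8"))  # True, matched by neither list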

### Backend Misc Configuration
| Name | Description | Defaults |
| --- | --- | --- |
@@ -171,6 +171,10 @@
    "request": "Request",
    "response": "Response",
    "successfullyUpdated": "Webhook successfully updated.",
    "form": {
      "invalidURLTitle": "Invalid URL.",
      "invalidURLDescription": "The webhook URL is invalid, inaccessible or prohibited."
    },
    "status": {
      "noStatus": "NO STATUS",
      "statusOK": "OK",
@@ -16,9 +16,18 @@ export default {
     * Can be called after catching an error. If a handler is available the error
     * data is populated with the correct error message.
     */
    handleError(error, name, specificErrorMap = null) {
    handleError(
      error,
      name,
      specificErrorMap = null,
      requestBodyErrorMap = null
    ) {
      if (error.handler) {
        const message = error.handler.getMessage(name, specificErrorMap)
        const message = error.handler.getMessage(
          name,
          specificErrorMap,
          requestBodyErrorMap
        )
        this.showError(message)
        error.handler.handled()
      } else {
@@ -155,12 +155,15 @@ export class ErrorHandler {
    this.detail = detail
  }

  /**
   * Returns true if there is a readable error.
   * @return {boolean}
   */
  hasError() {
    return this.response !== undefined && this.response.code !== null
  hasBaserowAPIError() {
    return this.response !== undefined && this.code != null
  }

  hasRequestBodyValidationError() {
    return (
      this.response !== undefined &&
      this.response?.data?.error === 'ERROR_REQUEST_BODY_VALIDATION'
    )
  }

  /**

@@ -203,10 +206,85 @@ export class ErrorHandler {
      return this.errorMap[this.code]
    }

    return new ResponseErrorMessage(
      this.app.i18n.t('clientHandler.notCompletedTitle'),
      this.app.i18n.t('clientHandler.notCompletedDescription')
    )
    return this.genericDefaultError()
  }

  searchForMatchingFieldException(
    listOfDetailErrors,
    mapOfDetailCodeToResponseError
  ) {
    for (const detailError of listOfDetailErrors) {
      if (
        detailError &&
        typeof detailError === 'object' &&
        typeof detailError.code === 'string'
      ) {
        const handledError = mapOfDetailCodeToResponseError[detailError.code]
        if (handledError) {
          return handledError
        }
      }
    }
    return null
  }

  /**
   * Given a "ERROR_REQUEST_BODY_VALIDATION" error has occurred this function matches
   * a provided error map against the machine readable error codes in the "detail"
   * key in the response.
   *
   * For example if the response contains an error looking like:
   *
   * {
   *   "error": "ERROR_REQUEST_BODY_VALIDATION",
   *   "detail": {
   *     "url": [
   *       {
   *         "error": "Enter a valid URL.",
   *         "code": "invalid"
   *       }
   *     ]
   *   }
   * }
   *
   * Then you would call this function like so to match the above error and get your
   * ResponseErrorMessage returned:
   *
   * getRequestBodyErrorMessage({"url":{"invalid": new ResponseErrorMessage('a','b')}})
   *
   * @param requestBodyErrorMap An object where its keys are the names of the
   * request body attribute that can fail with a value being another sub object. This
   * sub object should be keyed by the "code" returned in the error detail with the
   * value being a ResponseErrorMessage that should be returned if the API returned an
   * error for that attribute and code.
   * @return Any The first ResponseErrorMessage which is found in the error map that
   * matches an error in the response body, or null if no match is found.
   */
  getRequestBodyErrorMessage(requestBodyErrorMap) {
    const detail = this.response?.data?.detail

    if (requestBodyErrorMap && detail && typeof detail === 'object') {
      for (const fieldName of Object.keys(detail)) {
        const errorsForField = detail[fieldName]
        const supportedExceptionsForField = requestBodyErrorMap[fieldName]

        if (
          errorsForField != null &&
          Array.isArray(errorsForField) &&
          supportedExceptionsForField
        ) {
          const matchingException = this.searchForMatchingFieldException(
            errorsForField,
            supportedExceptionsForField
          )
          if (matchingException) {
            return matchingException
          }
        }
      }
    }

    return null
  }

  /**

@@ -218,7 +296,7 @@ export class ErrorHandler {
      this.app.i18n.t('clientHandler.notFoundTitle', {
        name: upperCaseFirst(name),
      }),
      this.app.i18n.t('clientHandler.notFoundTitle', {
      this.app.i18n.t('clientHandler.notFoundDescription', {
        name: name.toLowerCase(),
      })
    )

@@ -252,20 +330,34 @@ export class ErrorHandler {
   * If there is an error or the requested detail is not found an error
   * message related to the problem is returned.
   */
  getMessage(name = null, specificErrorMap = null) {
  getMessage(name = null, specificErrorMap = null, requestBodyErrorMap = null) {
    if (this.isTooManyRequests()) {
      return this.getTooManyRequestsError()
    }
    if (this.hasNetworkError()) {
      return this.getNetworkErrorMessage()
    }
    if (this.hasError()) {
    if (this.hasBaserowAPIError()) {
      if (this.hasRequestBodyValidationError()) {
        const matchingRequestBodyError =
          this.getRequestBodyErrorMessage(requestBodyErrorMap)
        if (matchingRequestBodyError) {
          return matchingRequestBodyError
        }
      }
      return this.getErrorMessage(specificErrorMap)
    }
    if (this.isNotFound()) {
      return this.getNotFoundMessage(name)
    }
    return null
    return this.genericDefaultError()
  }

  genericDefaultError() {
    return new ResponseErrorMessage(
      this.app.i18n.t('clientHandler.notCompletedTitle'),
      this.app.i18n.t('clientHandler.notCompletedDescription')
    )
  }

  /**

@@ -275,7 +367,11 @@ export class ErrorHandler {
   */
  notifyIf(name = null, message = null) {
    if (
      !(this.hasError() || this.hasNetworkError() || this.isNotFound()) ||
      !(
        this.hasBaserowAPIError() ||
        this.hasNetworkError() ||
        this.isNotFound()
      ) ||
      this.isHandled
    ) {
      return

@@ -306,6 +402,29 @@
  }
}

export function makeErrorResponseInterceptor(store, app, clientErrorMap) {
  return (error) => {
    error.handler = new ErrorHandler(store, app, clientErrorMap, error.response)

    // Add the error message in the response to the error object.
    const rspCode = error.response?.status
    const rspData = error.response?.data

    if (rspCode === 401) {
      store.dispatch('notification/setAuthorizationError', true)
      error.handler.handled()
    } else if (
      typeof rspData === 'object' &&
      'error' in rspData &&
      'detail' in rspData
    ) {
      error.handler.setError(rspData.error, rspData.detail)
    }

    return Promise.reject(error)
  }
}

export default function ({ store, app }, inject) {
  // Create and inject the client error map, so that other modules can also register
  // default error messages.

@@ -341,38 +460,11 @@ export default function ({ store, app }, inject) {
    return config
  })

  // Create a response interceptor to add more detail tot the error message
  // Create a response interceptor to add more detail to the error message
  // and to create a notification when there is a network error.
  client.interceptors.response.use(
    (response) => {
      return response
    },
    (error) => {
      error.handler = new ErrorHandler(
        store,
        app,
        clientErrorMap,
        error.response
      )

      // Add the error message in the response to the error object.
      const rspCode = error.response?.status
      const rspData = error.response?.data

      if (rspCode === 401) {
        store.dispatch('notification/setAuthorizationError', true)
        error.handler.handled()
      } else if (
        typeof rspData === 'object' &&
        'error' in rspData &&
        'detail' in rspData
      ) {
        error.handler.setError(rspData.error, rspData.detail)
      }

      return Promise.reject(error)
    }
  )
  client.interceptors.response.use((response) => {
    return response
  }, makeErrorResponseInterceptor(store, app, clientErrorMap))

  inject('client', client)
}
@@ -41,10 +41,20 @@ export const slugify = (string) => {
 * after the dot.
 */
export const isValidURL = (str) => {
  const pattern = /^[^\s]{0,255}(?:\.|\/\/)[^\s]{1,}$/gi
  const pattern = /^[^\s]{0,255}(?:\.|\/\/)[^\s]{1,}$/i
  return !!pattern.test(str)
}

/**
 * A slightly stricter URL validator that requires any url begins with a http:// or
 * https:// and that it also passes the isValidURL validator above.
 */
export const isValidURLWithHttpScheme = (str) => {
  const trimmedStr = str.trim()
  const pattern = /^https?:\/\//i
  return !!pattern.test(trimmedStr) && isValidURL(trimmedStr)
}

export const isValidEmail = (str) => {
  // Please keep these regex in sync with the backend
  // See baserow.contrib.database.fields.field_types.EmailFieldType
@@ -49,14 +49,26 @@ export default {
      )
      this.$emit('created', data)
    } catch (error) {
      this.handleError(error, 'webhook', {
        ERROR_TABLE_WEBHOOK_MAX_LIMIT_EXCEEDED: new ResponseErrorMessage(
          this.$t('createWebhook.errorTableWebhookMaxLimitExceededTitle'),
          this.$t(
            'createWebhook.errorTableWebhookMaxLimitExceededDescription'
          )
        ),
      })
      this.handleError(
        error,
        'webhook',
        {
          ERROR_TABLE_WEBHOOK_MAX_LIMIT_EXCEEDED: new ResponseErrorMessage(
            this.$t('createWebhook.errorTableWebhookMaxLimitExceededTitle'),
            this.$t(
              'createWebhook.errorTableWebhookMaxLimitExceededDescription'
            )
          ),
        },
        {
          url: {
            invalid_url: new ResponseErrorMessage(
              this.$t('webhook.form.invalidURLTitle'),
              this.$t('webhook.form.invalidURLDescription')
            ),
          },
        }
      )
    }

    this.loading = false
@@ -55,6 +55,7 @@
<script>
import modal from '@baserow/modules/core/mixins/modal'
import error from '@baserow/modules/core/mixins/error'
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
import WebhookService from '@baserow/modules/database/services/webhook'

export default {

@@ -111,7 +112,14 @@ export default {
      this.response = data.response
      this.statusCode = data.status_code
    } catch (e) {
      this.handleError(e)
      this.handleError(e, 'webhook', null, {
        url: {
          invalid_url: new ResponseErrorMessage(
            this.$t('webhook.form.invalidURLTitle'),
            this.$t('webhook.form.invalidURLDescription')
          ),
        },
      })
    }

    this.isLoading = false
@@ -44,6 +44,10 @@ import WebhookForm from '@baserow/modules/database/components/webhook/WebhookFor
import DeleteWebhookModal from '@baserow/modules/database/components/webhook/DeleteWebhookModal'
import WebhookService from '@baserow/modules/database/services/webhook'

const {
  ResponseErrorMessage,
} = require('@baserow/modules/core/plugins/clientHandler')

export default {
  name: 'UpdateWebhook',
  components: { WebhookForm, DeleteWebhookModal },

@@ -84,7 +88,14 @@ export default {
        this.saved = false
      }, 5000)
    } catch (error) {
      this.handleError(error)
      this.handleError(error, 'webhook', null, {
        url: {
          invalid_url: new ResponseErrorMessage(
            this.$t('webhook.form.invalidURLTitle'),
            this.$t('webhook.form.invalidURLDescription')
          ),
        },
      })
    }

    this.loading = false
@@ -80,7 +80,8 @@
      <div
        v-if="
          fieldHasErrors('url') &&
          (!$v.values.url.required || !$v.values.url.url)
          (!$v.values.url.required ||
            !$v.values.url.isValidURLWithHttpScheme)
        "
        class="error"
      >

@@ -245,13 +246,14 @@

<script>
import { mapGetters } from 'vuex'
import { required, maxLength, url } from 'vuelidate/lib/validators'
import { required, maxLength } from 'vuelidate/lib/validators'

import form from '@baserow/modules/core/mixins/form'
import error from '@baserow/modules/core/mixins/error'
import Checkbox from '@baserow/modules/core/components/Checkbox'
import Radio from '@baserow/modules/core/components/Radio'
import TestWebhookModal from '@baserow/modules/database/components/webhook/TestWebhookModal'
import { isValidURLWithHttpScheme } from '@baserow/modules/core/utils/string'

export default {
  name: 'WebhookForm',

@@ -352,7 +354,7 @@ export default {
  validations: {
    values: {
      name: { required },
      url: { required, maxLength: maxLength(2000), url },
      url: { required, maxLength: maxLength(2000), isValidURLWithHttpScheme },
    },
    headers: {
      $each: {
@@ -31,7 +31,7 @@
      "example": "Example payload"
    },
    "errors": {
      "urlField": "This field is required and needs to be a valid url.",
      "urlField": "Must be a valid url starting with 'https://' or 'http://'.",
      "invalidHeaders": "One of the headers is invalid."
    },
    "checkbox": {
416 web-frontend/test/unit/core/utils/errors.spec.js Normal file
@ -0,0 +1,416 @@
|
|||
import {
  ClientErrorMap,
  makeErrorResponseInterceptor,
  ResponseErrorMessage,
} from '@baserow/modules/core/plugins/clientHandler'

function errorInterceptorWithStubAppAndStore(storeDispatches = []) {
  const stubApp = {
    i18n: {
      t(t) {
        return t
      },
    },
  }
  return makeErrorResponseInterceptor(
    {
      dispatch(action, ...args) {
        storeDispatches.push({
          action,
          args,
        })
      },
    },
    stubApp,
    new ClientErrorMap(stubApp)
  )
}

describe('test error handling', () => {
  test(
    'test an 500 response with error and detail body attributes matches against' +
      ' corresponding specificErrorMap entry',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'matchesASpecificErrorCode',
              detail: 'detail',
            },
            status: 500,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage('name', {
          matchesASpecificErrorCode: new ResponseErrorMessage(
            'title',
            'message'
          ),
        })
        expect(message.title).toBe('title')
        expect(message.message).toBe('message')
      }
    }
  )
  test(
    'test an 500 response with error and detail body attributes with no ' +
      ' matching specificErrorMap entry results in the generic error message',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'noMatchingEntryInSpecificErrorMap',
              detail: 'detail',
            },
            status: 500,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage('name', {
          doesntMatch: new ResponseErrorMessage('title', 'message'),
        })
        expect(message.title).toBe('clientHandler.notCompletedTitle')
        expect(message.message).toBe('clientHandler.notCompletedDescription')
      }
    }
  )
  test('test an 429 response results in a too many requests error', async () => {
    try {
      await errorInterceptorWithStubAppAndStore()({
        response: {
          status: 429,
        },
      })
    } catch (error) {
      const message = error.handler.getMessage('name')
      expect(message.title).toBe('clientHandler.tooManyRequestsTitle')
      expect(message.message).toBe('clientHandler.tooManyRequestsDescription')
    }
  })
  test('test empty response results in a network error', async () => {
    try {
      await errorInterceptorWithStubAppAndStore()({})
    } catch (error) {
      const message = error.handler.getMessage('name')
      expect(message.title).toBe('clientHandler.networkErrorTitle')
      expect(message.message).toBe('clientHandler.networkErrorDescription')
    }
  })
  test(
    'test an 500 response that matches a default error returns its default ' +
      'message',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_USER_NOT_IN_GROUP',
              detail: 'detail',
            },
            status: 500,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage('name', {
          matchesSomeOtherError: new ResponseErrorMessage('title', 'message'),
        })
        expect(message.title).toBe('clientHandler.userNotInGroupTitle')
        expect(message.message).toBe('clientHandler.userNotInGroupDescription')
      }
    }
  )
  test(
    'test an 500 response that matches a default error returns its default ' +
      'message',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_USER_NOT_IN_GROUP',
              detail: 'detail',
            },
            status: 500,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage('name', {
          matchesSomeOtherError: new ResponseErrorMessage('title', 'message'),
        })
        expect(message.title).toBe('clientHandler.userNotInGroupTitle')
        expect(message.message).toBe('clientHandler.userNotInGroupDescription')
      }
    }
  )
  test('test an 401 response returns a not authorized error', async () => {
    const actualStoreDispatches = []
    try {
      await errorInterceptorWithStubAppAndStore(actualStoreDispatches)({
        response: {
          data: {},
          status: 401,
        },
      })
    } catch (error) {
      const message = error.handler.getMessage('name', {
        matchesSomeOtherError: new ResponseErrorMessage('title', 'message'),
      })
      expect(message.title).toBe('clientHandler.notCompletedTitle')
      expect(message.message).toBe('clientHandler.notCompletedDescription')
      expect(actualStoreDispatches).toEqual([
        {
          action: 'notification/setAuthorizationError',
          args: [true],
        },
      ])
    }
  })
  test('test an 404 response returns a not found error', async () => {
    try {
      await errorInterceptorWithStubAppAndStore()({
        response: {
          data: {},
          status: 404,
        },
      })
    } catch (error) {
      const message = error.handler.getMessage('name', {
        matchesSomeOtherError: new ResponseErrorMessage('title', 'message'),
      })
      expect(message.title).toBe('clientHandler.notFoundTitle')
      expect(message.message).toBe('clientHandler.notFoundDescription')
    }
  })
  test(
    'test an 400 response with a body validation error which doesnt match' +
      ' requestBodyErrorMap falls back to matching against specificErrorMap',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_REQUEST_BODY_VALIDATION',
              detail: { field: [{ code: 'no_matching_entry' }] },
            },
            status: 404,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage(
          'name',
          {
            ERROR_REQUEST_BODY_VALIDATION: new ResponseErrorMessage(
              'should fallback to this title',
              'should fallback to this message'
            ),
          },
          {
            field: {
              doesnt_match: new ResponseErrorMessage(
                "shouldn't match",
                "shouldn't match"
              ),
            },
          }
        )
        expect(message.title).toBe('should fallback to this title')
        expect(message.message).toBe('should fallback to this message')
      }
    }
  )
  test(
    'test an 400 response with a body validation error which does match' +
      ' requestBodyErrorMap doesnt falls back to matching against specificErrorMap',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_REQUEST_BODY_VALIDATION',
              detail: { field: [{ code: 'matchesRequestBodyErrorMap' }] },
            },
            status: 404,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage(
          'name',
          {
            ERROR_REQUEST_BODY_VALIDATION: new ResponseErrorMessage(
              "shouldn't match this fallback title",
              "shouldn't match this fallback message"
            ),
          },
          {
            field: {
              matchesRequestBodyErrorMap: new ResponseErrorMessage(
                'should match title',
                'should match description'
              ),
            },
          }
        )
        expect(message.title).toBe('should match title')
        expect(message.message).toBe('should match description')
      }
    }
  )
  test(
    'test an 400 response with a body validation error which doesnt match any map' +
      ' falls back to generic default',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_REQUEST_BODY_VALIDATION',
              detail: { field: [{ code: 'matchesRequestBodyErrorMap' }] },
            },
            status: 404,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage(
          'name',
          {
            SHOULD_NOT_MATCH: new ResponseErrorMessage(
              "shouldn't match this fallback title",
              "shouldn't match this fallback message"
            ),
          },
          {
            field: {
              shouldntMatch: new ResponseErrorMessage(
                'should not match title',
                'should not match description'
              ),
            },
          }
        )
        expect(message.title).toBe('clientHandler.notCompletedTitle')
        expect(message.message).toBe('clientHandler.notCompletedDescription')
      }
    }
  )
  test(
    'test an 400 response with a body validation error which has multiple errors ' +
      ' per field returns the first matched exception',
    async () => {
      try {
        await errorInterceptorWithStubAppAndStore()({
          response: {
            data: {
              error: 'ERROR_REQUEST_BODY_VALIDATION',
              detail: {
                other_field: [{ code: 'doesnt_match' }, { code: 'does_match' }],
                field: [{ code: 'doesnt_match' }, { code: 'does_match' }],
              },
            },
            status: 404,
          },
        })
      } catch (error) {
        const message = error.handler.getMessage(
          'name',
          {},
          {
            other_field: {
              no_matches: new ResponseErrorMessage(
                'should not match because for other field',
                'should not match because for other field'
              ),
            },
            field: {
              does_match: new ResponseErrorMessage(
                'should match title',
                'should match message'
              ),
            },
          }
        )
        expect(message.title).toBe('should match title')
        expect(message.message).toBe('should match message')
      }
    }
  )
  describe('test invalid response details for request body validation errors', () => {
    const invalidDetails = [
      null,
      undefined,
      'string',
      1,
      [],
      1.0,
      false,
      BigInt(Number.MIN_SAFE_INTEGER),
      Symbol('test'),
      {},
      { weirdField: [] },
      { weirdField: null },
      { weirdField: undefined },
      { weirdField: 'string' },
      { weirdField: 1 },
      { weirdField: 1.0 },
      { weirdField: false },
      { weirdField: BigInt(Number.MIN_SAFE_INTEGER) },
      { weirdField: Symbol('weird key test') },
      { weirdField: [null] },
      { weirdField: [undefined] },
      { weirdField: ['string'] },
      { weirdField: [1] },
      { weirdField: [1.0] },
      { weirdField: [false] },
      { weirdField: [BigInt(Number.MIN_SAFE_INTEGER)] },
      { weirdField: [Symbol('weird inner detail value')] },
      { weirdField: [{}] },
      { weirdField: [{ code: [] }] },
      { weirdField: [{ code: null }] },
      { weirdField: [{ code: undefined }] },
      { weirdField: [{ code: 'string' }] },
      { weirdField: [{ code: 1 }] },
      { weirdField: [{ code: 1.0 }] },
      { weirdField: [{ code: {} }] },
      { weirdField: [{ code: false }] },
      { weirdField: [{ code: BigInt(Number.MAX_SAFE_INTEGER) }] },
      { weirdField: [{ code: Symbol('weird inner code value') }] },
    ]
    test.each(invalidDetails)(
      'test with invalid detail %s',
      async (invalidDetail) => {
        try {
          await errorInterceptorWithStubAppAndStore()({
            response: {
              data: {
                error: 'ERROR_REQUEST_BODY_VALIDATION',
                detail: invalidDetail,
              },
              status: 400,
            },
          })
        } catch (error) {
          const message = error.handler.getMessage(
            'name',
            {
              matchesSomeOtherError: new ResponseErrorMessage(
                'title',
                'message'
              ),
            },
            {
              weirdField: {
                shouldNotMatchAnything: new ResponseErrorMessage(
                  'request body error matched title',
                  'request body error matched description'
                ),
              },
            }
          )
          expect(message.title).toBe('clientHandler.notCompletedTitle')
          expect(message.message).toBe('clientHandler.notCompletedDescription')
        }
      }
    )
  })
})
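Editor's note: the tests above pin down the interceptor contract — a request rejected by a client using makeErrorResponseInterceptor throws an error carrying error.handler, and getMessage(name, specificErrorMap, requestBodyErrorMap) resolves a title/message pair. Below is a minimal usage sketch under that assumption only; the wrapper function and the 'ERROR_EXAMPLE' code are illustrative and not part of this change.

// Sketch only: assumes `doRequest` goes through a client whose response
// interceptor is makeErrorResponseInterceptor, so errors expose `error.handler`.
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'

async function callAndResolveMessage(doRequest) {
  try {
    return await doRequest()
  } catch (error) {
    // Resolve a user-facing message; 'ERROR_EXAMPLE' is a hypothetical code
    // used to show how a specific error map entry takes precedence.
    const message = error.handler.getMessage('example', {
      ERROR_EXAMPLE: new ResponseErrorMessage('Example title', 'Example message'),
    })
    return { title: message.title, message: message.message }
  }
}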
@@ -1,4 +1,8 @@
import { isRelativeUrl } from '@baserow/modules/core/utils/url'
import {
  isValidURL,
  isValidURLWithHttpScheme,
} from '@baserow/modules/core/utils/string'

describe('test url utils', () => {
  describe('test isRelativeUrl', () => {

@@ -21,4 +25,59 @@ describe('test url utils', () => {
      expect(isRelativeUrl(url)).toBe(false)
    })
  })
  describe('test is valid url', () => {
    const invalidURLs = [
      '/test',
      'test/test',
      '/',
      '/dashboard?test=true',
      'asdasd',
    ]
    const validURLs = [
      'http://example.com',
      'HTTP://EXAMPLE.COM',
      'https://www.exmaple.com',
      'ftp://example.com/file.txt',
      '//cdn.example.com/lib.js',
      'git+ssh://example.con/item',
    ]
    test.each(validURLs)('test with valid url %s', (url) => {
      expect(isValidURL(url)).toBe(true)
    })
    test.each(invalidURLs)('test with invalid url %s', (url) => {
      expect(isValidURL(url)).toBe(false)
    })
  })
  describe('test is valid https url', () => {
    const invalidURLs = [
      '/test',
      'test/test',
      '/',
      '/dashboard?test=true',
      'asdasd',
      'ftp://example.com/file.txt',
      '//cdn.example.com/lib.js',
      'git+ssh://example.con/item',
    ]
    const validURLs = [
      'https://example.com',
      'HTTPs://EXAMPLE.COM',
      'https://www.exmaple.com',
      'https://example.com/file.txt',
      'https://cdn.example.com/lib.js',
      'HtTps://example.con/item',
      'http://example.com',
      'HTTP://EXAMPLE.COM',
      'http://example.com',
      'http://example.com/file.txt',
      'http://cdn.example.com/lib.js',
      'HtTp://example.con/item',
    ]
    test.each(validURLs)('test with valid http/s url %s', (url) => {
      expect(isValidURLWithHttpScheme(url)).toBe(true)
    })
    test.each(invalidURLs)('test with invalid http/s url %s', (url) => {
      expect(isValidURLWithHttpScheme(url)).toBe(false)
    })
  })
})
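Editor's note: a minimal sketch of string helpers that would satisfy the behaviour asserted above — any absolute or protocol-relative URL for isValidURL, and a case-insensitive http/https-only variant for isValidURLWithHttpScheme. The real implementations live in @baserow/modules/core/utils/string and may differ; this is an assumption for illustration.

// Sketch, not the shipped implementation.
export const isValidURL = (value) => {
  // Any absolute URL; the scheme is optional, so protocol-relative //host passes.
  return /^(?:[a-z0-9+.-]+:)?\/\/[^\s/]+\S*$/i.test(value)
}

export const isValidURLWithHttpScheme = (value) => {
  // Same shape, but the scheme must be http or https (case-insensitive).
  return /^https?:\/\//i.test(value) && isValidURL(value)
}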