1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-07 06:15:36 +00:00

Zapier actions and triggers integration

This commit is contained in:
Bram Wiepjes 2022-09-23 18:32:17 +00:00
parent 402084a0cd
commit fc340ae8a2
39 changed files with 9993 additions and 45 deletions

View file

@ -521,6 +521,17 @@ web-frontend-test:
path: coverage.xml
coverage: '/Lines\s*:\s*(\d+.?\d*)%/'
zapier-integration-test:
extends:
- .docker-image-test-stage
- .skippable-job
variables:
RUN_WHEN_CHANGES_MADE_IN: "integrations/zapier"
script:
- cd integrations/zapier
- yarn install
- yarn run zapier test
# If pipeline not triggered by tag:
# - Build and store non-dev images in CI repo under the `ci-tested` tag so we know
# those images have passed the tests.

View file

@ -13,10 +13,14 @@ from baserow.contrib.database.fields.registries import field_type_registry
class FieldSerializer(serializers.ModelSerializer):
type = serializers.SerializerMethodField(help_text="The type of the related field.")
read_only = serializers.SerializerMethodField(
help_text="Indicates whether the field is a read only field. If true, "
"it's not possible to update the cell value."
)
class Meta:
model = Field
fields = ("id", "table_id", "name", "order", "type", "primary")
fields = ("id", "table_id", "name", "order", "type", "primary", "read_only")
extra_kwargs = {
"id": {"read_only": True},
"table_id": {"read_only": True},
@ -26,6 +30,10 @@ class FieldSerializer(serializers.ModelSerializer):
def get_type(self, instance):
return field_type_registry.get_by_model(instance.specific_class).type
@extend_schema_field(OpenApiTypes.BOOL)
def get_read_only(self, instance):
return field_type_registry.get_by_model(instance.specific_class).read_only
class RelatedFieldsSerializer(serializers.Serializer):
related_fields = serializers.SerializerMethodField(

View file

@ -1,10 +1,11 @@
from django.urls import re_path
from .views import TokensView, TokenView
from .views import TokenCheckView, TokensView, TokenView
app_name = "baserow.contrib.database.api.tokens"
urlpatterns = [
re_path(r"check/$", TokenCheckView.as_view(), name="check"),
re_path(r"(?P<token_id>[0-9]+)/$", TokenView.as_view(), name="item"),
re_path(r"$", TokensView.as_view(), name="list"),
]

View file

@ -21,6 +21,7 @@ from baserow.contrib.database.tokens.models import Token
from baserow.core.exceptions import UserNotInGroup
from baserow.core.handler import CoreHandler
from .authentications import TokenAuthentication
from .errors import ERROR_TOKEN_DOES_NOT_EXIST
from .serializers import TokenCreateSerializer, TokenSerializer, TokenUpdateSerializer
@ -210,3 +211,24 @@ class TokenView(APIView):
token = TokenHandler().get_token(request.user, token_id)
TokenHandler().delete_token(request.user, token)
return Response(status=204)
class TokenCheckView(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = (IsAuthenticated,)
@extend_schema(
tags=["Database tokens"],
operation_id="check_database_token",
description=(
"This endpoint check be used to check if the provided personal API token "
"is valid. If returns a `200` response if so and a `403` is not. This can "
"be used by integrations like Zapier or n8n to test if a token is valid."
),
responses={
200: None,
403: get_error_schema(["ERROR_TOKEN_DOES_NOT_EXIST"]),
},
)
def get(self, request):
return Response({"token": "OK"})

View file

@ -17,6 +17,7 @@ from baserow.contrib.database.fields.field_filters import (
FilterBuilder,
)
from baserow.contrib.database.fields.field_sortings import AnnotatedOrder
from baserow.contrib.database.fields.models import CreatedOnField, LastModifiedField
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.table.cache import (
get_cached_model_field_attrs,
@ -34,7 +35,9 @@ from baserow.core.mixins import (
)
from baserow.core.utils import split_comma_separated_string
deconstruct_filter_key_regex = re.compile(r"filter__field_([0-9]+)__([a-zA-Z0-9_]*)$")
deconstruct_filter_key_regex = re.compile(
r"filter__field_([0-9]+|created_on|updated_on)__([a-zA-Z0-9_]*)$"
)
class TableModelQuerySet(models.QuerySet):
@ -242,6 +245,12 @@ class TableModelQuerySet(models.QuerySet):
'filter__field_{id}__{view_filter_type}': {value}.
}
In addition to that, it's also possible to directly filter on the
`created_on` and `updated_on` fields, even if the CreatedOn and LastModified
fields are not created. This can be done by providing
`filter__field_created_on__{view_filter_type}` or
`filter__field_updated_on__{view_filter_type}` as keys in the `filter_object`.
:param filter_object: The object containing the field and filter type as key
and the filter value as value.
:type filter_object: object
@ -273,21 +282,32 @@ class TableModelQuerySet(models.QuerySet):
if not matches:
continue
field_id = int(matches[1])
fixed_field_instance_mapping = {
"created_on": CreatedOnField(),
"updated_on": LastModifiedField(),
}
if field_id not in self.model._field_objects or (
only_filter_by_field_ids is not None
and field_id not in only_filter_by_field_ids
):
raise FilterFieldNotFound(field_id, f"Field {field_id} does not exist.")
if matches[1] in fixed_field_instance_mapping.keys():
field_name = matches[1]
field_instance = fixed_field_instance_mapping.get(field_name)
else:
field_id = int(matches[1])
if field_id not in self.model._field_objects or (
only_filter_by_field_ids is not None
and field_id not in only_filter_by_field_ids
):
raise FilterFieldNotFound(
field_id, f"Field {field_id} does not exist."
)
field_object = self.model._field_objects[field_id]
field_instance = field_object["field"]
field_name = field_object["name"]
field_type = field_object["type"].type
field_object = self.model._field_objects[field_id]
field_instance = field_object["field"]
field_name = field_object["name"]
field_type = field_object["type"].type
model_field = self.model._meta.get_field(field_name)
view_filter_type = view_filter_type_registry.get(matches[2])
if not view_filter_type.field_is_compatible(field_instance):
raise ViewFilterTypeNotAllowedForField(
matches[2],
@ -300,7 +320,7 @@ class TableModelQuerySet(models.QuerySet):
for value in values:
filter_builder.filter(
view_filter_type.get_filter(
field_name, value, model_field, field_object["field"]
field_name, value, model_field, field_instance
)
)

View file

@ -1,7 +1,7 @@
from datetime import datetime, time, timedelta
from decimal import Decimal
from math import ceil, floor
from typing import Dict
from typing import Dict, Union
from django.contrib.postgres.aggregates.general import ArrayAgg
from django.db.models import DateTimeField, IntegerField, Q
@ -310,7 +310,7 @@ class DateEqualViewFilterType(ViewFilterType):
utc = timezone("UTC")
try:
datetime = parser.isoparse(value).astimezone(utc)
parsed_datetime = parser.isoparse(value).astimezone(utc)
except (ParserError, ValueError):
return Q()
@ -326,9 +326,9 @@ class DateEqualViewFilterType(ViewFilterType):
def query_dict(query_field_name):
return {
f"{query_field_name}__year": datetime.year,
f"{query_field_name}__month": datetime.month,
f"{query_field_name}__day": datetime.day,
f"{query_field_name}__year": parsed_datetime.year,
f"{query_field_name}__month": parsed_datetime.month,
f"{query_field_name}__day": parsed_datetime.day,
}
if has_timezone:
@ -343,7 +343,7 @@ class DateEqualViewFilterType(ViewFilterType):
else:
return Q(**query_dict(field_name))
else:
return Q(**{field_name: datetime})
return Q(**{field_name: parsed_datetime})
class BaseDateFieldLookupFilterType(ViewFilterType):
@ -375,7 +375,7 @@ class BaseDateFieldLookupFilterType(ViewFilterType):
]
@staticmethod
def parse_date(value: str) -> datetime.date:
def parse_date(value: str) -> Union[datetime.date, datetime]:
"""
Parses the provided value string and converts it to a date object.
Raises an error if the provided value is an empty string or cannot be parsed
@ -387,19 +387,33 @@ class BaseDateFieldLookupFilterType(ViewFilterType):
if value == "":
raise ValueError
utc = timezone("UTC")
try:
parsed_date = parser.isoparse(value).date()
return parsed_date
parsed_datetime = parser.isoparse(value).astimezone(utc)
return parsed_datetime
except ValueError as e:
raise e
@staticmethod
def is_date(value: str) -> bool:
try:
datetime.strptime(value, "%Y-%m-%d")
return True
except ValueError:
return False
def get_filter(self, field_name, value, model_field, field):
# in order to only compare the date part of a datetime field
# we need to verify that we are in fact dealing with a datetime field
# if so the django query lookup '__date' gets appended to the field_name
# otherwise (i.e. it is a date field) nothing gets appended
query_date_lookup = self.query_date_lookup
if isinstance(model_field, DateTimeField) and not query_date_lookup:
if (
isinstance(model_field, DateTimeField)
and self.is_date(value)
and not query_date_lookup
):
query_date_lookup = "__date"
try:
parsed_date = self.parse_date(value)
@ -433,14 +447,6 @@ class DateBeforeViewFilterType(BaseDateFieldLookupFilterType):
type = "date_before"
query_field_lookup = "__lt"
compatible_field_types = [
DateFieldType.type,
LastModifiedFieldType.type,
CreatedOnFieldType.type,
FormulaFieldType.compatible_with_formula_types(
BaserowFormulaDateType.type,
),
]
class DateAfterViewFilterType(BaseDateFieldLookupFilterType):

View file

@ -52,6 +52,7 @@ def test_list_fields(api_client, data_fixture):
assert response_json[0]["type"] == "text"
assert response_json[0]["primary"]
assert response_json[0]["text_default"] == field_1.text_default
assert response_json[0]["read_only"] is False
assert response_json[1]["id"] == field_3.id
assert response_json[1]["type"] == "number"
@ -128,6 +129,27 @@ def test_list_fields(api_client, data_fixture):
assert response.json()["error"] == "ERROR_TABLE_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_list_read_only_fields(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token(
email="test@test.nl", password="password", first_name="Test1"
)
table_1 = data_fixture.create_database_table(user=user)
field_1 = data_fixture.create_created_on_field(table=table_1, order=1)
response = api_client.get(
reverse("api:database:fields:list", kwargs={"table_id": table_1.id}),
**{"HTTP_AUTHORIZATION": f"JWT {jwt_token}"},
)
assert response.status_code == HTTP_200_OK
response_json = response.json()
assert len(response_json) == 1
assert response_json[0]["id"] == field_1.id
assert response_json[0]["type"] == "created_on"
assert response_json[0]["read_only"] is True
@pytest.mark.django_db
def test_create_field(api_client, data_fixture):
user, jwt_token = data_fixture.create_user_and_token()

View file

@ -7,6 +7,7 @@ from rest_framework.status import (
HTTP_204_NO_CONTENT,
HTTP_400_BAD_REQUEST,
HTTP_401_UNAUTHORIZED,
HTTP_403_FORBIDDEN,
HTTP_404_NOT_FOUND,
)
@ -615,3 +616,25 @@ def test_trashing_table_hides_restores_tokens(api_client, data_fixture):
["database", database_1.id],
]
)
@pytest.mark.django_db
def test_check_token(api_client, data_fixture):
user = data_fixture.create_user()
group = data_fixture.create_group(user=user)
url = reverse("api:database:tokens:check")
response = api_client.get(url, format="json")
assert response.status_code == HTTP_403_FORBIDDEN
url = reverse("api:database:tokens:check")
response = api_client.get(url, format="json", HTTP_AUTHORIZATION="Token WRONG")
assert response.status_code == HTTP_403_FORBIDDEN
token = TokenHandler().create_token(user, group, "Good")
url = reverse("api:database:tokens:check")
response = api_client.get(
url, format="json", HTTP_AUTHORIZATION=f"Token {token.key}"
)
assert response.status_code == HTTP_200_OK
assert response.json() == {"token": "OK"}

View file

@ -360,6 +360,7 @@ def test_get_public_gallery_view(api_client, data_fixture):
"primary": False,
"text_default": "",
"type": "text",
"read_only": False,
}
],
"view": {

View file

@ -1774,6 +1774,7 @@ def test_get_public_grid_view(api_client, data_fixture):
"primary": False,
"text_default": "",
"type": "text",
"read_only": False,
}
],
"view": {

View file

@ -672,6 +672,68 @@ def test_filter_by_fields_object_queryset(data_fixture):
assert results[0].id == row_4.id
@pytest.mark.django_db
def test_filter_by_fields_object_with_created_on_queryset(data_fixture):
table = data_fixture.create_database_table(name="Cars")
model = table.get_model()
row_1 = model.objects.create()
row_1.created_on = datetime(2021, 1, 1, 12, 0, 0, tzinfo=utc)
row_1.save()
row_2 = model.objects.create()
row_2.created_on = datetime(2021, 1, 2, 12, 0, 0, tzinfo=utc)
row_2.save()
row_3 = model.objects.create()
row_3.created_on = datetime(2021, 1, 3, 12, 0, 0, tzinfo=utc)
row_3.save()
print(row_1.created_on)
print(row_2.created_on)
print(row_3.created_on)
results = model.objects.all().filter_by_fields_object(
filter_object={
f"filter__field_created_on__date_after": "2021-01-02 13:00",
},
filter_type="AND",
)
assert len(results) == 1
assert results[0].id == row_3.id
@pytest.mark.django_db
def test_filter_by_fields_object_with_updated_on_queryset(data_fixture):
table = data_fixture.create_database_table(name="Cars")
model = table.get_model()
row_1 = model.objects.create()
row_2 = model.objects.create()
row_3 = model.objects.create()
model.objects.filter(id=row_1.id).update(
updated_on=datetime(2021, 1, 1, 12, 0, 0, tzinfo=utc)
)
model.objects.filter(id=row_2.id).update(
updated_on=datetime(2021, 1, 2, 12, 0, 0, tzinfo=utc)
)
model.objects.filter(id=row_3.id).update(
updated_on=datetime(2021, 1, 3, 12, 0, 0, tzinfo=utc)
)
results = model.objects.all().filter_by_fields_object(
filter_object={
f"filter__field_updated_on__date_before": "2021-01-02 12:00",
},
filter_type="AND",
)
assert len(results) == 1
assert results[0].id == row_1.id
@pytest.mark.django_db
def test_table_model_fields_requiring_refresh_on_insert(data_fixture):
table = data_fixture.create_database_table(name="Cars")

View file

@ -2774,6 +2774,21 @@ def test_date_before_filter_type(data_fixture):
assert len(ids) == 1
assert row.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-06 01:20"
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 1
assert row.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-06 01:40"
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 2
assert row.id in ids
assert row_2.id in ids
view_filter.value = ""
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
@ -2833,6 +2848,14 @@ def test_date_after_filter_type(data_fixture):
assert len(ids) == 1
assert row_4.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-05"
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 2
assert row_2.id in ids
assert row_4.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-06"
view_filter.save()
@ -2840,6 +2863,21 @@ def test_date_after_filter_type(data_fixture):
assert len(ids) == 1
assert row_4.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-06 01:40"
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 2
assert row_2.id in ids
assert row_4.id in ids
view_filter.field = date_time_field
view_filter.value = "2021-07-06 02:41"
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
assert len(ids) == 1
assert row_4.id in ids
view_filter.value = ""
view_filter.save()
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]

View file

@ -217,6 +217,7 @@ def test_when_field_unhidden_in_public_view_force_refresh_sent(
"type": "text",
"primary": False,
"text_default": "",
"read_only": False,
}
],
"view": view_serialized["view"],
@ -291,6 +292,7 @@ def test_when_only_field_options_updated_in_public_grid_view_force_refresh_sent(
"type": "text",
"primary": False,
"text_default": "",
"read_only": False,
}
],
"view": view_serialized["view"],

View file

@ -15,6 +15,11 @@ For example:
### New Features
* Added Zapier integration code. [#816](https://gitlab.com/bramw/baserow/-/issues/816)
* Made it possible to filter on the `created_on` and `updated_on` columns, even though
they're not exposed via fields.
* Expose `read_only` in the list fields endpoint.
### Bug Fixes
### Refactors

66
integrations/zapier/.gitignore vendored Normal file
View file

@ -0,0 +1,66 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/
# Dependency directories
node_modules/
jspm_packages/
# Typescript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# environment variables file
.env
.environment
# next.js build output
.next
zapier.test.js
.zapierapprc

View file

@ -0,0 +1,24 @@
# zapier
This Zapier integration project is generated by the `zapier init` CLI command.
These are what you normally do next:
```bash
# Install dependencies
npm install # or you can use yarn
# Run tests
zapier test
# Register the integration on Zapier if you haven't
zapier register "App Title"
# Or you can link to an existing integration on Zapier
zapier link
# Push it to Zapier
zapier push
```
Find out more on the latest docs: https://github.com/zapier/zapier-platform/blob/master/packages/cli/README.md.

View file

@ -0,0 +1,33 @@
// Zapier "custom" authentication definition for Baserow. The connection is
// verified by calling the token check endpoint; Zapier treats a non-2xx
// response from `test` as a failed connection test.
module.exports = {
  type: 'custom',
  test: {
    // Baserow returns 200 when the database token is valid, 403 otherwise.
    url: `{{bundle.authData.apiURL}}/api/database/tokens/check/`,
    method: 'GET',
    headers: { 'Authorization': 'Token {{bundle.authData.apiToken}}' },
  },
  fields: [
    {
      computed: false,
      key: 'apiToken',
      required: true,
      label: 'Baserow API token',
      type: 'string',
      helpText:
        'Please enter your Baserow API token. Can be found by clicking on your ' +
        'account in the top left corner -> Settings -> API tokens.'
    },
    {
      // Optional so self-hosted instances can point at their own backend;
      // defaults to the hosted baserow.io API.
      computed: false,
      key: 'apiURL',
      required: false,
      label: 'Baserow API URL',
      default: 'https://api.baserow.io',
      type: 'string',
      helpText:
        'Please enter your Baserow API URL. If you are using baserow.io, you ' +
        'can leave the default one.'
    },
  ],
  connectionLabel: 'Baserow API authentication',
  customConfig: {}
}

View file

@ -0,0 +1,33 @@
const authentication = require('./authentication.js')
const deleteRowCreate = require('./src/creates/delete-row.js')
const newRowCreate = require('./src/creates/new-row.js')
const updateRowCreate = require('./src/creates/update-row.js')
const getSingleRowSearch = require('./src/searches/get-single-row.js')
const listRowsSearch = require('./src/searches/list-rows.js')
const rowCreatedTrigger = require('./src/triggers/row-created.js')
const rowUpdatedTrigger = require('./src/triggers/row-updated.js')
const rowUpdatedOrCreatedTrigger =require('./src/triggers/row-updated-or-created.js')
module.exports = {
version: require('./package.json').version,
platformVersion: require('zapier-platform-core').version,
authentication: authentication,
triggers: {
[rowCreatedTrigger.key]: rowCreatedTrigger,
[rowUpdatedTrigger.key]: rowUpdatedTrigger,
[rowUpdatedOrCreatedTrigger.key]: rowUpdatedOrCreatedTrigger
},
searches: {
[getSingleRowSearch.key]: getSingleRowSearch,
[listRowsSearch.key]: listRowsSearch
},
creates: {
[newRowCreate.key]: newRowCreate,
[deleteRowCreate.key]: deleteRowCreate,
[updateRowCreate.key]: updateRowCreate,
},
resources: {},
}

View file

@ -0,0 +1,17 @@
{
"name": "baserow-zapier",
"version": "1.1.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "jest --testTimeout 10000"
},
"dependencies": {
"zapier-platform-core": "11.1.1"
},
"devDependencies": {
"jest": "^26.6.3",
"zapier-platform-cli": "^12.0.3"
},
"private": true
}

View file

@ -0,0 +1,6 @@
// Must be in sync with the backend field types.
const unsupportedBaserowFieldTypes = ['file']
module.exports = {
unsupportedBaserowFieldTypes,
}

View file

@ -0,0 +1,49 @@
const { rowSample } = require('../samples/row')
const deleteRowInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
helpText: 'Please enter the table ID where the row must be deleted in. You can ' +
'find the ID by clicking on the three dots next to the table. It\'s the number ' +
'between brackets.'
},
{
key: 'rowID',
label: 'Row ID',
type: 'integer',
required: true,
helpText: 'Please the row ID that must be deleted.'
},
]
const DeleteRow = async (z, bundle) => {
const rowDeleteRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/${bundle.inputData.rowID}/`,
method: 'DELETE',
headers: {
'Accept': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
})
return rowDeleteRequest.status === 204
? { message: `Row ${bundle.inputData.rowID} deleted successfully.` }
: { message: 'A problem occurred during DELETE operation. The row was not deleted.' }
}
module.exports = {
key: 'deleteRow',
noun: 'Row',
display: {
label: 'Delete Row',
description: 'Deletes an existing row.'
},
operation: {
perform: DeleteRow,
sample: rowSample,
inputFields: deleteRowInputFields
}
}

View file

@ -0,0 +1,51 @@
const { rowSample } = require('../samples/row')
const {
getRowInputValues,
prepareInputDataForBaserow
} = require('../helpers')
const rowInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
altersDynamicFields: true,
helpText: 'Please enter the table ID where the row must be created in. You can ' +
'find the ID by clicking on the three dots next to the table. It\'s the number ' +
'between brackets.'
},
]
const createRow = async (z, bundle) => {
const rowData = await prepareInputDataForBaserow(z, bundle)
const rowPostRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/`,
method: 'POST',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
params: {
'user_field_names': 'true',
},
body: rowData,
})
return rowPostRequest.json
}
module.exports = {
key: 'newRow',
noun: 'Row',
display: {
label: 'Create Row',
description: 'Creates a new row.'
},
operation: {
perform: createRow,
sample: rowSample,
inputFields: [...rowInputFields, getRowInputValues]
}
}

View file

@ -0,0 +1,58 @@
const { rowSample } = require('../samples/row')
const {
getRowInputValues,
prepareInputDataForBaserow
} = require('../helpers')
const updateRowInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
altersDynamicFields: true,
helpText: 'Please enter the table ID where the row must be updated in. You can ' +
'find the ID by clicking on the three dots next to the table. It\'s the ' +
'number between brackets.'
},
{
key: 'rowID',
label: 'Row ID',
type: 'integer',
required: true,
helpText: 'Please enter the row ID that must be updated.'
},
]
const updateRow = async (z, bundle) => {
const rowData = await prepareInputDataForBaserow(z, bundle)
const rowPatchRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/${bundle.inputData.rowID}/`,
method: 'PATCH',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
params: {
'user_field_names': 'true',
},
body: rowData,
})
return rowPatchRequest.json
}
module.exports = {
key: 'updateRow',
noun: 'Row',
display: {
label: 'Update Row',
description: 'Updates an existing row.'
},
operation: {
perform: updateRow,
sample: rowSample,
inputFields: [...updateRowInputFields, getRowInputValues]
}
}

View file

@ -0,0 +1,153 @@
const { unsupportedBaserowFieldTypes } = require('./constants')
/**
* Fetches the fields of a table and converts them to an array with valid Zapier
* field objects.
*/
const getRowInputValues = async (z, bundle) => {
if (!bundle.inputData.tableID) {
throw new Error('The `tableID` must be provided.')
}
const fieldsGetRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/fields/table/${bundle.inputData.tableID}/`,
method: 'GET',
headers: {
'Accept': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
})
return fieldsGetRequest.json.map(v => {
return mapBaserowFieldTypesToZapierTypes(v)
})
}
/**
* Fetches the fields and converts the input data to Baserow row compatible data.
*/
const prepareInputDataForBaserow = async (z, bundle) => {
if (!bundle.inputData.tableID) {
throw new Error('The `tableID` must be provided.')
}
const fieldsGetRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/fields/table/${bundle.inputData.tableID}/`,
method: 'GET',
headers: {
'Accept': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
})
let rowData = { id: bundle.inputData.rowID }
fieldsGetRequest
.json
.filter(
(baserowField) =>
baserowField.read_only
|| !unsupportedBaserowFieldTypes.includes(baserowField.type)
)
.filter((baserowField) => bundle.inputData.hasOwnProperty(baserowField.name))
.forEach(baserowField => {
let value = bundle.inputData[baserowField.name]
if (baserowField.type === 'multiple_collaborators') {
value = value.map(id => {
return { id }}
)
}
rowData[baserowField.name] = value
})
return rowData
}
/**
* Converts the provided Baserow field type object to a Zapier compatible object.
*/
const mapBaserowFieldTypesToZapierTypes = (baserowField) => {
const zapType = {
key: baserowField.name,
label: baserowField.name,
type: 'string'
}
if (baserowField.type === 'long_text') {
zapType.type = 'text'
}
if (baserowField.type === 'boolean') {
zapType.type = 'boolean'
}
if (baserowField.type === 'number') {
zapType.type = 'integer'
if (baserowField.number_decimal_places > 0) {
zapType.type = 'float'
}
}
if (baserowField.type === 'boolean') {
zapType.type = 'boolean'
}
if (baserowField.type === 'rating') {
zapType.type = 'integer'
}
if (['single_select', 'multiple_select'].includes(baserowField.type)) {
const choices = {}
baserowField.select_options.forEach(el => {
choices[`${el.id}`] = el.value
})
zapType.type = 'string'
zapType.choices = choices
}
if (baserowField.type === 'multiple_select') {
zapType.list = true
}
if (baserowField.type === 'link_row') {
zapType.type = 'integer'
zapType.helpText = 'Provide row ids that you want to link to.'
zapType.list = true
}
if (baserowField.type === 'multiple_collaborators') {
zapType.type = 'integer'
zapType.helpText = 'Provide user ids that you want to link to.'
zapType.list = true
}
if (baserowField.type === 'date' && !baserowField.date_include_time) {
zapType.type = 'date'
zapType.helpText =
'the date fields accepts a date in ISO format (e.g. 2020-01-01)'
}
if (baserowField.type === 'date' && baserowField.date_include_time) {
zapType.type = 'datetime'
zapType.helpText =
'the date fields accepts date and time in ISO format (e.g. 2020-01-01 12:00)'
}
if (
baserowField.read_only
|| unsupportedBaserowFieldTypes.includes(baserowField.type)
) {
// Read only and the file field are not supported.
return
}
return zapType
}
module.exports = {
getRowInputValues,
prepareInputDataForBaserow,
mapBaserowFieldTypesToZapierTypes,
}

View file

@ -0,0 +1,8 @@
// Sample row payload shown in the Zapier UI before any real data is fetched.
const rowSample = {
  id: 0,
  // Baserow orders rows with a high precision decimal, serialized as a string.
  order: '1.00000000000000000000',
  Name: 'string',
  Notes: 'string',
  Active: true
}

module.exports = { rowSample }

View file

@ -0,0 +1,50 @@
const { rowSample } = require('../samples/row')
const getSingleRowInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
helpText: 'Please enter the table ID where you want to get the row from. You can ' +
'find the ID by clicking on the three dots next to the table. It\'s the number ' +
'between brackets.'
},
{
key: 'rowID',
label: 'Row ID',
type: 'integer',
required: true,
helpText: 'Please enter the ID of the row that you want to get.'
},
]
const getSingleRow = async (z, bundle) => {
const rowGetRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/${bundle.inputData.rowID}/`,
method: 'GET',
headers: {
'Accept': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
params: {
'user_field_names': 'true',
},
})
return [rowGetRequest.json]
}
module.exports = {
key: 'getSingleRow',
noun: 'Row',
display: {
label: 'Get Single Row',
description: 'Finds a single row in a given table.'
},
operation: {
perform: getSingleRow,
sample: rowSample,
inputFields: getSingleRowInputFields
}
}

View file

@ -0,0 +1,83 @@
const { rowSample } = require('../samples/row')
const listRowsInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
helpText: 'Please enter the table ID where you want to get the rows from. You ' +
'can find the ID by clicking on the three dots next to the table. It\'s the ' +
'number between brackets.'
},
{
key: 'page',
label: 'page',
helpText: 'Defines which page of rows should be returned.',
type: 'string',
default: '1'
},
{
key: 'size',
label: 'size',
helpText: 'Defines how many rows should be returned per page.',
type: 'string',
default: '100'
},
{
key: 'search',
label: 'search',
helpText:
'If provided only rows with cell data that matches the search query ' +
'are going to be returned.',
type: 'string',
},
]
const listRows = async (z, bundle) => {
let params = {
'size': bundle.inputData.size,
'page': bundle.inputData.page,
'user_field_names': 'true'
}
if (bundle.inputData.search) {
params['search'] = bundle.inputData.search
}
const rowGetRequest = await z.request({
url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/`,
method: 'GET',
headers: {
'Accept': 'application/json',
'Authorization': `Token ${bundle.authData.apiToken}`,
},
params
})
// Modify array to be an single object, so it will display as 'row1-Name'.
let data = {}
rowGetRequest.json.results.forEach((row, index) => {
for (const [key, value] of Object.entries(row)) {
data[`row${index + 1}-${key}`] = value
}
})
// The search actions needs to be array of object with only one object. Other
// are not displayed in the UI.
return [data]
}
module.exports = {
key: 'listRows',
noun: 'Row',
display: {
label: 'List Rows',
description: 'Finds a page of rows in a given table.'
},
operation: {
perform: listRows,
sample: rowSample,
inputFields: listRowsInputFields
}
}

View file

@ -0,0 +1,73 @@
const { rowSample } = require('../samples/row')
const rowInputFields = [
{
key: 'tableID',
label: 'Table ID',
type: 'integer',
required: true,
helpText: 'Please enter your Baserow table ID. You can find the ID by clicking' +
' on the three dots next to the table. It\'s the number between brackets.'
}
]
/**
 * Fetch every row created in the configured table during the last two hours.
 *
 * Pages through the Baserow "list rows" endpoint until the total reported by
 * the backend has been collected. Zapier deduplicates the returned rows
 * across polls itself.
 *
 * @param {object} z Zapier's request/logging toolbox.
 * @param {object} bundle Auth data (apiURL, apiToken) and input (tableID).
 * @returns {Promise<Array<object>>} all matching rows.
 */
const getCreatedRows = async (z, bundle) => {
  const upperBound = new Date()
  // Look two hours back; this window size is the approach recommended by
  // Zapier support for polling triggers.
  const lowerBound = new Date(upperBound.getTime() - 2 * 60 * 60 * 1000)

  const pageSize = 200
  const collected = []
  let currentPage = 1
  let totalPages = null

  do {
    const response = await z.request({
      url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/`,
      method: 'GET',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Authorization': `Token ${bundle.authData.apiToken}`,
      },
      params: {
        size: pageSize,
        page: currentPage,
        'user_field_names': 'true',
        'filter_type': 'AND',
        'filter__field_created_on__date_before': upperBound.toISOString(),
        'filter__field_created_on__date_after': lowerBound.toISOString()
      },
    })
    if (totalPages === null) {
      // Derive the page count once from the backend's total row count.
      totalPages = Math.ceil(response.json.count / pageSize)
    }
    // Accumulate this page's rows into one flat array.
    collected.push(...response.json.results)
    currentPage++
  } while (currentPage <= totalPages)

  // Zapier figures out the duplicates.
  return collected
}
// Zapier trigger definition that polls Baserow for newly created rows.
module.exports = {
  key: 'rowCreated',
  noun: 'Row',
  display: {
    label: 'Row Created',
    description: 'Trigger when new row is created.'
  },
  operation: {
    // Polling trigger: Zapier periodically calls `perform` and deduplicates
    // the returned rows itself.
    type: 'polling',
    perform: getCreatedRows,
    canPaginate: false,
    sample: rowSample,
    inputFields: rowInputFields
  }
}

View file

@ -0,0 +1,73 @@
const { rowSample } = require('../samples/row')

// Single input field asking the user for the ID of the Baserow table that
// the trigger should watch.
const rowInputFields = [
  {
    key: 'tableID',
    label: 'Table ID',
    type: 'integer',
    required: true,
    helpText: 'Please enter your Baserow table ID. You can find the ID by clicking' +
      ' on the three dots next to the table. It\'s the number between brackets.'
  }
]
/**
 * Fetch every row in the configured table that was created or updated during
 * the last two hours.
 *
 * Filters on `updated_on` (which Baserow also sets at creation time) and
 * walks all result pages. Zapier deduplicates the returned rows itself.
 *
 * @param {object} z Zapier's request/logging toolbox.
 * @param {object} bundle Auth data (apiURL, apiToken) and input (tableID).
 * @returns {Promise<Array<object>>} all matching rows.
 */
const getCreatedOrUpdatedRows = async (z, bundle) => {
  const until = new Date()
  // Two-hour lookback window, as recommended by Zapier support for
  // polling triggers.
  const since = new Date(until.getTime() - 2 * 60 * 60 * 1000)

  const pageSize = 200
  const matches = []
  let totalPages = null

  for (
    let pageNumber = 1;
    totalPages === null || pageNumber <= totalPages;
    pageNumber++
  ) {
    const response = await z.request({
      url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/`,
      method: 'GET',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Authorization': `Token ${bundle.authData.apiToken}`,
      },
      params: {
        size: pageSize,
        page: pageNumber,
        'user_field_names': 'true',
        'filter_type': 'AND',
        'filter__field_updated_on__date_before': until.toISOString(),
        'filter__field_updated_on__date_after': since.toISOString()
      },
    })
    if (totalPages === null) {
      // Derive the page count once from the backend's total row count.
      totalPages = Math.ceil(response.json.count / pageSize)
    }
    // Collect this page's rows into one flat array.
    matches.push(...response.json.results)
  }

  // Zapier figures out the duplicates.
  return matches
}
// Zapier trigger definition that polls Baserow for rows that were created
// or updated recently.
module.exports = {
  key: 'rowCreatedOrUpdated',
  noun: 'Row',
  display: {
    label: 'Row created or updated',
    description: 'Trigger when a new row is created or an existing one is updated.'
  },
  operation: {
    // Polling trigger: Zapier periodically calls `perform` and deduplicates
    // the returned rows itself.
    type: 'polling',
    perform: getCreatedOrUpdatedRows,
    canPaginate: false,
    sample: rowSample,
    inputFields: rowInputFields
  }
}

View file

@ -0,0 +1,79 @@
const { rowSample } = require('../samples/row')

// Single input field asking the user for the ID of the Baserow table that
// the trigger should watch.
const rowInputFields = [
  {
    key: 'tableID',
    label: 'Table ID',
    type: 'integer',
    required: true,
    helpText: 'Please enter your Baserow table ID. You can find the ID by clicking' +
      ' on the three dots next to the table. It\'s the number between brackets.'
  }
]
/**
 * Fetch rows in the configured table that were updated (but not created) in
 * the last two hours.
 *
 * Pages through the Baserow "list rows" endpoint; Zapier deduplicates the
 * returned rows across polls itself.
 *
 * @param {object} z Zapier's request/logging toolbox.
 * @param {object} bundle Auth data (apiURL, apiToken) and input (tableID).
 * @returns {Promise<Array<object>>} all matching rows.
 */
const getUpdatedRows = async (z, bundle) => {
  const windowEnd = new Date()
  // Two-hour lookback window, as recommended by Zapier support for
  // polling triggers.
  const windowStart = new Date(windowEnd.getTime() - 2 * 60 * 60 * 1000)

  const pageSize = 200
  const updatedRows = []
  let page = 1
  let totalPages = null

  while (true) {
    const response = await z.request({
      url: `${bundle.authData.apiURL}/api/database/rows/table/${bundle.inputData.tableID}/`,
      method: 'GET',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Authorization': `Token ${bundle.authData.apiToken}`,
      },
      params: {
        size: pageSize,
        page: page,
        'user_field_names': 'true',
        'filter_type': 'AND',
        'filter__field_updated_on__date_before': windowEnd.toISOString(),
        'filter__field_updated_on__date_after': windowStart.toISOString(),
        // This is not a bulletproof solution that only returns the updated rows
        // because if the row is newly created and changed an hour later, it will
        // return as updated here. I don't have any other ideas that can easily be
        // implemented, so I think this is better than not having a row updated
        // trigger at all.
        'filter__field_created_on__date_before': windowStart.toISOString()
      },
    })
    if (totalPages === null) {
      // Derive the page count once from the backend's total row count.
      totalPages = Math.ceil(response.json.count / pageSize)
    }
    // Collect this page's rows into one flat array.
    updatedRows.push(...response.json.results)
    page++
    if (page > totalPages) {
      break
    }
  }

  // Zapier figures out the duplicates.
  return updatedRows
}
// Zapier trigger definition that polls Baserow for recently updated rows.
module.exports = {
  key: 'rowUpdated',
  noun: 'Row',
  display: {
    label: 'Row updated',
    description: 'Trigger when an existing row is updated.'
  },
  operation: {
    // Polling trigger: Zapier periodically calls `perform` and deduplicates
    // the returned rows itself.
    type: 'polling',
    perform: getUpdatedRows,
    canPaginate: false,
    sample: rowSample,
    inputFields: rowInputFields
  }
}

View file

@ -0,0 +1,343 @@
const { mapBaserowFieldTypesToZapierTypes } = require('../src/helpers')

// Unit tests covering the mapping from a Baserow field definition to the
// matching Zapier input field. In these cases the read only field types
// (last_modified, created_on, formula, lookup) and the file field map to
// `undefined`, i.e. they produce no Zapier input field.
describe('helpers', () => {
  describe('mapBaserowFieldTypesToZapierTypes ', () => {
    it('text field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'text',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        text_default: ''
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string'
      })
    })
    it('long_text field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'long_text',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'text'
      })
    })
    it('url field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'url',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string'
      })
    })
    it('email field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'email',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string'
      })
    })
    it('number field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'number',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        number_decimal_places: 0,
        number_negative: false,
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'integer'
      })
    })
    // Decimal places > 0 switch the Zapier type from integer to float.
    it('number field with decimal places', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'number',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        number_decimal_places: 1,
        number_negative: false,
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'float'
      })
    })
    it('rating field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'rating',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        max_value: 5,
        color: 'red',
        style: 'star'
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'integer'
      })
    })
    it('boolean field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'boolean',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'boolean'
      })
    })
    it('date field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'date',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        date_format: 'ISO',
        date_include_time: false,
        date_time_format: '12'
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'date'
      })
    })
    // Including time switches the Zapier type from date to datetime.
    it('date field with time', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'date',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        date_format: 'ISO',
        date_include_time: true,
        date_time_format: '24'
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'datetime'
      })
    })
    it('last_modified field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'last_modified',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: true,
        date_format: 'ISO',
        date_include_time: true,
        date_time_format: '24'
      })).toBeUndefined()
    })
    it('created_on field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'created_on',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: true,
        date_format: 'EU',
        date_include_time: false,
        date_time_format: '12'
      })).toBeUndefined()
    })
    it('link_row field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'link_row',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        link_row_table_id: 1,
        link_row_related_field_id: 2,
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'integer',
        helpText: `Provide row ids that you want to link to.`,
        list: true,
      })
    })
    it('file field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'file',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
      })).toBeUndefined()
    })
    // Select options become Zapier choices keyed by option id.
    it('single_select field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'single_select',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        select_options: [
          {id: 1, value: 'test', color: 'red'},
          {id: 2, value: 'value', color: 'green'}
        ]
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string',
        choices: {
          '1': 'test',
          '2': 'value',
        }
      })
    })
    it('multiple_select field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'multiple_select',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
        select_options: [
          {id: 1, value: 'test', color: 'red'},
          {id: 2, value: 'value', color: 'green'}
        ]
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string',
        choices: {
          '1': 'test',
          '2': 'value',
        },
        list: true
      })
    })
    it('phone_number field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'phone_number',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string'
      })
    })
    it('formula field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'formula',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: true
      })).toBeUndefined()
    })
    it('lookup field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'lookup',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: true,
      })).toBeUndefined()
    })
    it('multiple_collaborators field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'multiple_collaborators',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false,
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'integer',
        helpText: `Provide user ids that you want to link to.`,
        list: true
      })
    })
    // Unrecognized field types fall back to a plain string input.
    it('unknown field', () => {
      expect(mapBaserowFieldTypesToZapierTypes({
        id: 1,
        type: 'unknown_field_type',
        name: 'Name',
        order: 0,
        primary: false,
        read_only: false
      })).toMatchObject({
        key: 'Name',
        label: 'Name',
        type: 'string'
      })
    })
  })
})

File diff suppressed because it is too large Load diff

View file

@ -1085,6 +1085,7 @@ def test_get_public_kanban_without_with_single_select_and_cover(
"primary": False,
"text_default": "",
"type": "text",
"read_only": False,
},
],
"view": {
@ -1153,6 +1154,7 @@ def test_get_public_kanban_view_with_single_select_and_cover(
"select_options": [],
"table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
"type": "single_select",
"read_only": False,
},
{
"id": cover_field.id,
@ -1161,6 +1163,7 @@ def test_get_public_kanban_view_with_single_select_and_cover(
"primary": cover_field.primary,
"table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
"type": "file",
"read_only": False,
},
{
"id": public_field.id,
@ -1170,6 +1173,7 @@ def test_get_public_kanban_view_with_single_select_and_cover(
"primary": False,
"text_default": "",
"type": "text",
"read_only": False,
},
],
"view": {

View file

@ -31,7 +31,9 @@
<i class="fas fa-ellipsis-v"></i>
</a>
<Context ref="context">
<div class="context__menu-title">{{ application.name }}</div>
<div class="context__menu-title">
{{ application.name }} ({{ application.id }})
</div>
<ul class="context__menu">
<slot name="context"></slot>
<li>

View file

@ -30,6 +30,9 @@
<APIDocsParameter name="type" :optional="false" type="string">
{{ $t('apiDocsTableListFields.type') }}
</APIDocsParameter>
<APIDocsParameter name="read_only" :optional="false" type="boolean">
{{ $t('apiDocsTableListFields.readOnly') }}
</APIDocsParameter>
</ul>
<p class="api-docs__content">
{{ $t('apiDocsTableListFields.extraProps') }}

View file

@ -22,7 +22,7 @@
<i class="fas fa-ellipsis-v"></i>
</a>
<Context ref="context">
<div class="context__menu-title">{{ table.name }}</div>
<div class="context__menu-title">{{ table.name }} ({{ table.id }})</div>
<ul class="context__menu">
<li>
<a @click="exportTable()">

View file

@ -409,14 +409,10 @@ export class FieldType extends Registerable {
* Generate a field sample for the given field that is displayed in auto-doc.
* @returns a sample for this field.
*/
getDocsFieldResponseExample({
id,
table_id: tableId,
name,
order,
type,
primary,
}) {
getDocsFieldResponseExample(
{ id, table_id: tableId, name, order, type, primary },
readOnly
) {
return {
id,
table_id: tableId,
@ -424,6 +420,7 @@ export class FieldType extends Registerable {
order,
type,
primary,
read_only: readOnly,
}
}

View file

@ -169,7 +169,8 @@
"order": "Field order in table. 0 for the first field.",
"primary": "Indicates if the field is a primary field. If `true` the field cannot be deleted and the value should represent the whole row.",
"type": "Type defined for this field.",
"extraProps": "Some extra properties are not described here because they are type specific."
"extraProps": "Some extra properties are not described here because they are type specific.",
"readOnly": "Indicates whether the field is a read only field. If true, it's not possible to update the cell value."
},
"apiDocsTableDeleteRow": {
"description": "Deletes an existing {name} row.",

View file

@ -275,7 +275,10 @@ export default {
description: fieldType.getDocsDescription(field),
requestExample: fieldType.getDocsRequestExample(field),
responseExample: fieldType.getDocsResponseExample(field),
fieldResponseExample: fieldType.getDocsFieldResponseExample(field),
fieldResponseExample: fieldType.getDocsFieldResponseExample(
field,
fieldType.isReadOnly
),
isReadOnly: fieldType.isReadOnly,
}
return field