Resolve "Airtable import web-frontend interface"

Bram Wiepjes 2022-03-02 13:41:11 +00:00
parent fd2d04fdda
commit c393e786d9
40 changed files with 1650 additions and 51 deletions

View file

@ -66,6 +66,7 @@ def get_application_serializer(instance, **kwargs):
:return: An instantiated serializer for the instance.
:rtype: ApplicationSerializer
"""
application = application_type_registry.get_by_model(instance.specific_class)
serializer_class = application.instance_serializer_class

View file

@ -100,8 +100,8 @@ CELERY_TASK_ROUTES = {
},
"baserow.core.trash.tasks.permanently_delete_marked_trash": {"queue": "export"},
}
CELERY_SOFT_TIME_LIMIT = 60 * 5
CELERY_TIME_LIMIT = CELERY_SOFT_TIME_LIMIT + 60
CELERY_SOFT_TIME_LIMIT = 60 * 5 # 5 minutes
CELERY_TIME_LIMIT = CELERY_SOFT_TIME_LIMIT + 60 # 60 seconds
CELERY_REDBEAT_REDIS_URL = REDIS_URL
# Explicitly set the same value as the default loop interval here so we can use it
@ -278,6 +278,7 @@ SPECTACULAR_SETTINGS = {
{"name": "Database table export"},
{"name": "Database table webhooks"},
{"name": "Database tokens"},
{"name": "Database airtable import"},
{"name": "Admin"},
],
"ENUM_NAME_OVERRIDES": {
@ -490,6 +491,11 @@ BASEROW_BACKEND_DATABASE_LOG_LEVEL = os.getenv(
"BASEROW_BACKEND_DATABASE_LOG_LEVEL", "ERROR"
)
BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT = int(
os.getenv("BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT", 60 * 30) # 30 minutes
)
LOGGING = {
"version": 1,
"disable_existing_loggers": False,

View file

@ -1,3 +1,5 @@
from copy import deepcopy
from .base import * # noqa: F403, F401
@ -10,6 +12,9 @@ CELERY_TASK_EAGER_PROPAGATES = True
CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}
# Open a second database connection that can be used to test transactions.
DATABASES["default-copy"] = deepcopy(DATABASES["default"]) # noqa: F405
USER_FILES_DIRECTORY = "user_files"
USER_THUMBNAILS_DIRECTORY = "thumbnails"
USER_THUMBNAILS = {"tiny": [21, 21]}

View file

@ -0,0 +1,2 @@
def airtable_import_job_progress_key(job_id: int) -> str:
"""Returns the cache key under which the job's progress is temporarily stored."""
return f"airtable_import_job_progress_{job_id}"

View file

@ -1,3 +1,6 @@
AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING = "pending"
AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED = "failed"
AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED = "finished"
AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE = "downloading-base"
AIRTABLE_EXPORT_JOB_CONVERTING = "converting"
AIRTABLE_EXPORT_JOB_DOWNLOADING_FILES = "downloading-files"

View file

@ -1,2 +1,10 @@
class AirtableBaseNotPublic(Exception):
"""Raised when the Airtable base is not publicly shared."""
class AirtableImportJobDoesNotExist(Exception):
"""Raised when the Airtable import job does not exist."""
class AirtableImportJobAlreadyRunning(Exception):
"""Raised when a user starts another import job while one is already running."""

View file

@ -1,7 +1,7 @@
import re
import json
import requests
from pytz import UTC, BaseTzInfo
from pytz import UTC, BaseTzInfo, timezone as pytz_timezone
from collections import defaultdict
from typing import List, Tuple, Union, Dict, Optional
from requests import Response
@ -10,6 +10,8 @@ from zipfile import ZipFile, ZIP_DEFLATED
from datetime import datetime
from django.core.files.storage import Storage
from django.contrib.auth import get_user_model
from django.db import transaction
from baserow.core.handler import CoreHandler
from baserow.core.utils import (
@ -22,6 +24,8 @@ from baserow.contrib.database.export_serialized import DatabaseExportSerializedS
from baserow.contrib.database.models import Database
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.field_types import FieldType, field_type_registry
from baserow.contrib.database.views.registries import view_type_registry
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.application_types import DatabaseApplicationType
from baserow.contrib.database.airtable.registry import (
AirtableColumnType,
@ -33,7 +37,16 @@ from baserow.contrib.database.airtable.constants import (
AIRTABLE_EXPORT_JOB_CONVERTING,
)
from .exceptions import AirtableBaseNotPublic
from .exceptions import (
AirtableBaseNotPublic,
AirtableImportJobDoesNotExist,
AirtableImportJobAlreadyRunning,
)
from .models import AirtableImportJob
from .tasks import run_import_from_airtable
User = get_user_model()
BASE_HEADERS = {
@ -393,6 +406,7 @@ class AirtableHandler:
row["id"] = new_id
converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
view_id = 0
for table_index, table in enumerate(schema["tableSchemas"]):
field_mapping = {}
@ -481,12 +495,20 @@ class AirtableHandler:
)
converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
# Create a default grid view because the importing of views doesn't work
# yet. It's a bit quick and dirty, but it will be replaced soon.
view_id += 1
grid_view = GridView(id=view_id, name="Grid", order=1)
grid_view.get_field_options = lambda *args, **kwargs: []
grid_view_type = view_type_registry.get_by_model(grid_view)
exported_views = [grid_view_type.export_serialized(grid_view, None, None)]
exported_table = DatabaseExportSerializedStructure.table(
id=table["id"],
name=table["name"],
order=table_index,
fields=exported_fields,
views=[],
views=exported_views,
rows=exported_rows,
)
exported_tables.append(exported_table)
@ -597,3 +619,70 @@ class AirtableHandler:
)
return databases, id_mapping
@staticmethod
def get_airtable_import_job(user: User, job_id: int) -> AirtableImportJob:
"""
Fetches an Airtable import job from the database if the user has created it.
Properties like `progress_percentage` and `state` are
automatically updated by the task that does the actual import.
:param user: The user on whose behalf the job is requested.
:param job_id: The id of the job that must be fetched.
:raises AirtableImportJobDoesNotExist: If the import job doesn't exist.
:return: The fetched Airtable import job instance related to the provided id.
"""
try:
return AirtableImportJob.objects.select_related(
"user", "group", "database", "database__group"
).get(id=job_id, user_id=user.id)
except AirtableImportJob.DoesNotExist:
raise AirtableImportJobDoesNotExist(
f"The job with id {job_id} does not exist."
)
@staticmethod
def create_and_start_airtable_import_job(
user: User,
group: Group,
share_id: str,
timezone: Optional[str] = None,
) -> AirtableImportJob:
"""
Creates a new Airtable import job and starts the asynchronous task that
actually does the import.
:param user: The user on whose behalf the import is started.
:param group: The group where the Airtable base must be imported into.
:param share_id: The Airtable share id of the page that must be fetched. Note
that the base must be shared publicly. The id starts with `shr`.
:param timezone: The main timezone used for date conversions if needed.
:raises AirtableImportJobAlreadyRunning: If another import job is already
running. A user can only have one job running simultaneously.
:raises UnknownTimeZoneError: When the provided timezone string is incorrect.
:return: The newly created Airtable import job.
"""
# Validate the provided timezone.
if timezone is not None:
pytz_timezone(timezone)
group.has_user(user, raise_error=True)
# A user can only have one Airtable import job running simultaneously. If one
# is already running, we don't want to start a new one.
running_jobs = AirtableImportJob.objects.filter(user_id=user.id).is_running()
if len(running_jobs) > 0:
raise AirtableImportJobAlreadyRunning(
f"Another job is already running with id {running_jobs[0].id}."
)
job = AirtableImportJob.objects.create(
user=user,
group=group,
airtable_share_id=share_id,
timezone=timezone,
)
transaction.on_commit(lambda: run_import_from_airtable.delay(job.id))
return job
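For reviewers, a minimal sketch of how the two handler entry points fit together (it assumes `user` and `group` already exist and the share id is valid; this is an illustration, not part of the commit):

from baserow.contrib.database.airtable.handler import AirtableHandler

# Create the job inside a transaction; the Celery task is dispatched via
# transaction.on_commit, so it only starts once the commit succeeds.
job = AirtableHandler.create_and_start_airtable_import_job(
    user, group, "shrxxxxxxxxxxxxxx", timezone="Europe/Amsterdam"
)

# Later, e.g. from a polling endpoint, re-fetch the job on behalf of the
# same user and read the progress reported by the running task.
job = AirtableHandler.get_airtable_import_job(user, job.id)
print(job.get_cached_progress_percentage(), job.get_cached_state())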

View file

@ -0,0 +1,92 @@
from typing import Any
from django.db import models
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.core.cache import cache
from baserow.core.models import Group
from baserow.core.mixins import CreatedAndUpdatedOnMixin
from baserow.core.models import Application
from .constants import (
AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED,
)
from .cache import airtable_import_job_progress_key
User = get_user_model()
class AirtableImportJobQuerySet(models.QuerySet):
def is_running(self):
return self.filter(
~Q(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED),
~Q(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED),
)
class AirtableImportJob(CreatedAndUpdatedOnMixin, models.Model):
user = models.ForeignKey(
User, on_delete=models.CASCADE, help_text="The user that has created the job"
)
group = models.ForeignKey(
Group,
on_delete=models.CASCADE,
help_text="The group where the Airtable base must be imported into.",
)
airtable_share_id = models.CharField(
max_length=18,
help_text="Public ID of the shared Airtable base that must be imported.",
)
timezone = models.CharField(null=True, max_length=255)
progress_percentage = models.IntegerField(
default=0,
help_text="A percentage indicating how far along the import job is. 100 means "
"that it's finished.",
)
state = models.CharField(
max_length=128,
default=AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING,
help_text="Indicates the state of the import job.",
)
error = models.TextField(
blank=True, default="", help_text="An error message if something went wrong."
)
human_readable_error = models.TextField(
blank=True,
default="",
help_text="A human readable error message indicating what went wrong.",
)
database = models.ForeignKey(
Application,
null=True,
on_delete=models.SET_NULL,
help_text="The imported Baserow database.",
)
objects = AirtableImportJobQuerySet.as_manager()
def get_from_cached_value_or_from_self(self, name: str) -> Any:
"""
Because the `progress_percentage` and `state` are updated via a transaction,
we also temporarily store the progress in the Redis cache. This is needed
because other database connection, for example a gunicorn worker, can't get
the latest progress from the PostgreSQL table because it's updated in a
transaction.
This method tries to get the progress from the cache and if it's not found,
it falls back on the job table entry data.
:param name: The name in the cache entry dict.
:return: The correct value.
"""
return cache.get(airtable_import_job_progress_key(self.id), default={}).get(
name, getattr(self, name)
)
def get_cached_progress_percentage(self) -> int:
return self.get_from_cached_value_or_from_self("progress_percentage")
def get_cached_state(self) -> str:
return self.get_from_cached_value_or_from_self("state")

View file

@ -0,0 +1,129 @@
import logging
from django.conf import settings
from baserow.config.celery import app
logger = logging.getLogger(__name__)
@app.task(
bind=True,
queue="export",
soft_time_limit=settings.BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT,
)
def run_import_from_airtable(self, job_id: int):
"""
Starts the Airtable import job. This task must run after the job has been created.
:param job_id: The id related to the job that must be started.
"""
from celery.exceptions import SoftTimeLimitExceeded
from pytz import timezone as pytz_timezone
from requests.exceptions import RequestException
from django.db import transaction
from django.core.cache import cache
from baserow.core.signals import application_created
from baserow.core.utils import Progress
from baserow.contrib.database.airtable.models import AirtableImportJob
from baserow.contrib.database.airtable.handler import AirtableHandler
from baserow.contrib.database.airtable.exceptions import AirtableBaseNotPublic
from baserow.contrib.database.airtable.constants import (
AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED,
)
from .cache import airtable_import_job_progress_key
job = AirtableImportJob.objects.select_related("group").get(id=job_id)
def progress_updated(percentage, state):
"""
Every time the progress of the import changes, this callback function is
called. If the percentage or the state has changed, the job will be updated.
"""
nonlocal job
changed = False
if job.progress_percentage != percentage:
job.progress_percentage = percentage
changed = True
if state is not None and job.state != state:
job.state = state
changed = True
if changed:
# The progress must also be stored in the Redis cache. Because we're
# currently in a transaction, other database connections don't know about
the progress, and this way we can still communicate it to the user.
cache.set(
airtable_import_job_progress_key(job.id),
{"progress_percentage": job.progress_percentage, "state": job.state},
timeout=None,
)
job.save()
progress = Progress(100)
progress.register_updated_event(progress_updated)
kwargs = {}
if job.timezone is not None:
kwargs["timezone"] = pytz_timezone(job.timezone)
try:
with transaction.atomic():
databases, id_mapping = AirtableHandler.import_from_airtable_to_group(
job.group,
job.airtable_share_id,
progress_builder=progress.create_child_builder(
represents_progress=progress.total
),
**kwargs
)
# The web-frontend needs to know about the newly created database, so we
# call the application_created signal.
for database in databases:
application_created.send(self, application=database, user=None)
job.state = AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED
job.database = databases[0]
# Don't override the other properties that have been set during the
# progress update.
job.save(update_fields=("state", "database"))
except Exception as e:
exception_mapping = {
SoftTimeLimitExceeded: "The import job took too long and was timed out.",
RequestException: "The Airtable server could not be reached.",
AirtableBaseNotPublic: "The Airtable base is not publicly shared.",
}
error = "Something went wrong while importing the Airtable base."
for exception, error_message in exception_mapping.items():
if isinstance(e, exception):
error = error_message
break
logger.error(e)
job.state = AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
job.error = str(e)
job.human_readable_error = error
# Don't override the other properties that have been set during the
# progress update.
job.save(
update_fields=(
"state",
"error",
"human_readable_error",
)
)
# Delete the import job cached entry because the transaction has been committed
# and the AirtableImportJob entry now contains the latest data.
cache.delete(airtable_import_job_progress_key(job.id))

View file

@ -0,0 +1,22 @@
import re
def extract_share_id_from_url(public_base_url: str) -> str:
"""
Extracts the Airtable share id from the provided URL.
:param public_base_url: The URL where the share id must be extracted from.
:raises ValueError: If the provided URL doesn't match the publicly shared
Airtable URL.
:return: The extracted share id.
"""
result = re.search(r"https:\/\/airtable.com\/shr(.*)$", public_base_url)
if not result:
raise ValueError(
f"Please provide a valid shared Airtable URL (e.g. "
f"https://airtable.com/shrxxxxxxxxxxxxxx)"
)
return f"shr{result.group(1)}"

View file

@ -0,0 +1,13 @@
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST = (
"ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
"The requested Airtable import job does not exist.",
)
ERROR_AIRTABLE_JOB_ALREADY_RUNNING = (
"ERROR_AIRTABLE_JOB_ALREADY_RUNNING",
HTTP_400_BAD_REQUEST,
"Another Airtable import job is already running for you.",
)

View file

@ -0,0 +1,54 @@
from pytz import all_timezones
from rest_framework import serializers
from baserow.api.applications.serializers import ApplicationSerializer
from baserow.contrib.database.airtable.models import AirtableImportJob
from .validators import is_publicly_shared_airtable_url
class AirtableImportJobSerializer(serializers.ModelSerializer):
progress_percentage = serializers.IntegerField(
source="get_cached_progress_percentage",
help_text="A percentage indicating how far along the import job is. 100 means "
"that it's finished.",
)
state = serializers.CharField(
source="get_cached_state",
help_text="Indicates the state of the import job.",
)
database = ApplicationSerializer()
class Meta:
model = AirtableImportJob
fields = (
"id",
"group_id",
"airtable_share_id",
"progress_percentage",
"timezone",
"state",
"human_readable_error",
"database",
)
class CreateAirtableImportJobSerializer(serializers.Serializer):
group_id = serializers.IntegerField(
required=True,
help_text="The group ID where the Airtable base must be imported into.",
)
airtable_share_url = serializers.URLField(
required=True,
validators=[is_publicly_shared_airtable_url],
help_text="The publicly shared URL of the Airtable base (e.g. "
"https://airtable.com/shrxxxxxxxxxxxxxx)",
)
timezone = serializers.ChoiceField(
required=False,
choices=all_timezones,
help_text="Optionally a timezone can be provided that must be respected "
"during import. This is for example setting the correct value of the date "
"fields.",
)

View file

@ -0,0 +1,18 @@
from django.urls import re_path
from baserow.contrib.database.api.airtable.views import (
CreateAirtableImportJobView,
AirtableImportJobView,
)
app_name = "baserow.api.airtable"
urlpatterns = [
re_path(
r"import-job/(?P<job_id>[0-9]+)/$", AirtableImportJobView.as_view(), name="item"
),
re_path(
r"create-import-job/$", CreateAirtableImportJobView.as_view(), name="create"
),
]

View file

@ -0,0 +1,12 @@
from rest_framework.serializers import ValidationError
from baserow.contrib.database.airtable.utils import extract_share_id_from_url
def is_publicly_shared_airtable_url(value):
try:
extract_share_id_from_url(value)
except ValueError:
raise ValidationError(
"The publicly shared Airtable URL is invalid.", code="invalid"
)

View file

@ -0,0 +1,101 @@
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from django.db import transaction
from baserow.api.schemas import get_error_schema
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP, ERROR_GROUP_DOES_NOT_EXIST
from baserow.core.exceptions import UserNotInGroup, GroupDoesNotExist
from baserow.core.handler import CoreHandler
from baserow.contrib.database.airtable.exceptions import (
AirtableImportJobDoesNotExist,
AirtableImportJobAlreadyRunning,
)
from baserow.contrib.database.airtable.handler import AirtableHandler
from baserow.contrib.database.airtable.utils import extract_share_id_from_url
from .serializers import AirtableImportJobSerializer, CreateAirtableImportJobSerializer
from .errors import (
ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST,
ERROR_AIRTABLE_JOB_ALREADY_RUNNING,
)
class CreateAirtableImportJobView(APIView):
permission_classes = (IsAuthenticated,)
@extend_schema(
tags=["Database airtable import"],
operation_id="create_airtable_import_job",
description=(
"Creates a new Airtable import job. This job runs asynchronously in the "
"background and imports the Airtable base related to the provided "
"parameters. The `get_airtable_import_job` can be used to get the state "
"of the import job."
),
request=CreateAirtableImportJobSerializer,
responses={
200: AirtableImportJobSerializer,
400: get_error_schema(
["ERROR_USER_NOT_IN_GROUP", "ERROR_AIRTABLE_JOB_ALREADY_RUNNING"]
),
404: get_error_schema(["ERROR_GROUP_DOES_NOT_EXIST"]),
},
)
@map_exceptions(
{
GroupDoesNotExist: ERROR_GROUP_DOES_NOT_EXIST,
UserNotInGroup: ERROR_USER_NOT_IN_GROUP,
AirtableImportJobAlreadyRunning: ERROR_AIRTABLE_JOB_ALREADY_RUNNING,
}
)
@validate_body(CreateAirtableImportJobSerializer)
@transaction.atomic
def post(self, request, data):
group = CoreHandler().get_group(data["group_id"])
airtable_share_id = extract_share_id_from_url(data["airtable_share_url"])
job = AirtableHandler.create_and_start_airtable_import_job(
request.user,
group,
airtable_share_id,
timezone=data.get("timezone"),
)
return Response(AirtableImportJobSerializer(job).data)
class AirtableImportJobView(APIView):
permission_classes = (IsAuthenticated,)
@extend_schema(
parameters=[
OpenApiParameter(
name="job_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description="The job id to lookup information about.",
)
],
tags=["Database airtable import"],
operation_id="get_airtable_import_job",
description=(
"Returns the information related to the provided Airtable import job id. "
"This endpoint can for example be polled to get the state of the import "
"job in real time."
),
responses={
200: AirtableImportJobSerializer,
404: get_error_schema(["ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST"]),
},
)
@map_exceptions(
{
AirtableImportJobDoesNotExist: ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST,
}
)
def get(self, request, job_id):
job = AirtableHandler.get_airtable_import_job(request.user, job_id)
return Response(AirtableImportJobSerializer(job).data)
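A hedged sketch of how a client could drive these two endpoints over HTTP (the host and JWT token are placeholders, and the `/api` prefix assumes the default Baserow URL configuration):

import requests

BASE = "https://baserow.example.com/api"  # hypothetical host
HEADERS = {"Authorization": "JWT <token>"}  # placeholder token

# Start the import job.
job = requests.post(
    f"{BASE}/database/airtable/create-import-job/",
    json={
        "group_id": 1,
        "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
        "timezone": "Europe/Amsterdam",
    },
    headers=HEADERS,
).json()

# Poll the job until it reaches the "finished" or "failed" state.
job = requests.get(
    f"{BASE}/database/airtable/import-job/{job['id']}/", headers=HEADERS
).json()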

View file

@ -8,6 +8,8 @@ from .rows import urls as row_urls
from .tokens import urls as token_urls
from .export import urls as export_urls
from .formula import urls as formula_urls
from .airtable import urls as airtable_urls
app_name = "baserow.contrib.database.api"
@ -20,4 +22,5 @@ urlpatterns = [
path("tokens/", include(token_urls, namespace="tokens")),
path("export/", include(export_urls, namespace="export")),
path("formula/", include(formula_urls, namespace="formula")),
path("airtable/", include(airtable_urls, namespace="airtable")),
]

View file

@ -1,5 +1,4 @@
import sys
import re
from tqdm import tqdm
from pytz import timezone as pytz_timezone
from pytz.exceptions import UnknownTimeZoneError
@ -12,6 +11,7 @@ from baserow.core.models import Group
from baserow.core.utils import Progress
from baserow.contrib.database.airtable.handler import AirtableHandler
from baserow.contrib.database.airtable.exceptions import AirtableBaseNotPublic
from baserow.contrib.database.airtable.utils import extract_share_id_from_url
class Command(BaseCommand):
@ -67,15 +67,10 @@ class Command(BaseCommand):
)
sys.exit(1)
result = re.search(r"https:\/\/airtable.com\/shr(.*)$", public_base_url)
if not result:
self.stdout.write(
self.style.ERROR(
f"Please provide a valid shared Airtable URL (e.g. "
f"https://airtable.com/shrxxxxxxxxxxxxxx)"
)
)
try:
share_id = extract_share_id_from_url(public_base_url)
except ValueError as e:
self.stdout.write(self.style.ERROR(str(e)))
sys.exit(1)
with tqdm(total=1000) as progress_bar:
@ -87,8 +82,6 @@ class Command(BaseCommand):
progress.register_updated_event(progress_updated)
share_id = f"shr{result.group(1)}"
try:
with NamedTemporaryFile() as download_files_buffer:
AirtableHandler.import_from_airtable_to_group(

View file

@ -0,0 +1,100 @@
# Generated by Django 3.2.12 on 2022-03-02 10:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("core", "0015_alter_userprofile_language"),
("database", "0065_rename_old_generated_table_indexes"),
]
operations = [
migrations.CreateModel(
name="AirtableImportJob",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_on", models.DateTimeField(auto_now_add=True)),
("updated_on", models.DateTimeField(auto_now=True)),
(
"airtable_share_id",
models.CharField(
help_text="Public ID of the shared Airtable base that must be imported.",
max_length=18,
),
),
("timezone", models.CharField(max_length=255, null=True)),
(
"progress_percentage",
models.IntegerField(
default=0,
help_text="A percentage indicating how far along the import job is. 100 means that it's finished.",
),
),
(
"state",
models.CharField(
default="pending",
help_text="Indicates the state of the import job.",
max_length=128,
),
),
(
"error",
models.TextField(
blank=True,
default="",
help_text="An error message if something went wrong.",
),
),
(
"human_readable_error",
models.TextField(
blank=True,
default="",
help_text="A human readable error message indicating what went wrong.",
),
),
(
"database",
models.ForeignKey(
help_text="The imported Baserow database.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="core.application",
),
),
(
"group",
models.ForeignKey(
help_text="The group where the Airtable base must be imported into.",
on_delete=django.db.models.deletion.CASCADE,
to="core.group",
),
),
(
"user",
models.ForeignKey(
help_text="The user that has created the job",
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
]

View file

@ -31,9 +31,10 @@ from .webhooks.models import (
TableWebhookCall,
TableWebhookHeader,
)
from baserow.contrib.database.fields.dependencies.models import (
FieldDependency,
)
from .airtable.models import AirtableImportJob
from baserow.contrib.database.fields.dependencies.models import FieldDependency
__all__ = [
"Database",
@ -63,6 +64,7 @@ __all__ = [
"TableWebhookEvent",
"TableWebhookHeader",
"TableWebhookCall",
"AirtableImportJob",
"FieldDependency",
]

View file

@ -12,6 +12,7 @@ from .token import TokenFixtures
from .template import TemplateFixtures
from .row import RowFixture
from .webhook import TableWebhookFixture
from .airtable import AirtableFixtures
class Fixtures(
@ -27,5 +28,6 @@ class Fixtures(
TemplateFixtures,
RowFixture,
TableWebhookFixture,
AirtableFixtures,
):
fake = Faker()

View file

@ -0,0 +1,15 @@
from baserow.contrib.database.airtable.models import AirtableImportJob
class AirtableFixtures:
def create_airtable_import_job(self, **kwargs):
if "user" not in kwargs:
kwargs["user"] = self.create_user()
if "group" not in kwargs:
kwargs["group"] = self.create_group(user=kwargs["user"])
if "airtable_share_id" not in kwargs:
kwargs["airtable_share_id"] = "test"
return AirtableImportJob.objects.create(**kwargs)

View file

@ -3,17 +3,27 @@ import pytest
import responses
import json
from unittest.mock import patch
from copy import deepcopy
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
from pytz import UTC, timezone as pytz_timezone
from pytz import UTC, timezone as pytz_timezone, UnknownTimeZoneError
from django.core.files.storage import FileSystemStorage
from django.conf import settings
from baserow.core.user_files.models import UserFile
from baserow.core.utils import Progress
from baserow.core.exceptions import UserNotInGroup
from baserow.contrib.database.fields.models import TextField
from baserow.contrib.database.airtable.constants import (
AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING,
)
from baserow.contrib.database.airtable.exceptions import (
AirtableImportJobDoesNotExist,
AirtableImportJobAlreadyRunning,
)
from baserow.contrib.database.airtable.models import AirtableImportJob
from baserow.contrib.database.airtable.handler import AirtableHandler
@ -247,6 +257,20 @@ def test_to_baserow_database_export():
baserow_database_export["tables"][1]["rows"][0]["field_fldEB5dp0mNjVZu0VJI"]
== "2022-01-21T01:00:00+00:00"
)
assert baserow_database_export["tables"][0]["views"] == [
{
"id": 1,
"type": "grid",
"name": "Grid",
"order": 1,
"filter_type": "AND",
"filters_disabled": False,
"filters": [],
"sortings": [],
"public": False,
"field_options": [],
}
]
@pytest.mark.django_db
@ -400,3 +424,85 @@ def test_import_from_airtable_to_group(data_fixture, tmpdir):
rows = data_model.objects.all()
assert rows[0].checkbox is True
assert rows[1].checkbox is False
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch("baserow.contrib.database.airtable.handler.run_import_from_airtable")
def test_create_and_start_airtable_import_job(
mock_run_import_from_airtable, data_fixture
):
user = data_fixture.create_user()
group = data_fixture.create_group(user=user)
group_2 = data_fixture.create_group()
with pytest.raises(UserNotInGroup):
AirtableHandler.create_and_start_airtable_import_job(user, group_2, "test")
job = AirtableHandler.create_and_start_airtable_import_job(user, group, "test")
assert job.user_id == user.id
assert job.group_id == group.id
assert job.airtable_share_id == "test"
assert job.progress_percentage == 0
assert job.timezone is None
assert job.state == "pending"
assert job.error == ""
mock_run_import_from_airtable.delay.assert_called_once()
args = mock_run_import_from_airtable.delay.call_args
assert args[0][0] == job.id
job.delete()
job = AirtableHandler.create_and_start_airtable_import_job(
user, group, "test", timezone="Europe/Amsterdam"
)
assert job.timezone == "Europe/Amsterdam"
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch("baserow.contrib.database.airtable.handler.run_import_from_airtable")
def test_create_and_start_airtable_import_job_with_timezone(
mock_run_import_from_airtable, data_fixture
):
user = data_fixture.create_user()
group = data_fixture.create_group(user=user)
with pytest.raises(UnknownTimeZoneError):
AirtableHandler.create_and_start_airtable_import_job(
user, group, "test", timezone="UNKNOWN"
)
assert AirtableImportJob.objects.all().count() == 0
job = AirtableHandler.create_and_start_airtable_import_job(
user, group, "test", timezone="Europe/Amsterdam"
)
assert job.timezone == "Europe/Amsterdam"
@pytest.mark.django_db
@responses.activate
def test_create_and_start_airtable_import_job_while_other_job_is_running(data_fixture):
user = data_fixture.create_user()
group = data_fixture.create_group(user=user)
data_fixture.create_airtable_import_job(
user=user, state=AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING
)
with pytest.raises(AirtableImportJobAlreadyRunning):
AirtableHandler.create_and_start_airtable_import_job(user, group, "test")
@pytest.mark.django_db
def test_get_airtable_import_job(data_fixture):
user = data_fixture.create_user()
job_1 = data_fixture.create_airtable_import_job(user=user)
job_2 = data_fixture.create_airtable_import_job()
with pytest.raises(AirtableImportJobDoesNotExist):
AirtableHandler.get_airtable_import_job(user, job_2.id)
job = AirtableHandler.get_airtable_import_job(user, job_1.id)
assert isinstance(job, AirtableImportJob)
assert job.id == job_1.id

View file

@ -0,0 +1,61 @@
import pytest
from django.core.cache import cache
from baserow.contrib.database.airtable.models import AirtableImportJob
from baserow.contrib.database.airtable.constants import (
AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FILES,
AIRTABLE_EXPORT_JOB_CONVERTING,
AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE,
)
from baserow.contrib.database.airtable.cache import airtable_import_job_progress_key
@pytest.mark.django_db
def test_is_running_queryset(data_fixture):
data_fixture.create_airtable_import_job(
state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
)
data_fixture.create_airtable_import_job(
state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED
)
assert AirtableImportJob.objects.is_running().count() == 0
data_fixture.create_airtable_import_job(
state=AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING
)
data_fixture.create_airtable_import_job(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FILES)
data_fixture.create_airtable_import_job(state=AIRTABLE_EXPORT_JOB_CONVERTING)
data_fixture.create_airtable_import_job(state=AIRTABLE_EXPORT_JOB_DOWNLOADING_BASE)
assert AirtableImportJob.objects.is_running().count() == 4
@pytest.mark.django_db
def test_cached_values(data_fixture):
job = data_fixture.create_airtable_import_job(
progress_percentage=10,
state=AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED,
)
assert job.progress_percentage == 10
assert job.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
assert job.get_cached_progress_percentage() == 10
assert job.get_cached_state() == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
key = airtable_import_job_progress_key(0)
cache.set(key, {"progress_percentage": 0, "state": "test"})
assert job.get_cached_progress_percentage() == 10
assert job.get_cached_state() == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
key = airtable_import_job_progress_key(job.id)
cache.set(key, {"progress_percentage": 20, "state": "something"})
assert job.get_cached_progress_percentage() == 20
assert job.get_cached_state() == "something"

View file

@ -0,0 +1,198 @@
import pytest
import responses
from pytz import BaseTzInfo
from unittest.mock import patch
from celery.exceptions import SoftTimeLimitExceeded
from requests.exceptions import ConnectionError
from django.db import connections
from django.core.cache import cache
from baserow.core.utils import ChildProgressBuilder
from baserow.contrib.database.airtable.tasks import run_import_from_airtable
from baserow.contrib.database.airtable.models import AirtableImportJob
from baserow.contrib.database.airtable.constants import (
AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED,
AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING,
)
from baserow.contrib.database.airtable.cache import airtable_import_job_progress_key
@pytest.mark.django_db(transaction=True, databases=["default", "default-copy"])
@responses.activate
@patch(
"baserow.contrib.database.airtable.handler"
".AirtableHandler.import_from_airtable_to_group"
)
@patch("baserow.core.signals.application_created.send")
@pytest.mark.timeout(10)
def test_run_import_from_airtable(
send_mock, mock_import_from_airtable_to_group, data_fixture
):
# Somehow needed to activate the second connection.
connections["default-copy"]
created_database = data_fixture.create_database_application()
def update_progress_slow(*args, **kwargs):
nonlocal job
nonlocal created_database
progress_builder = kwargs["progress_builder"]
progress = ChildProgressBuilder.build(progress_builder, 100)
progress.increment(50, "test")
# Check if the job has updated in the transaction
job.refresh_from_db()
assert job.progress_percentage == 50
assert job.state == "test"
# We're using the second connection to check if we can get the most recent
# progress value while the transaction is still active.
job_copy = AirtableImportJob.objects.using("default-copy").get(pk=job.id)
# Normal progress is expected to be 0
assert job_copy.progress_percentage == 0
assert job_copy.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_PENDING
# Progress stored in Redis is expected to be accurate.
assert job_copy.get_cached_progress_percentage() == 50
assert job_copy.get_cached_state() == "test"
progress.increment(50)
return ([created_database], {})
mock_import_from_airtable_to_group.side_effect = update_progress_slow
job = data_fixture.create_airtable_import_job()
with pytest.raises(AirtableImportJob.DoesNotExist):
run_import_from_airtable(0)
run_import_from_airtable(job.id)
mock_import_from_airtable_to_group.assert_called_once()
args = mock_import_from_airtable_to_group.call_args
assert args[0][0].id == job.group.id
assert args[0][1] == job.airtable_share_id
assert isinstance(args[1]["progress_builder"], ChildProgressBuilder)
assert args[1]["progress_builder"].represents_progress == 100
assert "timezone" not in args[1]
job = AirtableImportJob.objects.get(pk=job.id)
assert job.progress_percentage == 100
assert job.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED
assert job.database_id == created_database.id
# The cache entry will be removed when the job completes.
assert cache.get(airtable_import_job_progress_key(job.id)) is None
job_copy = AirtableImportJob.objects.using("default-copy").get(pk=job.id)
assert job_copy.progress_percentage == 100
assert job_copy.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED
assert job_copy.get_cached_progress_percentage() == 100
assert job_copy.get_cached_state() == AIRTABLE_EXPORT_JOB_DOWNLOADING_FINISHED
assert job_copy.database_id == created_database.id
send_mock.assert_called_once()
assert send_mock.call_args[1]["application"].id == job.database_id
assert send_mock.call_args[1]["user"] is None
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch(
"baserow.contrib.database.airtable.handler.AirtableHandler"
".import_from_airtable_to_group"
)
def test_run_import_from_airtable_failing_import(
mock_import_from_airtable_to_group, data_fixture
):
def update_progress_slow(*args, **kwargs):
raise Exception("test-1")
mock_import_from_airtable_to_group.side_effect = update_progress_slow
job = data_fixture.create_airtable_import_job()
run_import_from_airtable(job.id)
job.refresh_from_db()
assert job.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
assert job.error == "test-1"
assert (
job.human_readable_error
== "Something went wrong while importing the Airtable base."
)
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch(
"baserow.contrib.database.airtable.handler.AirtableHandler"
".import_from_airtable_to_group"
)
def test_run_import_from_airtable_failing_time_limit(
mock_import_from_airtable_to_group, data_fixture
):
def update_progress_slow(*args, **kwargs):
raise SoftTimeLimitExceeded("test")
mock_import_from_airtable_to_group.side_effect = update_progress_slow
job = data_fixture.create_airtable_import_job()
run_import_from_airtable(job.id)
job.refresh_from_db()
assert job.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
assert job.error == "SoftTimeLimitExceeded('test',)"
assert job.human_readable_error == "The import job took too long and was timed out."
@pytest.mark.django_db(transaction=True)
@responses.activate
@patch(
"baserow.contrib.database.airtable.handler.AirtableHandler"
".import_from_airtable_to_group"
)
def test_run_import_from_airtable_failing_connection_error(
mock_import_from_airtable_to_group, data_fixture
):
def update_progress_slow(*args, **kwargs):
raise ConnectionError("connection error")
mock_import_from_airtable_to_group.side_effect = update_progress_slow
job = data_fixture.create_airtable_import_job()
run_import_from_airtable(job.id)
job.refresh_from_db()
assert job.state == AIRTABLE_EXPORT_JOB_DOWNLOADING_FAILED
assert job.error == "connection error"
assert job.human_readable_error == "The Airtable server could not be reached."
@pytest.mark.django_db
@responses.activate
@patch(
"baserow.contrib.database.airtable.handler"
".AirtableHandler.import_from_airtable_to_group"
)
def test_run_import_from_airtable_with_timezone(
mock_import_from_airtable_to_group, data_fixture
):
job = data_fixture.create_airtable_import_job(timezone="Europe/Amsterdam")
with pytest.raises(AirtableImportJob.DoesNotExist):
run_import_from_airtable(0)
run_import_from_airtable(job.id)
mock_import_from_airtable_to_group.assert_called_once()
args = mock_import_from_airtable_to_group.call_args
assert args[0][0].id == job.group.id
assert args[0][1] == job.airtable_share_id
assert isinstance(args[1]["progress_builder"], ChildProgressBuilder)
assert args[1]["progress_builder"].represents_progress == 100
assert isinstance(args[1]["timezone"], BaseTzInfo)
assert str(args[1]["timezone"]) == "Europe/Amsterdam"

View file

@ -0,0 +1,17 @@
import pytest
from baserow.contrib.database.airtable.utils import extract_share_id_from_url
def test_extract_share_id_from_url():
with pytest.raises(ValueError):
extract_share_id_from_url("test")
assert (
extract_share_id_from_url("https://airtable.com/shrxxxxxxxxxxxxxx")
== "shrxxxxxxxxxxxxxx"
)
assert (
extract_share_id_from_url("https://airtable.com/shrXxmp0WmqsTkFWTzv")
== "shrXxmp0WmqsTkFWTzv"
)

View file

@ -0,0 +1,212 @@
from unittest.mock import patch
import pytest
from django.urls import reverse
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, HTTP_200_OK
from baserow.contrib.database.airtable.models import AirtableImportJob
@pytest.mark.django_db(transaction=True)
@patch("baserow.contrib.database.airtable.handler.run_import_from_airtable")
def test_create_airtable_import_job(
mock_run_import_from_airtable, data_fixture, api_client
):
user, token = data_fixture.create_user_and_token()
group = data_fixture.create_group(user=user)
group_2 = data_fixture.create_group()
response = api_client.post(
reverse("api:database:airtable:create"),
{"group_id": 0, "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx"},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_404_NOT_FOUND
assert response.json()["error"] == "ERROR_GROUP_DOES_NOT_EXIST"
response = api_client.post(
reverse("api:database:airtable:create"),
{
"group_id": group_2.id,
"airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
response = api_client.post(
reverse("api:database:airtable:create"),
{},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response.json() == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {
"group_id": [{"error": "This field is required.", "code": "required"}],
"airtable_share_url": [
{"error": "This field is required.", "code": "required"}
],
},
}
response = api_client.post(
reverse("api:database:airtable:create"),
{
"group_id": "not_int",
"airtable_share_url": "https://airtable.com/test",
"timezone": "UNKNOWN",
},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
assert response.json() == {
"error": "ERROR_REQUEST_BODY_VALIDATION",
"detail": {
"group_id": [{"error": "A valid integer is required.", "code": "invalid"}],
"airtable_share_url": [
{
"error": "The publicly shared Airtable URL is invalid.",
"code": "invalid",
}
],
"timezone": [
{"error": '"UNKNOWN" is not a valid choice.', "code": "invalid_choice"}
],
},
}
response = api_client.post(
reverse("api:database:airtable:create"),
{
"group_id": group.id,
"airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
airtable_import_job = AirtableImportJob.objects.all().first()
assert airtable_import_job.group_id == group.id
assert airtable_import_job.airtable_share_id == "shrxxxxxxxxxxxxxx"
assert response.json() == {
"id": airtable_import_job.id,
"group_id": group.id,
"airtable_share_id": "shrxxxxxxxxxxxxxx",
"progress_percentage": 0,
"timezone": None,
"state": "pending",
"human_readable_error": "",
"database": None,
}
mock_run_import_from_airtable.delay.assert_called()
airtable_import_job.delete()
response = api_client.post(
reverse("api:database:airtable:create"),
{
"group_id": group.id,
"airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
"timezone": "Europe/Amsterdam",
},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
airtable_import_job = AirtableImportJob.objects.all().first()
assert airtable_import_job.group_id == group.id
assert airtable_import_job.airtable_share_id == "shrxxxxxxxxxxxxxx"
assert response.json() == {
"id": airtable_import_job.id,
"group_id": group.id,
"airtable_share_id": "shrxxxxxxxxxxxxxx",
"progress_percentage": 0,
"timezone": "Europe/Amsterdam",
"state": "pending",
"human_readable_error": "",
"database": None,
}
response = api_client.post(
reverse("api:database:airtable:create"),
{
"group_id": group.id,
"airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
},
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_AIRTABLE_JOB_ALREADY_RUNNING"
@pytest.mark.django_db
def test_get_airtable_import_job(data_fixture, api_client):
user, token = data_fixture.create_user_and_token()
airtable_job_1 = data_fixture.create_airtable_import_job(user=user)
airtable_job_2 = data_fixture.create_airtable_import_job()
response = api_client.get(
reverse(
"api:database:airtable:item",
kwargs={"job_id": airtable_job_2.id},
),
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_404_NOT_FOUND
assert response.json()["error"] == "ERROR_AIRTABLE_IMPORT_JOB_DOES_NOT_EXIST"
response = api_client.get(
reverse(
"api:database:airtable:item",
kwargs={"job_id": airtable_job_1.id},
),
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
json = response.json()
assert json == {
"id": airtable_job_1.id,
"group_id": airtable_job_1.group_id,
"airtable_share_id": "test",
"progress_percentage": 0,
"timezone": None,
"state": "pending",
"human_readable_error": "",
"database": None,
}
airtable_job_1.progress_percentage = 50
airtable_job_1.state = "failed"
airtable_job_1.human_readable_error = "Wrong"
airtable_job_1.database = data_fixture.create_database_application()
airtable_job_1.save()
response = api_client.get(
reverse(
"api:database:airtable:item",
kwargs={"job_id": airtable_job_1.id},
),
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
json = response.json()
assert json == {
"id": airtable_job_1.id,
"group_id": airtable_job_1.group_id,
"airtable_share_id": "test",
"progress_percentage": 50,
"timezone": None,
"state": "failed",
"human_readable_error": "Wrong",
"database": {
"id": airtable_job_1.database.id,
"name": airtable_job_1.database.name,
"order": 0,
"type": "database",
"group": {
"id": airtable_job_1.database.group.id,
"name": airtable_job_1.database.group.name,
},
},
}

View file

@ -4,6 +4,7 @@
* Added accept `image/*` attribute to the form cover and logo upload.
* Added management command to import a shared Airtable base.
* Added web-frontend interface to import a shared Airtable base.
* Fixed adding new fields in the edit row popup that require refresh in Kanban and Form views.
* Cache model fields when generating model.
* Fixed `'<' not supported between instances of 'NoneType' and 'int'` error. Blank

View file

@ -39,7 +39,8 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_BACKEND\_LOG\_LEVEL | The default log level used by the backend, supports ERROR, WARNING, INFO, DEBUG, TRACE | INFO |
| BASEROW\_BACKEND\_DATABASE\_LOG\_LEVEL | The default log level used for database related logs in the backend. Supports the same values as the normal log level. If you also enable BASEROW\_BACKEND\_DEBUG and set this to DEBUG you will be able to see all SQL queries in the backend logs. | ERROR |
| BASEROW\_BACKEND\_DEBUG | If set to “on” then will enable the non production safe debug mode for the Baserow django backend. Defaults to “off” | |
| BASEROW\_AMOUNT\_OF\_GUNICORN\_WORKERS | The number of concurrent worker processes used by the Baserow backend gunicorn server to process incoming requests | |
| BASEROW\_AIRTABLE\_IMPORT\_SOFT\_TIME\_LIMIT | The maximum number of seconds an Airtable import job may run. | 1800 seconds - 30 minutes |
### Backend Database Configuration
| Name | Description | Defaults |

View file

@ -142,8 +142,12 @@ export class ApplicationType extends Registerable {
* When an application is selected, for example from the dashboard, an action needs to
* be taken. For example when a database is selected the user will be redirected to
* the first table of that database.
*
* @return Whether selecting was successful.
*/
select(application, context) {}
select(application, context) {
return true
}
/**
*

View file

@ -80,7 +80,7 @@
@import 'templates';
@import 'paginator';
@import 'sortable';
@import 'export';
@import 'modal-progress';
@import 'trash';
@import 'trash_entry';
@import 'infinite_scroll';

View file

@ -1,11 +1,11 @@
.export-table-modal__actions {
.modal-progress__actions {
display: flex;
justify-content: flex-end;
align-items: center;
margin-top: 20px;
}
.export-table-modal__loading-bar {
.modal-progress__loading-bar {
@include fixed-height(9px, 12px);
width: 100%;
@ -14,19 +14,19 @@
margin-right: 30px;
}
.export-table-modal__loading-bar-inner {
.modal-progress__loading-bar-inner {
background-color: $color-success-300;
border-radius: 5px;
height: 100%;
transition-timing-function: linear;
}
.export-table-modal__status-text {
.modal-progress__status-text {
color: $color-neutral-400;
padding-top: 5px;
position: absolute;
}
.export-table-modal__export-button {
.modal-progress__export-button {
flex: 0 0 160px;
}

View file

@ -8,6 +8,7 @@
:is="applicationType.getApplicationFormComponent()"
ref="applicationForm"
@submitted="submitted"
@hidden="hide()"
>
<div class="actions">
<div class="align-right">

View file

@ -4,6 +4,7 @@ import TemplateSidebar from '@baserow/modules/database/components/sidebar/Templa
import TableTemplate from '@baserow/modules/database/components/table/TableTemplate'
import { populateTable } from '@baserow/modules/database/store/table'
import GridViewRowExpandButton from '@baserow/modules/database/components/view/grid/GridViewRowExpandButton'
import DatabaseForm from '@baserow/modules/database/components/form/DatabaseForm'
export class DatabaseApplicationType extends ApplicationType {
static getType() {
@ -106,11 +107,13 @@ export class DatabaseApplicationType extends ApplicationType {
tableId: tables[0].id,
},
})
return true
} else {
$store.dispatch('notification/error', {
title: $i18n.t('applicationType.cantSelectTableTitle'),
message: $i18n.t('applicationType.cantSelectTableDescription'),
})
return false
}
}
@ -137,4 +140,8 @@ export class DatabaseApplicationType extends ApplicationType {
}
return data
}
getApplicationFormComponent() {
return DatabaseForm
}
}

View file

@ -0,0 +1,207 @@
<template>
<div>
<div class="control">
<label class="control__label">
{{ $t('importFromAirtable.airtableShareLinkTitle') }}
</label>
<p class="margin-bottom-2">
{{ $t('importFromAirtable.airtableShareLinkDescription') }}
<br /><br />
{{ $t('importFromAirtable.airtableShareLinkBeta') }}
</p>
<div class="control__elements">
<input
ref="airtableUrl"
v-model="airtableUrl"
:class="{ 'input--error': $v.airtableUrl.$error }"
type="text"
class="input input--large"
:placeholder="$t('importFromAirtable.airtableShareLinkPaste')"
@blur="$v.airtableUrl.$touch()"
/>
<div v-if="$v.airtableUrl.$error" class="error">
The link should look like: https://airtable.com/shrxxxxxxxxxxxxxx
</div>
</div>
</div>
<Error :error="error"></Error>
<div class="modal-progress__actions">
<div
v-if="jobIsRunning || jobHasSucceeded"
class="modal-progress__loading-bar"
>
<div
class="modal-progress__loading-bar-inner"
:style="{
width: `${job.progress_percentage}%`,
'transition-duration': [1, 0].includes(job.progress_percentage)
? '0s'
: '1s',
}"
></div>
<span class="modal-progress__status-text">
{{ humanReadableState }}
</span>
</div>
<button
v-if="!jobHasSucceeded"
class="button button--large modal-progress__export-button"
:class="{ 'button--loading': loading }"
:disabled="loading"
@click="importFromAirtable"
>
{{ $t('importFromAirtable.importButtonLabel') }}
</button>
<button
v-else
class="
button button--large button--success
modal-progress__export-button
"
@click="openDatabase"
>
{{ $t('importFromAirtable.openButtonLabel') }}
</button>
</div>
</div>
</template>
<script>
import { mapGetters } from 'vuex'
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
import error from '@baserow/modules/core/mixins/error'
import AirtableService from '@baserow/modules/database/services/airtable'
export default {
name: 'ImportFromAirtable',
mixins: [error],
data() {
return {
importType: 'none',
airtableUrl: '',
loading: false,
job: null,
pollInterval: null,
}
},
beforeDestroy() {
this.stopPollIfRunning()
},
methods: {
async importFromAirtable() {
this.$v.$touch()
if (this.$v.$invalid && !this.loading) {
return
}
this.loading = true
this.hideError()
try {
const { data } = await AirtableService(this.$client).create(
this.selectedGroupId,
this.airtableUrl,
new Intl.DateTimeFormat().resolvedOptions().timeZone
)
this.job = data
this.pollInterval = setInterval(this.getLatestJobInfo, 1000)
} catch (error) {
this.stopPollAndHandleError(error, {
ERROR_AIRTABLE_JOB_ALREADY_RUNNING: new ResponseErrorMessage(
this.$t('importFromAirtable.errorJobAlreadyRunningTitle'),
this.$t('importFromAirtable.errorJobAlreadyRunningDescription')
),
})
this.loading = false
}
},
async getLatestJobInfo() {
try {
const { data } = await AirtableService(this.$client).get(this.job.id)
this.job = data
if (this.jobHasFailed) {
const error = new ResponseErrorMessage(
this.$t('importFromAirtable.importError'),
this.job.human_readable_error
)
this.stopPollAndHandleError(error)
} else if (!this.jobIsRunning) {
this.stopPollIfRunning()
}
} catch (error) {
this.stopPollAndHandleError(error)
}
},
stopPollAndHandleError(error, specificErrorMap = null) {
this.loading = false
this.stopPollIfRunning()
error.handler
? this.handleError(error, 'airtable', specificErrorMap)
: this.showError(error)
},
stopPollIfRunning() {
if (this.pollInterval) {
clearInterval(this.pollInterval)
}
},
openDatabase() {
const application = this.$store.getters['application/get'](
this.job.database.id
)
const type = this.$registry.get('application', application.type)
if (type.select(application, this)) {
this.$emit('hidden')
}
},
},
computed: {
jobHasSucceeded() {
return this.job !== null && this.job.state === 'finished'
},
jobIsRunning() {
return (
this.job !== null && !['failed', 'finished'].includes(this.job.state)
)
},
jobHasFailed() {
return this.job !== null && this.job.state === 'failed'
},
humanReadableState() {
if (this.job === null) {
return ''
}
const importingTablePrefix = 'importing-table-'
if (this.job.state.startsWith(importingTablePrefix)) {
const table = this.job.state.replace(importingTablePrefix, '')
return this.$t('importFromAirtable.stateImportingTable', { table })
}
const translations = {
pending: this.$t('importFromAirtable.statePending'),
failed: this.$t('importFromAirtable.stateFailed'),
finished: this.$t('importFromAirtable.stateFinished'),
'downloading-base': this.$t('importFromAirtable.stateDownloadingBase'),
converting: this.$t('importFromAirtable.stateConverting'),
'downloading-files': this.$t(
'importFromAirtable.stateDownloadingFiles'
),
importing: this.$t('importFromAirtable.stateImporting'),
}
return translations[this.job.state]
},
...mapGetters({
selectedGroupId: 'group/selectedId',
}),
},
validations: {
airtableUrl: {
valid(value) {
const regex = /https:\/\/airtable.com\/shr(.*)$/g
return !!value.match(regex)
},
},
},
}
</script>

View file

@ -1,8 +1,8 @@
<template>
<div class="export-table-modal__actions">
<div v-if="job !== null" class="export-table-modal__loading-bar">
<div class="modal-progress__actions">
<div v-if="job !== null" class="modal-progress__loading-bar">
<div
class="export-table-modal__loading-bar-inner"
class="modal-progress__loading-bar-inner"
:style="{
width: `${job.progress_percentage * 100}%`,
'transition-duration': [1, 0].includes(job.progress_percentage)
@ -10,16 +10,13 @@
: '1s',
}"
></div>
<span v-if="jobIsRunning" class="export-table-modal__status-text">
<span v-if="jobIsRunning" class="modal-progress__status-text">
{{ job.status }}
</span>
</div>
<button
v-if="job === null || job.status !== 'complete'"
class="
button button--large button--primary
export-table-modal__export-button
"
class="button button--large button--primary modal-progress__export-button"
:class="{ 'button--loading': loading }"
:disabled="disabled"
>
@ -27,10 +24,7 @@
</button>
<DownloadLink
v-else
class="
button button--large button--success
export-table-modal__export-button
"
class="button button--large button--success modal-progress__export-button"
:url="job.url"
:filename="filename"
:loading-class="'button--loading'"

View file

@ -0,0 +1,88 @@
<template>
<form @submit.prevent="submit">
<div class="control">
<label class="control__label">
{{ $t('databaseForm.importLabel') }}
</label>
<div class="control__elements">
<ul class="choice-items">
<li>
<a
class="choice-items__link"
:class="{ active: importType === 'none' }"
@click="importType = 'none'"
>
<i class="choice-items__icon fas fa-clone"></i>
{{ $t('databaseForm.emptyLabel') }}
</a>
</li>
<li>
<a
class="choice-items__link"
:class="{ active: importType === 'airtable' }"
@click="importType = 'airtable'"
>
<i class="choice-items__icon fas fa-clone"></i>
{{ $t('databaseForm.airtableLabel') }}
</a>
</li>
</ul>
</div>
</div>
<template v-if="importType !== 'airtable'">
<div class="control">
<label class="control__label">
<i class="fas fa-font"></i>
{{ $t('applicationForm.nameLabel') }}
</label>
<div class="control__elements">
<input
ref="name"
v-model="values.name"
:class="{ 'input--error': $v.values.name.$error }"
type="text"
class="input input--large"
@blur="$v.values.name.$touch()"
/>
<div v-if="$v.values.name.$error" class="error">
{{ $t('error.requiredField') }}
</div>
</div>
<slot></slot>
</div>
</template>
<ImportFromAirtable
v-else
@hidden="$emit('hidden', $event)"
></ImportFromAirtable>
</form>
</template>
<script>
import { required } from 'vuelidate/lib/validators'
import form from '@baserow/modules/core/mixins/form'
import ImportFromAirtable from '@baserow/modules/database/components/airtable/ImportFromAirtable'
export default {
name: 'DatabaseForm',
components: { ImportFromAirtable },
mixins: [form],
data() {
return {
values: {
name: '',
},
importType: 'none',
}
},
mounted() {
this.$refs.name.focus()
},
validations: {
values: {
name: { required },
},
},
}
</script>

View file

@ -513,5 +513,29 @@
"stdDev": "Standard deviation",
"stdDevShort": "Std dev",
"variance": "Variance"
},
"databaseForm": {
"importLabel": "Would you like to import existing data?",
"emptyLabel": "Start from scratch",
"airtableLabel": "Import from Airtable (beta)"
},
"importFromAirtable": {
"airtableShareLinkTitle": "Share a link to your Base",
"airtableShareLinkDescription": "To import your Airtable base, you need to have a shared link to your entire base. In Airtable, click on the share button in the top right corner after opening your base. After that you must choose the \"Access to base\" option. In the share modal you can click on the \"Create a shared link to the whole base\" button and then on “Private read-only link”. Copy the public link and paste it in the input below.",
"airtableShareLinkBeta": "Note that this feature is in beta, your tables, fields (except formula, lookup and count) and data will be imported. Your views will not be imported.",
"airtableShareLinkPaste": "Paste the link here",
"importButtonLabel": "Import from Airtable",
"openButtonLabel": "Open imported database",
"importError": "Something went wrong",
"statePending": "Waiting to start",
"stateFailed": "Failed",
"stateFinished": "Finished",
"stateDownloadingBase": "Downloading base schema",
"stateConverting": "Converting to Baserow",
"stateDownloadingFiles": "Downloading files",
"stateImporting": "Importing",
"stateImportingTable": "Importing table {table}",
"errorJobAlreadyRunningTitle": "Already running",
"errorJobAlreadyRunningDescription": "Another import job is already running. You need to wait for that one to finish before starting another."
}
}

View file

@ -0,0 +1,14 @@
export default (client) => {
return {
create(groupId, shareURL, timezone) {
return client.post(`/database/airtable/create-import-job/`, {
group_id: groupId,
airtable_share_url: shareURL,
timezone,
})
},
get(jobId) {
return client.get(`/database/airtable/import-job/${jobId}/`)
},
}
}

View file

@ -1043,15 +1043,12 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</div>
<div
class="export-table-modal__actions"
class="modal-progress__actions"
>
<!---->
<button
class="
button button--large button--primary
export-table-modal__export-button
"
class="button button--large button--primary modal-progress__export-button"
>
exportTableLoadingBar.export
@ -2111,15 +2108,12 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</div>
<div
class="export-table-modal__actions"
class="modal-progress__actions"
>
<!---->
<button
class="
button button--large button--primary
export-table-modal__export-button
"
class="button button--large button--primary modal-progress__export-button"
>
exportTableLoadingBar.export