Mirror of https://gitlab.com/bramw/baserow.git, synced 2025-04-18 03:13:47 +00:00
Allow changing existing data sync visible fields and settings
parent 181467b0b0
commit 32d7981572
47 changed files with 2358 additions and 220 deletions
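As a rough orientation before the diff itself: the commit adds a single-item GET and PATCH handler for data syncs, plus an endpoint that lists the properties of one existing data sync. The sketch below shows how a client could exercise the new routes. It assumes the API is mounted under an /api/database/data-sync/ prefix, a data sync with id 42, and a valid JWT access token; none of those specifics come from this diff.

import requests  # plain HTTP client, used here only for illustration

BASE = "https://baserow.example.com/api/database/data-sync"  # assumed prefix
HEADERS = {"Authorization": "JWT <access-token>"}  # placeholder token

# Fetch one data sync, including its public type specific settings.
data_sync = requests.get(f"{BASE}/42/", headers=HEADERS).json()

# List the properties that are currently available for that data sync.
properties = requests.get(f"{BASE}/42/properties/", headers=HEADERS).json()

# Change the visible fields and a type specific setting in one call.
updated = requests.patch(
    f"{BASE}/42/",
    json={
        "synced_properties": ["uid", "dtstart", "summary"],
        "ical_url": "http://localhost/ical.ics",
    },
    headers=HEADERS,
).json()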
backend
changelog/entries/unreleased/feature
enterprise
backend
src/baserow_enterprise
data_sync
baserow_table_data_sync.py
github_issues_data_sync.py
gitlab_issues_data_sync.py
jira_issues_data_sync.py
role
tests/baserow_enterprise_tests/data_sync
web-frontend/modules/baserow_enterprise/components/dataSync
web-frontend
locales
modules
core
database
test/unit
builder/components/elements/components/__snapshots__
database/components/export/__snapshots__
@@ -47,6 +47,16 @@ class CreateDataSyncSerializer(serializers.ModelSerializer):
        fields = ("synced_properties", "type", "table_name")


class UpdateDataSyncSerializer(serializers.ModelSerializer):
    synced_properties = serializers.ListField(
        child=serializers.CharField(), required=False
    )

    class Meta:
        model = DataSync
        fields = ("synced_properties",)


class ListDataSyncPropertiesRequestSerializer(serializers.ModelSerializer):
    type = serializers.ChoiceField(
        choices=lazy(data_sync_type_registry.get_types, list)(),
@@ -1,12 +1,19 @@
from django.urls import re_path

from .views import DataSyncPropertiesView, DataSyncsView, SyncDataSyncTableView
from .views import (
    DataSyncPropertiesView,
    DataSyncsView,
    DataSyncTypePropertiesView,
    DataSyncView,
    SyncDataSyncTableView,
)

app_name = "baserow.contrib.database.api.data_sync"
urlpatterns = [
    re_path(
        r"database/(?P<database_id>[0-9]+)/$", DataSyncsView.as_view(), name="list"
    ),
    re_path(r"(?P<data_sync_id>[0-9]+)/$", DataSyncView.as_view(), name="item"),
    re_path(
        r"(?P<data_sync_id>[0-9]+)/sync/async/$",
        SyncDataSyncTableView.as_view(),
@@ -14,7 +21,12 @@ urlpatterns = [
    ),
    re_path(
        r"properties/$",
        DataSyncPropertiesView.as_view(),
        DataSyncTypePropertiesView.as_view(),
        name="properties",
    ),
    re_path(
        r"(?P<data_sync_id>[0-9]+)/properties/$",
        DataSyncPropertiesView.as_view(),
        name="properties_of_data_sync",
    ),
]
@@ -18,19 +18,32 @@ from baserow.api.schemas import (
    CLIENT_UNDO_REDO_ACTION_GROUP_ID_SCHEMA_PARAMETER,
    get_error_schema,
)
from baserow.api.utils import DiscriminatorCustomFieldsMappingSerializer
from baserow.api.utils import (
    DiscriminatorCustomFieldsMappingSerializer,
    validate_data_custom_fields,
)
from baserow.contrib.database.api.tables.serializers import TableSerializer
from baserow.contrib.database.data_sync.actions import CreateDataSyncTableActionType
from baserow.contrib.database.data_sync.actions import (
    CreateDataSyncTableActionType,
    UpdateDataSyncTableActionType,
)
from baserow.contrib.database.data_sync.exceptions import (
    DataSyncDoesNotExist,
    PropertyNotFound,
    SyncError,
)
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.job_types import SyncDataSyncTableJobType
from baserow.contrib.database.data_sync.models import DataSync
from baserow.contrib.database.data_sync.operations import (
    GetIncludingPublicValuesOperationType,
    ListPropertiesOperationType,
)
from baserow.contrib.database.data_sync.registries import data_sync_type_registry
from baserow.contrib.database.handler import DatabaseHandler
from baserow.core.action.registries import action_type_registry
from baserow.core.exceptions import ApplicationDoesNotExist, UserNotInWorkspace
from baserow.core.handler import CoreHandler
from baserow.core.jobs.exceptions import MaxJobCountExceeded
from baserow.core.jobs.handler import JobHandler
from baserow.core.jobs.registries import job_type_registry
@@ -43,8 +56,10 @@ from .errors import (
)
from .serializers import (
    CreateDataSyncSerializer,
    DataSyncSerializer,
    ListDataSyncPropertiesRequestSerializer,
    ListDataSyncPropertySerializer,
    UpdateDataSyncSerializer,
)

@@ -81,6 +96,7 @@ class DataSyncsView(APIView):
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_REQUEST_BODY_VALIDATION",
                    "ERROR_SYNC_ERROR",
                    "ERROR_PROPERTY_NOT_FOUND",
                ]
            ),
            404: get_error_schema(["ERROR_APPLICATION_DOES_NOT_EXIST"]),
@@ -118,6 +134,119 @@ class DataSyncsView(APIView):
        return Response(serializer.data)


class DataSyncView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="data_sync_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="The data sync that must be fetched.",
            ),
        ],
        tags=["Database tables"],
        operation_id="get_table_data_sync",
        description=(
            "Responds with the data sync, including the data sync type specific "
            "properties, if the user has the right permissions."
        ),
        responses={
            200: DiscriminatorCustomFieldsMappingSerializer(
                data_sync_type_registry, DataSyncSerializer
            ),
            400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
            404: get_error_schema(["ERROR_DATA_SYNC_DOES_NOT_EXIST"]),
        },
    )
    @map_exceptions(
        {
            DataSyncDoesNotExist: ERROR_DATA_SYNC_DOES_NOT_EXIST,
            UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
        }
    )
    def get(self, request, data_sync_id):
        """Responds with the data sync if the user belongs to the workspace."""

        data_sync = DataSyncHandler().get_data_sync(data_sync_id)

        CoreHandler().check_permissions(
            request.user,
            GetIncludingPublicValuesOperationType.type,
            workspace=data_sync.table.database.workspace,
            context=data_sync.table,
        )

        data_sync_type = data_sync_type_registry.get_by_model(data_sync)
        serializer = data_sync_type.get_serializer(data_sync, DataSyncSerializer)
        return Response(serializer.data)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="data_sync_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Updates the data sync related to the provided value.",
            ),
        ],
        tags=["Database tables"],
        operation_id="update_table_data_sync",
        description=(
            "Updates the properties of the provided data sync, if the user has the "
            "right permissions. Note that if the `synced_properties` is not provided, "
            "the available properties change, then the unavailable ones will "
            "automatically be removed."
        ),
        request=DiscriminatorCustomFieldsMappingSerializer(
            data_sync_type_registry, UpdateDataSyncSerializer
        ),
        responses={
            200: DataSyncSerializer,
            400: get_error_schema(
                [
                    "ERROR_USER_NOT_IN_GROUP",
                    "ERROR_PROPERTY_NOT_FOUND",
                    "ERROR_SYNC_ERROR",
                ]
            ),
            404: get_error_schema(["ERROR_DATA_SYNC_DOES_NOT_EXIST"]),
        },
    )
    @transaction.atomic
    @map_exceptions(
        {
            DataSyncDoesNotExist: ERROR_DATA_SYNC_DOES_NOT_EXIST,
            UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
            PropertyNotFound: ERROR_PROPERTY_NOT_FOUND,
            SyncError: ERROR_SYNC_ERROR,
        }
    )
    def patch(self, request, data_sync_id):
        """Updates the data sync if the user belongs to the workspace."""

        data_sync = DataSyncHandler().get_data_sync(
            data_sync_id, base_queryset=DataSync.objects.select_for_update(of=("self",))
        )
        data_sync_type = data_sync_type_registry.get_by_model(data_sync)

        data = validate_data_custom_fields(
            data_sync_type.type,
            data_sync_type_registry,
            request.data,
            base_serializer_class=UpdateDataSyncSerializer,
            partial=True,
            return_validated=True,
        )

        data_sync = action_type_registry.get_by_type(UpdateDataSyncTableActionType).do(
            request.user, data_sync, **data
        )

        return Response(DataSyncSerializer(data_sync).data)


class SyncDataSyncTableView(APIView):
    permission_classes = (IsAuthenticated,)

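The patch handler above funnels the request body through validate_data_custom_fields so that the base UpdateDataSyncSerializer is combined with the request serializer fields of the concrete data sync type. A minimal sketch of that step, with a hypothetical payload for the built-in ical_calendar type (module paths inferred from the imports in this diff):

from baserow.api.utils import validate_data_custom_fields
from baserow.contrib.database.api.data_sync.serializers import UpdateDataSyncSerializer
from baserow.contrib.database.data_sync.registries import data_sync_type_registry

data = validate_data_custom_fields(
    "ical_calendar",
    data_sync_type_registry,
    {"synced_properties": ["uid", "dtstart"], "ical_url": "http://localhost/ical.ics"},
    base_serializer_class=UpdateDataSyncSerializer,
    partial=True,
    return_validated=True,
)
# The validated dict is then forwarded as **kwargs into
# UpdateDataSyncTableActionType.do(request.user, data_sync, **data).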
@@ -130,7 +259,6 @@ class SyncDataSyncTableView(APIView):
                description="Starts a job to sync the data sync table related to the "
                "provided value.",
            ),
            CLIENT_SESSION_ID_SCHEMA_PARAMETER,
        ],
        tags=["Database tables"],
        operation_id="sync_data_sync_table_async",
@@ -170,12 +298,12 @@
        return Response(serializer.data, status=HTTP_202_ACCEPTED)


class DataSyncPropertiesView(APIView):
class DataSyncTypePropertiesView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        tags=["Database tables"],
        operation_id="get_database_data_sync_properties",
        operation_id="get_table_data_sync_type_properties",
        description=(
            "Lists all the properties of the provided data sync type given the request "
            "data. This can be used to choose which properties should be included when "
@@ -228,3 +356,55 @@ class DataSyncPropertiesView(APIView):

        serializer = ListDataSyncPropertySerializer(data_sync_properties, many=True)
        return Response(serializer.data)


class DataSyncPropertiesView(APIView):
    permission_classes = (IsAuthenticated,)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="data_sync_id",
                location=OpenApiParameter.PATH,
                type=OpenApiTypes.INT,
                description="Lists properties related to the provided ID.",
            ),
            CLIENT_SESSION_ID_SCHEMA_PARAMETER,
        ],
        tags=["Database tables"],
        operation_id="get_table_data_sync_properties",
        description="Lists all the available properties of the provided data sync.",
        responses={
            200: ListDataSyncPropertySerializer(many=True),
            400: get_error_schema(["ERROR_SYNC_ERROR"]),
            404: get_error_schema(["ERROR_DATA_SYNC_DOES_NOT_EXIST"]),
        },
    )
    @map_exceptions(
        {
            DataSyncDoesNotExist: ERROR_DATA_SYNC_DOES_NOT_EXIST,
            SyncError: ERROR_SYNC_ERROR,
        }
    )
    def get(
        self,
        request: Request,
        data_sync_id: int,
    ):
        """
        Lists the properties of the related data sync.
        """

        data_sync = DataSyncHandler().get_data_sync(data_sync_id)

        CoreHandler().check_permissions(
            request.user,
            ListPropertiesOperationType.type,
            workspace=data_sync.table.database.workspace,
            context=data_sync.table,
        )

        data_sync_type = data_sync_type_registry.get_by_model(data_sync)
        data_sync_properties = data_sync_type.get_properties(data_sync)
        serializer = ListDataSyncPropertySerializer(data_sync_properties, many=True)
        return Response(serializer.data)
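Two properties endpoints now exist side by side: the renamed DataSyncTypePropertiesView still answers on properties/ and derives the type from the request data, while the new DataSyncPropertiesView lists the properties of one concrete data sync. A small sketch of resolving both routes by name, assuming a data sync with id 42 exists (the id is hypothetical):

from django.urls import reverse

# Type level listing, handled by DataSyncTypePropertiesView.
type_level_url = reverse("api:database:data_sync:properties")

# Listing for one existing data sync, handled by DataSyncPropertiesView.
per_data_sync_url = reverse(
    "api:database:data_sync:properties_of_data_sync", kwargs={"data_sync_id": 42}
)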
@@ -150,9 +150,11 @@ class DatabaseConfig(AppConfig):
        from baserow.contrib.database.data_sync.actions import (
            CreateDataSyncTableActionType,
            SyncDataSyncTableActionType,
            UpdateDataSyncTableActionType,
        )

        action_type_registry.register(CreateDataSyncTableActionType())
        action_type_registry.register(UpdateDataSyncTableActionType())
        action_type_registry.register(SyncDataSyncTableActionType())

        from .airtable.registry import airtable_column_type_registry
@@ -668,7 +670,11 @@ class DatabaseConfig(AppConfig):
        )

        from .airtable.operations import RunAirtableImportJobOperationType
        from .data_sync.operations import SyncTableOperationType
        from .data_sync.operations import (
            GetIncludingPublicValuesOperationType,
            ListPropertiesOperationType,
            SyncTableOperationType,
        )
        from .export.operations import ExportTableOperationType
        from .fields.operations import (
            CreateFieldOperationType,
@@ -846,6 +852,8 @@
        operation_type_registry.register(DeleteViewFilterGroupOperationType())
        operation_type_registry.register(ReadViewFilterGroupOperationType())
        operation_type_registry.register(SyncTableOperationType())
        operation_type_registry.register(ListPropertiesOperationType())
        operation_type_registry.register(GetIncludingPublicValuesOperationType())

        from baserow.core.registries import permission_manager_type_registry

@@ -98,6 +98,68 @@ class CreateDataSyncTableActionType(UndoableActionType):
        )


class UpdateDataSyncTableActionType(ActionType):
    type = "update_data_sync_table"
    description = ActionTypeDescription(
        _("Update data sync table"),
        _('Data sync table "%(table_name)s" (%(table_id)s) updated'),
        DATABASE_ACTION_CONTEXT,
    )
    analytics_params = [
        "database_id",
        "table_id",
        "data_sync_id",
    ]

    @dataclasses.dataclass
    class Params:
        database_id: int
        database_name: str
        table_id: int
        table_name: str
        data_sync_id: int

    @classmethod
    def do(
        cls,
        user: AbstractUser,
        data_sync: DataSync,
        synced_properties: Optional[List[str]] = None,
        **kwargs: dict,
    ) -> DataSync:
        data_sync = data_sync.specific

        if not synced_properties:
            synced_properties = list(
                data_sync.synced_properties.all().values_list("key", flat=True)
            )

        data_sync = DataSyncHandler().update_data_sync_table(
            user=user,
            data_sync=data_sync,
            synced_properties=synced_properties,
            **kwargs,
        )

        table = data_sync.table
        database = table.database
        workspace = database.workspace
        params = cls.Params(
            database.id,
            database.name,
            table.id,
            table.name,
            data_sync.id,
        )
        cls.register_action(user, params, cls.scope(database.id), workspace=workspace)

        return data_sync

    @classmethod
    def scope(cls, database_id) -> ActionScopeStr:
        return ApplicationActionScopeType.value(database_id)


class SyncDataSyncTableActionType(ActionType):
    type = "sync_data_sync_table"
    description = ActionTypeDescription(
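The new action is triggered through the action registry, exactly as the PATCH view does. A hedged sketch, assuming user and data_sync already exist and that the data sync is of the ical_calendar type (otherwise ical_url would be rejected):

from baserow.contrib.database.data_sync.actions import UpdateDataSyncTableActionType
from baserow.core.action.registries import action_type_registry

data_sync = action_type_registry.get_by_type(UpdateDataSyncTableActionType).do(
    user,
    data_sync,
    synced_properties=["uid", "dtstart", "summary"],
    ical_url="http://localhost/ical.ics",
)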
@@ -1,12 +1,14 @@
from copy import deepcopy
from typing import List, Optional

from django.contrib.auth.models import AbstractUser
from django.core.cache import cache
from django.db.models import Prefetch
from django.db.models import Prefetch, QuerySet
from django.utils import timezone, translation
from django.utils.translation import gettext as _

from baserow.contrib.database.db.schema import safe_django_schema_editor
from baserow.contrib.database.fields.constants import DeleteFieldStrategyEnum
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import Field
from baserow.contrib.database.fields.registries import field_type_registry
@@ -15,12 +17,18 @@ from baserow.contrib.database.operations import CreateTableDatabaseTableOperatio
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.search.handler import SearchHandler
from baserow.contrib.database.table.models import Table
from baserow.contrib.database.table.operations import UpdateDatabaseTableOperationType
from baserow.contrib.database.table.signals import table_created, table_updated
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.view_types import GridViewType
from baserow.core.db import specific_queryset
from baserow.core.handler import CoreHandler
from baserow.core.utils import ChildProgressBuilder, extract_allowed, remove_duplicates
from baserow.core.utils import (
    ChildProgressBuilder,
    extract_allowed,
    remove_duplicates,
    set_allowed_attrs,
)

from .exceptions import (
    DataSyncDoesNotExist,
@@ -35,7 +43,9 @@ from .registries import data_sync_type_registry


class DataSyncHandler:
    def get_data_sync(self, data_sync_id: int) -> DataSync:
    def get_data_sync(
        self, data_sync_id: int, base_queryset: Optional[QuerySet] = None
    ) -> DataSync:
        """
        Returns the data sync matching the provided ID.
@@ -43,9 +53,14 @@ class DataSyncHandler:
        :return: The fetched data sync object.
        """

        if base_queryset is None:
            base_queryset = DataSync.objects

        try:
            return (
                DataSync.objects.select_related("table")
                base_queryset.select_related(
                    "table", "table__database", "table__database__workspace"
                )
                .prefetch_related("synced_properties")
                .get(pk=data_sync_id)
                .specific
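The new base_queryset argument lets callers decide how the data sync row is fetched; the PATCH view uses it to lock the row for the duration of the update. A minimal sketch, assuming data_sync_id refers to an existing data sync:

from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.models import DataSync

data_sync = DataSyncHandler().get_data_sync(
    data_sync_id, base_queryset=DataSync.objects.select_for_update(of=("self",))
)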
@@ -180,6 +195,61 @@ class DataSyncHandler:

        return data_sync_instance

    def update_data_sync_table(
        self,
        user: AbstractUser,
        data_sync: DataSync,
        synced_properties: List[str],
        **kwargs: dict,
    ) -> DataSync:
        """
        Updates the synced properties and data sync properties.

        :param user: The user on whose behalf the data sync is updated.
        :param data_sync: The data sync that must be updated.
        :param synced_properties: A list of all properties that must be in data sync
            table. New ones will be created, and removed ones will be deleted.
        :return: The updated data sync.
        """

        if not isinstance(data_sync, DataSync):
            raise ValueError("The data sync is not an instance of DataSync")

        CoreHandler().check_permissions(
            user,
            UpdateDatabaseTableOperationType.type,
            workspace=data_sync.table.database.workspace,
            context=data_sync.table,
        )

        data_sync = data_sync.specific
        data_sync_type = data_sync_type_registry.get_by_model(data_sync)

        allowed_fields = [] + data_sync_type.allowed_fields
        data_sync = set_allowed_attrs(kwargs, allowed_fields, data_sync)
        data_sync.save()

        data_sync_properties = data_sync_type.get_properties(data_sync)
        data_sync_property_keys = [p.key for p in data_sync_properties]
        # Omit properties that are not available anymore to prevent the backend from
        # failing hard.
        synced_properties = [
            p for p in synced_properties if p in data_sync_property_keys
        ]

        self.set_data_sync_synced_properties(
            user,
            data_sync,
            synced_properties=synced_properties,
            data_sync_properties=data_sync_properties,
        )

        table_updated.send(
            self, table=data_sync.table, user=user, force_table_refresh=False
        )

        return data_sync

    def get_table_sync_lock_key(self, data_sync_id):
        return f"data_sync_{data_sync_id}_syncing_table"
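For completeness, the handler can also be called directly, which is what the tests further down in this diff do. A sketch assuming user and an ical_calendar data_sync already exist:

from baserow.contrib.database.data_sync.handler import DataSyncHandler

data_sync = DataSyncHandler().update_data_sync_table(
    user=user,
    data_sync=data_sync,
    synced_properties=["uid", "dtstart", "summary"],
    ical_url="http://localhost/ics.ics",
)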
@@ -294,6 +364,9 @@
        existing_rows_in_table = {
            tuple(row[key_to_field_id[key]] for key in unique_primary_keys): row
            for row in existing_rows_queryset
            # Unique primaries can't be empty. If they are, then they're left dangling
            # because the primary was removed. They will be deleted later.
            if all(row[key_to_field_id[key]] for key in unique_primary_keys)
        }
        progress.increment(by=1)  # makes the total `10`

@@ -337,8 +410,13 @@

        row_ids_to_delete = []
        for existing_id in existing_rows_in_table.keys():
            if existing_id not in rows_of_data_sync:
            if existing_id is None or existing_id not in rows_of_data_sync:
                row_ids_to_delete.append(existing_rows_in_table[existing_id]["id"])
        # Loop over the dangling rows and delete those because they can't be identified
        # anymore.
        for row in existing_rows_queryset:
            if any(not row[key_to_field_id[key]] for key in unique_primary_keys):
                row_ids_to_delete.append(row["id"])
        progress.increment(by=1)  # makes the total `70`

        if len(rows_to_create) > 0:
@@ -407,6 +485,12 @@
            fetched, they can be provided as argument to avoid fetching them again.
        """

        # Remove the web_socket_id, so that the client receives the real-time messages
        # when a field is created or deleted. These fields are not exposed to the user
        # when making the API call, so this informs the user about those changes.
        user = deepcopy(user)
        user.web_socket_id = None

        # No need to do a permission check because that's handled in the FieldHandler
        # create and delete methods.
@@ -482,6 +566,18 @@

        handler = FieldHandler()

        for data_sync_property_instance in properties_to_be_removed:
            field = data_sync_property_instance.field
            data_sync_property_instance.delete()
            handler.delete_field(
                user=user,
                field=field,
                allow_deleting_primary=True,
                delete_strategy=DeleteFieldStrategyEnum.PERMANENTLY_DELETE,
            )

        has_primary = data_sync.table.field_set.filter(primary=True).exists()

        for data_sync_property in properties_to_be_added:
            baserow_field = data_sync_property.to_baserow_field()
            baserow_field_type = field_type_registry.get_by_model(baserow_field)
@@ -491,6 +587,9 @@
                field_kwargs[
                    "immutable_properties"
                ] = data_sync_property.immutable_properties
            if data_sync_property.unique_primary and not has_primary:
                has_primary = True
                field_kwargs["primary"] = True
            # It could be that a field with the same name already exists. In that case,
            # we don't want to block the creation of the field, but rather find a name
            # that works.
@@ -538,12 +637,3 @@
                "metadata",
            )
        )

        for data_sync_property_instance in properties_to_be_removed:
            field = data_sync_property_instance.field
            data_sync_property_instance.delete()
            handler.delete_field(
                user=user,
                field=field,
                permanently_delete_field=True,
            )
@@ -66,6 +66,7 @@ class ICalCalendarDataSyncType(DataSyncType):
    model_class = ICalCalendarDataSync
    allowed_fields = ["ical_url"]
    serializer_field_names = ["ical_url"]
    request_serializer_field_names = ["ical_url"]

    def get_properties(self, instance) -> List[DataSyncProperty]:
        return [
@@ -3,3 +3,14 @@ from baserow.contrib.database.table.operations import DatabaseTableOperationType


class SyncTableOperationType(DatabaseTableOperationType):
    type = "database.data_sync.sync_table"


class ListPropertiesOperationType(DatabaseTableOperationType):
    type = "database.data_sync.list_properties"


# If a user has permissions to this operation, then it will expose the saved properties,
# which include data like PostgreSQL hosts, API tokens, etc. Only fields that are in
# `public_fields` will be exposed.
class GetIncludingPublicValuesOperationType(DatabaseTableOperationType):
    type = "database.data_sync.get"
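The two new operation types are checked through CoreHandler, mirroring the calls in the views above. A sketch of guarding the read path, assuming user and data_sync exist:

from baserow.contrib.database.data_sync.operations import (
    GetIncludingPublicValuesOperationType,
)
from baserow.core.handler import CoreHandler

CoreHandler().check_permissions(
    user,
    GetIncludingPublicValuesOperationType.type,
    workspace=data_sync.table.database.workspace,
    context=data_sync.table,
)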
@@ -130,7 +130,7 @@ class PostgreSQLDataSyncType(DataSyncType):
        "postgresql_table",
        "postgresql_sslmode",
    ]
    serializer_field_names = [
    request_serializer_field_names = [
        "postgresql_host",
        "postgresql_username",
        "postgresql_password",
@@ -140,6 +140,17 @@
        "postgresql_table",
        "postgresql_sslmode",
    ]
    # The `postgresql_password` should not be included because it's a secret value that
    # must only be possible to set and not get.
    serializer_field_names = [
        "postgresql_host",
        "postgresql_username",
        "postgresql_port",
        "postgresql_database",
        "postgresql_schema",
        "postgresql_table",
        "postgresql_sslmode",
    ]

    @contextlib.contextmanager
    def _connection(self, instance):
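The split between request_serializer_field_names and serializer_field_names is what keeps postgresql_password writable but never readable. Below is a hypothetical data sync type illustrating the same pattern; every name in it, including the DataSyncType import path, is an assumption made for the example and not part of this diff:

from baserow.contrib.database.data_sync.registries import DataSyncType


class ExampleTokenDataSyncType(DataSyncType):
    # Partial sketch only; a real type also needs model_class, get_properties
    # and get_all_rows implementations.
    type = "example_token"
    allowed_fields = ["api_token", "base_url"]
    request_serializer_field_names = ["api_token", "base_url"]
    # The secret token can be set via the API but is never serialized back.
    serializer_field_names = ["base_url"]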
@@ -1,3 +1,5 @@
from enum import Enum

# Please keep in sync with the web-frontend version of this constant found in
# web-frontend/modules/database/utils/constants.js
RESERVED_BASEROW_FIELD_NAMES = {"id", "order"}
@@ -28,3 +30,14 @@ BASEROW_BOOLEAN_FIELD_FALSE_VALUES = [
    "unchecked",
    False
]


class DeleteFieldStrategyEnum(Enum):
    """
    This enum value can be passed into the `FieldHandler::delete_field`
    `delete_strategy` argument.
    """

    TRASH = "TRASH"  # default value that trashes the field.
    DELETE_OBJECT = "DELETE_OBJECT"  # just deletes the object in the database.
    PERMANENTLY_DELETE = "PERMANENTLY_DELETE"  # permanently deletes the object using the trash.
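The enum replaces the old permanently_delete_field flag on FieldHandler.delete_field. A sketch of selecting a strategy, assuming user and field exist; the data sync handler above combines PERMANENTLY_DELETE with allow_deleting_primary=True, while LinkRowFieldType switches to DELETE_OBJECT:

from baserow.contrib.database.fields.constants import DeleteFieldStrategyEnum
from baserow.contrib.database.fields.handler import FieldHandler

FieldHandler().delete_field(
    user=user,
    field=field,
    delete_strategy=DeleteFieldStrategyEnum.PERMANENTLY_DELETE,
)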
@@ -127,7 +127,11 @@ from baserow.core.user_files.exceptions import UserFileDoesNotExist
from baserow.core.user_files.handler import UserFileHandler
from baserow.core.utils import list_to_comma_separated_string

from .constants import BASEROW_BOOLEAN_FIELD_TRUE_VALUES, UPSERT_OPTION_DICT_KEY
from .constants import (
    BASEROW_BOOLEAN_FIELD_TRUE_VALUES,
    UPSERT_OPTION_DICT_KEY,
    DeleteFieldStrategyEnum,
)
from .dependencies.exceptions import (
    CircularFieldDependencyError,
    SelfReferenceFieldDependencyError,
@@ -2734,7 +2738,7 @@ class LinkRowFieldType(ManyToManyFieldTypeSerializeToInputValueMixin, FieldType)
                field=from_field.link_row_related_field,
                # Prevent the deletion of from_field itself as normally both link row
                # fields are deleted together.
                permanently_delete_field=True,
                delete_strategy=DeleteFieldStrategyEnum.DELETE_OBJECT,
            )
        if to_instance:
            to_field.link_row_related_field = None
@@ -35,6 +35,7 @@ from baserow.contrib.database.db.sql_queries import (
from baserow.contrib.database.fields.constants import (
    RESERVED_BASEROW_FIELD_NAMES,
    UPSERT_OPTION_DICT_KEY,
    DeleteFieldStrategyEnum,
)
from baserow.contrib.database.fields.field_converters import (
    MultipleSelectConversionConfig,
@@ -56,6 +57,7 @@ from baserow.core.models import TrashEntry
from baserow.core.telemetry.utils import baserow_trace_methods
from baserow.core.trash.exceptions import RelatedTableTrashedException
from baserow.core.trash.handler import TrashHandler
from baserow.core.trash.registries import trash_item_type_registry
from baserow.core.utils import (
    ChildProgressBuilder,
    extract_allowed,
@@ -841,7 +843,7 @@ class FieldHandler(metaclass=baserow_trace_methods(tracer)):
        field_cache: Optional[FieldCache] = None,
        apply_and_send_updates: Optional[bool] = True,
        allow_deleting_primary: Optional[bool] = False,
        permanently_delete_field: Optional[bool] = False,
        delete_strategy: DeleteFieldStrategyEnum = DeleteFieldStrategyEnum.TRASH,
    ) -> List[Field]:
        """
        Deletes an existing field if it is not a primary field.
@@ -859,11 +861,9 @@ class FieldHandler(metaclass=baserow_trace_methods(tracer)):
            updates being applied and any signals from being sent.
        :param allow_deleting_primary: Set to true if it's OK for a primary field
            to be deleted.
        :param permanently_delete_field: If True, avoids to use
            the trash system and directly calls delete for the field metadata
            instead. In case we're not using the trash but just deleting the
            field, we also skip the deletion of other related fields defined by
            field_type.get_other_fields_to_trash_restore_always_together.
        :param delete_strategy: Indicates how to delete the field. By default it's
            trashed, but depending on the value it can also just delete the object, or
            permanently delete it immediately.
        :raises ValueError: When the provided field is not an instance of Field.
        :raises CannotDeletePrimaryField: When we try to delete the primary
            field which cannot be deleted.
@@ -909,7 +909,15 @@

        FieldDependencyHandler.break_dependencies_delete_dependants(field)

        if permanently_delete_field:
        if delete_strategy == DeleteFieldStrategyEnum.PERMANENTLY_DELETE:
            from baserow.contrib.database.trash.trash_types import (
                FieldTrashableItemType,
            )

            trash_item_type_registry.get(
                FieldTrashableItemType.type
            ).permanently_delete_item(field)
        elif delete_strategy == DeleteFieldStrategyEnum.DELETE_OBJECT:
            field.delete()
        else:
            existing_trash_entry = TrashHandler.trash(
@@ -926,7 +934,7 @@
                field, update_collector, field_cache, all_dependent_fields_grouped_by_depth
            )

        if not permanently_delete_field:
        if delete_strategy == DeleteFieldStrategyEnum.TRASH:
            field_type = field_type_registry.get_by_model(field)
            related_fields_to_trash = [
                f
@@ -17,6 +17,7 @@ from baserow.contrib.database.data_sync.exceptions import (
    SyncDataSyncTableAlreadyRunning,
    SyncError,
)
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.models import (
    DataSync,
    DataSyncSyncedProperty,
@@ -316,6 +317,214 @@ def test_can_undo_redo_create_data_sync(api_client, data_fixture):
    assert data_sync.table.trashed is False


@pytest.mark.django_db
def test_update_data_sync_no_permissions(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    user_2, token_2 = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.patch(
        url,
        {
            "synced_properties": ["uid", "dtstart", "summary"],
            "ical_url": "http://localhost/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token_2}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    response_json = response.json()
    assert response_json["error"] == "ERROR_USER_NOT_IN_GROUP"


@pytest.mark.django_db
def test_update_data_sync_invalid_synced_properties(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.patch(
        url,
        {
            "synced_properties": ["TEST"],
            "ical_url": "http://localhost/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    response_json = response.json()
    assert len(response_json["synced_properties"]) == 1
    assert response_json["synced_properties"][0]["key"] == "uid"


@pytest.mark.django_db
def test_update_data_sync_not_existing_data_sync(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": 0})
    response = api_client.patch(
        url,
        {
            "synced_properties": ["TEST"],
            "ical_url": "http://localhost/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    response_json = response.json()
    assert response_json["error"] == "ERROR_DATA_SYNC_DOES_NOT_EXIST"


@pytest.mark.django_db
def test_update_data_sync_invalid_kwargs(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.patch(
        url,
        {
            "synced_properties": ["TEST"],
            "ical_url": "TEST",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    response_json = response.json()
    assert response_json["error"] == "ERROR_REQUEST_BODY_VALIDATION"
    assert response_json["detail"]["ical_url"][0]["code"] == "invalid"


@pytest.mark.django_db
def test_update_data_sync_not_providing_anything(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.patch(
        url,
        {},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json == {
        "id": data_sync.id,
        "type": "ical_calendar",
        "synced_properties": [
            {
                "field_id": data_sync.table.field_set.all().first().id,
                "key": "uid",
                "unique_primary": True,
            }
        ],
        "last_sync": None,
        "last_error": None,
    }


@pytest.mark.django_db
def test_update_data_sync(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.patch(
        url,
        {
            "synced_properties": ["uid", "dtstart", "summary"],
            "ical_url": "http://localhost/ics.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    properties = DataSyncSyncedProperty.objects.filter(data_sync=data_sync).order_by(
        "id"
    )

    assert response.status_code == HTTP_200_OK
    assert response.json() == {
        "id": data_sync.id,
        "type": "ical_calendar",
        "synced_properties": [
            {
                "field_id": properties[0].field_id,
                "key": "uid",
                "unique_primary": True,
            },
            {
                "field_id": properties[1].field_id,
                "key": "dtstart",
                "unique_primary": False,
            },
            {
                "field_id": properties[2].field_id,
                "key": "summary",
                "unique_primary": False,
            },
        ],
        "last_sync": None,
        "last_error": None,
    }


@pytest.mark.django_db(transaction=True)
@responses.activate
def test_async_sync_data_sync_table_invalid_data_sync(api_client, data_fixture):
@@ -770,3 +979,248 @@ def test_get_data_sync_properties(data_fixture, api_client):
            "field_type": "text",
        },
    ]


@pytest.mark.django_db
def test_get_data_sync_properties_of_data_sync_unauthorized(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token(
        email="test_1@test.nl", password="password", first_name="Test1"
    )
    database = data_fixture.create_database_application(user=user)

    url = reverse("api:database:data_sync:list", kwargs={"database_id": database.id})
    response = api_client.post(
        url,
        {
            "table_name": "Test 1",
            "type": "ical_calendar",
            "synced_properties": ["uid", "dtstart", "dtend", "summary"],
            "ical_url": "https://baserow.io/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    data_sync_id = response.json()["data_sync"]["id"]

    url = reverse(
        "api:database:data_sync:properties_of_data_sync",
        kwargs={"data_sync_id": data_sync_id},
    )
    response = api_client.get(
        url,
        format="json",
    )
    assert response.status_code == HTTP_401_UNAUTHORIZED


@pytest.mark.django_db
def test_get_data_sync_properties_of_data_sync_no_permissions(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token(
        email="test_1@test.nl", password="password", first_name="Test1"
    )
    user_2, token_2 = data_fixture.create_user_and_token(
        email="test_2@test.nl", password="password", first_name="Test1"
    )
    database = data_fixture.create_database_application(user=user_2)

    url = reverse("api:database:data_sync:list", kwargs={"database_id": database.id})
    response = api_client.post(
        url,
        {
            "table_name": "Test 1",
            "type": "ical_calendar",
            "synced_properties": ["uid", "dtstart", "dtend", "summary"],
            "ical_url": "https://baserow.io/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token_2}",
    )
    assert response.status_code == HTTP_200_OK
    data_sync_id = response.json()["data_sync"]["id"]

    url = reverse(
        "api:database:data_sync:properties_of_data_sync",
        kwargs={"data_sync_id": data_sync_id},
    )
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.json()["error"] == "PERMISSION_DENIED"
    assert response.status_code == HTTP_401_UNAUTHORIZED


@pytest.mark.django_db
def test_get_data_sync_properties_of_data_sync_does_not_exist(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token(
        email="test_1@test.nl", password="password", first_name="Test1"
    )

    url = reverse(
        "api:database:data_sync:properties_of_data_sync", kwargs={"data_sync_id": 0}
    )
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND


@pytest.mark.django_db
@responses.activate
def test_get_data_sync_properties_of_data_sync(data_fixture, api_client):
    responses.add(
        responses.GET,
        "https://baserow.io/ical.ics",
        status=200,
        body=ICAL_FEED_WITH_ONE_ITEMS,
    )

    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    url = reverse("api:database:data_sync:list", kwargs={"database_id": database.id})
    response = api_client.post(
        url,
        {
            "table_name": "Test 1",
            "type": "ical_calendar",
            "synced_properties": ["uid", "dtstart", "dtend", "summary"],
            "ical_url": "https://baserow.io/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    data_sync_id = response.json()["data_sync"]["id"]

    url = reverse(
        "api:database:data_sync:properties_of_data_sync",
        kwargs={"data_sync_id": data_sync_id},
    )
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    assert response.json() == [
        {
            "unique_primary": True,
            "key": "uid",
            "name": "Unique ID",
            "field_type": "text",
        },
        {
            "unique_primary": False,
            "key": "dtstart",
            "name": "Start date",
            "field_type": "date",
        },
        {
            "unique_primary": False,
            "key": "dtend",
            "name": "End date",
            "field_type": "date",
        },
        {
            "unique_primary": False,
            "key": "summary",
            "name": "Summary",
            "field_type": "text",
        },
    ]


@pytest.mark.django_db
def test_get_data_sync_not_found(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": 0})
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    response_json = response.json()
    assert response_json["error"] == "ERROR_DATA_SYNC_DOES_NOT_EXIST"


@pytest.mark.django_db
def test_get_data_sync_no_permission(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token(
        email="test_1@test.nl", password="password", first_name="Test1"
    )
    user_2, token_2 = data_fixture.create_user_and_token(
        email="test_2@test.nl", password="password", first_name="Test1"
    )
    database = data_fixture.create_database_application(user=user_2)

    url = reverse("api:database:data_sync:list", kwargs={"database_id": database.id})
    response = api_client.post(
        url,
        {
            "table_name": "Test 1",
            "type": "ical_calendar",
            "synced_properties": ["uid", "dtstart", "dtend", "summary"],
            "ical_url": "https://baserow.io/ical.ics",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token_2}",
    )
    assert response.status_code == HTTP_200_OK
    data_sync_id = response.json()["data_sync"]["id"]

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync_id})
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    response_json = response.json()
    assert response_json["error"] == "ERROR_USER_NOT_IN_GROUP"


@pytest.mark.django_db
def test_get_data_sync(data_fixture, api_client):
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid"],
        ical_url="https://baserow.io",
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json == {
        "id": data_sync.id,
        "type": "ical_calendar",
        "synced_properties": [
            {
                "field_id": data_sync.table.field_set.all().first().id,
                "key": "uid",
                "unique_primary": True,
            }
        ],
        "last_sync": None,
        "last_error": None,
        "ical_url": "https://baserow.io",
    }
@@ -2,6 +2,7 @@ from datetime import datetime, timezone
from unittest.mock import patch

from django.core.cache import cache
from django.db import connection

import pytest
import responses
@@ -25,10 +26,11 @@ from baserow.contrib.database.data_sync.models import (
from baserow.contrib.database.data_sync.registries import DataSyncTypeRegistry
from baserow.contrib.database.fields.exceptions import CannotDeletePrimaryField
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import LongTextField, TextField
from baserow.contrib.database.fields.models import Field, LongTextField, TextField
from baserow.contrib.database.views.models import GridView
from baserow.core.db import specific_iterator
from baserow.core.exceptions import UserNotInWorkspace
from baserow.core.models import TrashEntry

ICAL_FEED_WITH_ONE_ITEMS = """BEGIN:VCALENDAR
VERSION:2.0
@@ -442,6 +444,98 @@ def test_create_data_sync_table_automatically_add_unique_properties(
    assert fields[1].primary is False


@pytest.mark.django_db
def test_update_data_sync_table_without_permissions(data_fixture):
    user = data_fixture.create_user()
    user_2 = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    with pytest.raises(UserNotInWorkspace):
        DataSyncHandler().update_data_sync_table(
            user=user_2,
            data_sync=data_sync,
            synced_properties=["uid", "dtstart", "summary"],
            ical_url="http://localhost/ics.ics",
        )


@pytest.mark.django_db
@patch("baserow.contrib.database.table.signals.table_updated.send")
def test_update_data_sync_table(send_mock, data_fixture):
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart", "dtend"],
        ical_url="https://baserow.io",
    )

    data_sync = handler.update_data_sync_table(
        user=user,
        data_sync=data_sync,
        synced_properties=["uid", "dtstart", "summary"],
        ical_url="http://localhost/ics.ics",
    )

    assert isinstance(data_sync, ICalCalendarDataSync)
    assert data_sync.id
    assert data_sync.table.name == "Test"
    assert data_sync.table.database_id == database.id
    assert data_sync.ical_url == "http://localhost/ics.ics"

    fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))
    assert len(fields) == 3
    assert fields[0].name == "Unique ID"
    assert isinstance(fields[0], TextField)
    assert fields[0].primary is True
    assert fields[0].read_only is True
    assert fields[0].immutable_type is True
    assert fields[0].immutable_properties is True
    assert fields[1].name == "Start date"
    assert fields[1].primary is False
    assert fields[1].date_format == "ISO"
    assert fields[1].date_include_time is True
    assert fields[1].date_time_format == "24"
    assert fields[1].date_show_tzinfo is True
    assert fields[1].read_only is True
    assert fields[1].immutable_properties is False
    assert fields[2].name == "Summary"
    assert fields[2].primary is False
    assert fields[2].read_only is True

    properties = DataSyncSyncedProperty.objects.filter(data_sync=data_sync).order_by(
        "id"
    )
    assert len(properties) == 3
    assert properties[0].key == "uid"
    assert properties[0].field_id == fields[0].id
    assert properties[1].key == "dtstart"
    assert properties[1].field_id == fields[1].id
    assert properties[2].key == "summary"
    assert properties[2].field_id == fields[2].id

    send_mock.assert_called_once()
    assert send_mock.call_args[1]["table"].id == data_sync.table_id
    assert send_mock.call_args[1]["user"] == user


@pytest.mark.django_db
@responses.activate
def test_sync_data_sync_table_create_update_delete_row(data_fixture):
@@ -1433,6 +1527,58 @@ def test_set_data_sync_synced_properties(data_fixture):
    assert properties[2].field_id == fields[2].id


@pytest.mark.django_db
@responses.activate
def test_set_data_sync_synced_properties_correctly_removing_field(data_fixture):
    responses.add(
        responses.GET,
        "https://baserow.io/ical.ics",
        status=200,
        body=ICAL_FEED_WITH_TWO_ITEMS,
    )

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="ical_calendar",
        synced_properties=["uid", "dtstart"],
        ical_url="https://baserow.io/ical.ics",
    )

    with connection.cursor() as cursor:
        cursor.execute(
            f"""
            SELECT COUNT(*) FROM information_schema.columns
            WHERE table_name = 'database_table_{data_sync.table_id}'"""
        )
        before_number_of_columns = cursor.fetchone()[0]

    handler.set_data_sync_synced_properties(
        user=user,
        data_sync=data_sync,
        synced_properties=["uid"],
    )

    assert Field.objects_and_trash.filter(table=data_sync.table).count() == 1
    assert TrashEntry.objects.all().count() == 0

    with connection.cursor() as cursor:
        cursor.execute(
            f"""
            SELECT COUNT(*) FROM information_schema.columns
            WHERE table_name = 'database_table_{data_sync.table_id}'"""
        )
        after_number_of_columns = cursor.fetchone()[0]

    assert after_number_of_columns == before_number_of_columns - 1


@pytest.mark.django_db
@responses.activate
def test_delete_sync_data_sync_table(data_fixture):
@@ -16,6 +16,7 @@ from baserow.contrib.database.data_sync.models import PostgreSQLDataSync
from baserow.contrib.database.data_sync.postgresql_data_sync_type import (
    TextPostgreSQLSyncProperty,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import NumberField
from baserow.core.db import specific_iterator

@@ -203,8 +204,6 @@ def test_sync_postgresql_data_sync(data_fixture, create_postgresql_test_table):
    handler.sync_data_sync_table(user=user, data_sync=data_sync)

    fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))
    for f in fields:
        print(f.name)
    id_field = fields[0]
    text_field = fields[1]
    char_field = fields[2]
@@ -617,3 +616,204 @@ def test_postgresql_data_sync_initial_table_limit(

    assert data_sync.last_sync is None
    assert data_sync.last_error == "The table can't contain more than 1 records."


@pytest.mark.django_db(transaction=True)
def test_get_data_sync(data_fixture, api_client, create_postgresql_test_table):
    default_database = settings.DATABASES["default"]
    user, token = data_fixture.create_user_and_token()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()
    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="postgresql",
        synced_properties=["id"],
        postgresql_host=default_database["HOST"],
        postgresql_username=default_database["USER"],
        postgresql_password=default_database["PASSWORD"],
        postgresql_port=default_database["PORT"],
        postgresql_database=default_database["NAME"],
        postgresql_table=create_postgresql_test_table,
        postgresql_sslmode=default_database["OPTIONS"].get("sslmode", "prefer"),
    )

    url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
    response = api_client.get(
        url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert "postgresql_password" not in response_json


@pytest.mark.django_db(transaction=True)
def test_update_data_sync_table_changing_primary_key(
    data_fixture, create_postgresql_test_table
):
    default_database = settings.DATABASES["default"]
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="postgresql",
        synced_properties=["id"],
        postgresql_host=default_database["HOST"],
        postgresql_username=default_database["USER"],
        postgresql_password=default_database["PASSWORD"],
        postgresql_port=default_database["PORT"],
        postgresql_database=default_database["NAME"],
        postgresql_table=create_postgresql_test_table,
        postgresql_sslmode=default_database["OPTIONS"].get("sslmode", "prefer"),
    )

    handler.sync_data_sync_table(user=user, data_sync=data_sync)

    with connection.cursor() as cursor:
        cursor.execute(
            f"""
            ALTER TABLE {create_postgresql_test_table}
            RENAME COLUMN id TO new_id;
            """
        )

    with transaction.atomic():
        handler.sync_data_sync_table(user=user, data_sync=data_sync)

    fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))
    assert len(fields) == 1
    assert fields[0].name == "new_id"
    assert fields[0].primary is True


@pytest.mark.django_db(transaction=True)
def test_update_data_sync_table_changing_primary_key_with_different_primary_field(
    data_fixture, create_postgresql_test_table
):
    default_database = settings.DATABASES["default"]
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="postgresql",
        synced_properties=["id", "text_col"],
        postgresql_host=default_database["HOST"],
        postgresql_username=default_database["USER"],
        postgresql_password=default_database["PASSWORD"],
        postgresql_port=default_database["PORT"],
        postgresql_database=default_database["NAME"],
        postgresql_table=create_postgresql_test_table,
        postgresql_sslmode=default_database["OPTIONS"].get("sslmode", "prefer"),
    )

    handler.sync_data_sync_table(user=user, data_sync=data_sync)

    fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))

    with transaction.atomic():
        FieldHandler().change_primary_field(
            user=user, table=data_sync.table, new_primary_field=fields[1]
        )

    with connection.cursor() as cursor:
        cursor.execute(
            f"""
            ALTER TABLE {create_postgresql_test_table}
            RENAME COLUMN id TO new_id;
            """
        )

    with transaction.atomic():
        handler.sync_data_sync_table(user=user, data_sync=data_sync)

    fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))
    assert len(fields) == 2
    assert fields[0].name == "text_col"
    assert fields[0].primary is True
    assert fields[1].name == "new_id"
    assert fields[1].primary is False


@pytest.mark.django_db(transaction=True)
def test_update_data_sync_table_changing_table_with_different_primary_key(
    data_fixture, create_postgresql_test_table
):
    default_database = settings.DATABASES["default"]
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)

    handler = DataSyncHandler()

    data_sync = handler.create_data_sync_table(
        user=user,
        database=database,
        table_name="Test",
        type_name="postgresql",
        synced_properties=["id", "text_col"],
        postgresql_host=default_database["HOST"],
        postgresql_username=default_database["USER"],
        postgresql_password=default_database["PASSWORD"],
        postgresql_port=default_database["PORT"],
        postgresql_database=default_database["NAME"],
        postgresql_table=create_postgresql_test_table,
        postgresql_sslmode=default_database["OPTIONS"].get("sslmode", "prefer"),
    )

    handler.sync_data_sync_table(user=user, data_sync=data_sync)

    with connection.cursor() as cursor:
        cursor.execute(
            f"""
            CREATE TABLE {create_postgresql_test_table}_2 (
                car_id SERIAL PRIMARY KEY,
                make VARCHAR(50) NOT NULL,
                model VARCHAR(50) NOT NULL
            );
            """
        )
        cursor.execute(
            f"""
            INSERT INTO {create_postgresql_test_table}_2
            (make, model)
            VALUES
            ('make 1', 'model 2'),
            ('make 2', 'model 2'),
            ('make 3', 'model 3')
            """
        )

    with transaction.atomic():
        data_sync = handler.update_data_sync_table(
|
||||
user=user,
|
||||
data_sync=data_sync,
|
||||
synced_properties=["car_id"],
|
||||
postgresql_table=f"{create_postgresql_test_table}_2",
|
||||
)
|
||||
handler.sync_data_sync_table(user=user, data_sync=data_sync)
|
||||
|
||||
fields = specific_iterator(data_sync.table.field_set.all().order_by("id"))
|
||||
assert len(fields) == 1
|
||||
assert fields[0].name == "car_id"
|
||||
assert fields[0].primary is True
|
||||
|
||||
model = data_sync.table.get_model()
|
||||
rows = model.objects.all()
|
||||
assert len(rows) == 3
|
||||
|
||||
assert getattr(rows[0], f"field_{fields[0].id}") == Decimal("1")
|
||||
assert getattr(rows[1], f"field_{fields[0].id}") == Decimal("2")
|
||||
assert getattr(rows[2], f"field_{fields[0].id}") == Decimal("3")
|
||||
|
|
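The tests above exercise the new handler-level update flow. Below is a condensed sketch of that flow, assuming Baserow's backend is importable and an existing `user`, `data_sync`, and a reachable source table named "cars" are available (all placeholders, not part of this commit):

```python
# Minimal sketch of the update flow the tests above exercise; `user`,
# `data_sync`, and the "cars" table are placeholder assumptions.
from baserow.contrib.database.data_sync.handler import DataSyncHandler

handler = DataSyncHandler()

# Point the existing data sync at a different source table and change which
# properties stay synced.
data_sync = handler.update_data_sync_table(
    user=user,
    data_sync=data_sync,
    synced_properties=["car_id"],
    postgresql_table="cars",
)

# Re-sync so the Baserow table reflects the new source table and fields.
handler.sync_data_sync_table(user=user, data_sync=data_sync)
```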
@@ -0,0 +1,7 @@
{
    "type": "feature",
    "message": "Introduced ability to configure existing data sync.",
    "issue_number": 3072,
    "bullet_points": [],
    "created_at": "2024-11-18"
}
@@ -275,7 +275,8 @@ class LocalBaserowTableDataSyncType(DataSyncType):
    def get_properties(self, instance) -> List[DataSyncProperty]:
        table = self._get_table(instance)
        # The `table_id` is not set if when just listing the properties using the
        # `DataSyncPropertiesView` endpoint, but it will be set when creating the view.
        # `DataSyncTypePropertiesView` endpoint, but it will be set when creating the
        # view.
        if instance.table_id:
            LicenseHandler.raise_if_workspace_doesnt_have_feature(
                DATA_SYNC, instance.table.database.workspace
@@ -157,11 +157,17 @@ class GitHubIssuesDataSyncType(DataSyncType):
        "github_issues_repo",
        "github_issues_api_token",
    ]
    serializer_field_names = [
    request_serializer_field_names = [
        "github_issues_owner",
        "github_issues_repo",
        "github_issues_api_token",
    ]
    # The `github_issues_api_token` should not be included because it's a secret value
    # that must only be possible to set and not get.
    serializer_field_names = [
        "github_issues_owner",
        "github_issues_repo",
    ]

    def prepare_sync_job_values(self, instance):
        # Raise the error so that the job doesn't start and the user is informed with
@@ -172,7 +178,8 @@ class GitHubIssuesDataSyncType(DataSyncType):

    def get_properties(self, instance) -> List[DataSyncProperty]:
        # The `table_id` is not set if when just listing the properties using the
        # `DataSyncPropertiesView` endpoint, but it will be set when creating the view.
        # `DataSyncTypePropertiesView` endpoint, but it will be set when creating the
        # view.
        if instance.table_id:
            LicenseHandler.raise_if_workspace_doesnt_have_feature(
                DATA_SYNC, instance.table.database.workspace
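The same request/response split recurs in the GitLab and Jira types below: the secret token is accepted as input but never serialized back out. A minimal sketch of the pattern in plain Python; `ExampleDataSyncType` and its field names are hypothetical, and the registry/serializer wiring Baserow adds around these attributes is omitted:

```python
# Illustrative only: mirrors the pattern used by the data sync types in this
# commit, without importing Baserow. The class and field names are hypothetical.
class ExampleDataSyncType:
    type = "example"

    # Fields that may be written when creating or updating the data sync; the
    # secret token is accepted as input here.
    request_serializer_field_names = [
        "example_owner",
        "example_repo",
        "example_api_token",  # secret: write-only
    ]

    # Fields returned in responses; the token is deliberately left out so a
    # GET on the data sync can never leak it.
    serializer_field_names = [
        "example_owner",
        "example_repo",
    ]


assert "example_api_token" not in ExampleDataSyncType.serializer_field_names
```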
@@ -202,11 +202,17 @@ class GitLabIssuesDataSyncType(DataSyncType):
        "gitlab_project_id",
        "gitlab_access_token",
    ]
    serializer_field_names = [
    request_serializer_field_names = [
        "gitlab_url",
        "gitlab_project_id",
        "gitlab_access_token",
    ]
    # The `gitlab_access_token` should not be included because it's a secret value
    # that must only be possible to set and not get.
    serializer_field_names = [
        "gitlab_url",
        "gitlab_project_id",
    ]

    def prepare_sync_job_values(self, instance):
        # Raise the error so that the job doesn't start and the user is informed with
@@ -217,7 +223,8 @@ class GitLabIssuesDataSyncType(DataSyncType):

    def get_properties(self, instance) -> List[DataSyncProperty]:
        # The `table_id` is not set if when just listing the properties using the
        # `DataSyncPropertiesView` endpoint, but it will be set when creating the view.
        # `DataSyncTypePropertiesView` endpoint, but it will be set when creating the
        # view.
        if instance.table_id:
            LicenseHandler.raise_if_workspace_doesnt_have_feature(
                DATA_SYNC, instance.table.database.workspace
@@ -156,12 +156,19 @@ class JiraIssuesDataSyncType(DataSyncType):
    type = "jira_issues"
    model_class = JiraIssuesDataSync
    allowed_fields = ["jira_url", "jira_project_key", "jira_username", "jira_api_token"]
    serializer_field_names = [
    request_serializer_field_names = [
        "jira_url",
        "jira_project_key",
        "jira_username",
        "jira_api_token",
    ]
    # The `jira_api_token` should not be included because it's a secret value that must
    # only be possible to set and not get.
    serializer_field_names = [
        "jira_url",
        "jira_project_key",
        "jira_username",
    ]

    def prepare_sync_job_values(self, instance):
        # Raise the error so that the job doesn't start and the user is informed with
@@ -172,7 +179,8 @@ class JiraIssuesDataSyncType(DataSyncType):

    def get_properties(self, instance) -> List[DataSyncProperty]:
        # The `table_id` is not set if when just listing the properties using the
        # `DataSyncPropertiesView` endpoint, but it will be set when creating the view.
        # `DataSyncTypePropertiesView` endpoint, but it will be set when creating the
        # view.
        if instance.table_id:
            LicenseHandler.raise_if_workspace_doesnt_have_feature(
                DATA_SYNC, instance.table.database.workspace
@@ -60,7 +60,11 @@ from baserow.contrib.builder.workflow_actions.operations import (
from baserow.contrib.database.airtable.operations import (
    RunAirtableImportJobOperationType,
)
from baserow.contrib.database.data_sync.operations import SyncTableOperationType
from baserow.contrib.database.data_sync.operations import (
    GetIncludingPublicValuesOperationType,
    ListPropertiesOperationType,
    SyncTableOperationType,
)
from baserow.contrib.database.export.operations import ExportTableOperationType
from baserow.contrib.database.fields.operations import (
    CreateFieldOperationType,
@@ -433,6 +437,8 @@ default_roles[BUILDER_ROLE_UID].extend(
        UpdateBuilderWorkflowActionOperationType,
        OrderBuilderWorkflowActionOperationType,
        SyncTableOperationType,
        ListPropertiesOperationType,
        GetIncludingPublicValuesOperationType,
    ]
)
default_roles[ADMIN_ROLE_UID].extend(
@ -702,3 +702,48 @@ def test_async_sync_data_sync_table_without_license(
|
|||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_402_PAYMENT_REQUIRED
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_get_data_sync(enterprise_data_fixture, api_client):
|
||||
enterprise_data_fixture.enable_enterprise()
|
||||
user, token = enterprise_data_fixture.create_user_and_token()
|
||||
database = enterprise_data_fixture.create_database_application(user=user)
|
||||
|
||||
handler = DataSyncHandler()
|
||||
data_sync = handler.create_data_sync_table(
|
||||
user=user,
|
||||
database=database,
|
||||
table_name="Test",
|
||||
type_name="github_issues",
|
||||
synced_properties=["id"],
|
||||
github_issues_owner="baserow_owner",
|
||||
github_issues_repo="baserow_repo",
|
||||
github_issues_api_token="test",
|
||||
)
|
||||
|
||||
url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response_json == {
|
||||
"id": data_sync.id,
|
||||
"type": "github_issues",
|
||||
"synced_properties": [
|
||||
{
|
||||
"field_id": data_sync.table.field_set.all().first().id,
|
||||
"key": "id",
|
||||
"unique_primary": True,
|
||||
}
|
||||
],
|
||||
"last_sync": None,
|
||||
"last_error": None,
|
||||
# The `github_issues_api_token` should not be in here.
|
||||
"github_issues_owner": "baserow_owner",
|
||||
"github_issues_repo": "baserow_repo",
|
||||
}
|
||||
|
|
|
@ -773,3 +773,48 @@ def test_async_sync_data_sync_table_without_license(
|
|||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_402_PAYMENT_REQUIRED
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_get_data_sync(enterprise_data_fixture, api_client):
|
||||
enterprise_data_fixture.enable_enterprise()
|
||||
user, token = enterprise_data_fixture.create_user_and_token()
|
||||
database = enterprise_data_fixture.create_database_application(user=user)
|
||||
|
||||
handler = DataSyncHandler()
|
||||
data_sync = handler.create_data_sync_table(
|
||||
user=user,
|
||||
database=database,
|
||||
table_name="Test",
|
||||
type_name="gitlab_issues",
|
||||
synced_properties=["id"],
|
||||
gitlab_url="https://gitlab.com",
|
||||
gitlab_project_id="1",
|
||||
gitlab_access_token="test",
|
||||
)
|
||||
|
||||
url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response_json == {
|
||||
"id": data_sync.id,
|
||||
"type": "gitlab_issues",
|
||||
"synced_properties": [
|
||||
{
|
||||
"field_id": data_sync.table.field_set.all().first().id,
|
||||
"key": "id",
|
||||
"unique_primary": True,
|
||||
}
|
||||
],
|
||||
"last_sync": None,
|
||||
"last_error": None,
|
||||
# The `gitlab_access_token` should not be in here.
|
||||
"gitlab_url": "https://gitlab.com",
|
||||
"gitlab_project_id": "1",
|
||||
}
|
||||
|
|
|
@ -1092,3 +1092,50 @@ def test_async_sync_data_sync_table_without_license(
|
|||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_402_PAYMENT_REQUIRED
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_get_data_sync(enterprise_data_fixture, api_client):
|
||||
enterprise_data_fixture.enable_enterprise()
|
||||
user, token = enterprise_data_fixture.create_user_and_token()
|
||||
database = enterprise_data_fixture.create_database_application(user=user)
|
||||
|
||||
handler = DataSyncHandler()
|
||||
data_sync = handler.create_data_sync_table(
|
||||
user=user,
|
||||
database=database,
|
||||
table_name="Test",
|
||||
type_name="jira_issues",
|
||||
synced_properties=["jira_id"],
|
||||
jira_url="https://test.atlassian.net",
|
||||
jira_project_key="",
|
||||
jira_username="test@test.nl",
|
||||
jira_api_token="test_token",
|
||||
)
|
||||
|
||||
url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
|
||||
response = api_client.get(
|
||||
url,
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response_json == {
|
||||
"id": data_sync.id,
|
||||
"type": "jira_issues",
|
||||
"synced_properties": [
|
||||
{
|
||||
"field_id": data_sync.table.field_set.all().first().id,
|
||||
"key": "jira_id",
|
||||
"unique_primary": True,
|
||||
}
|
||||
],
|
||||
"last_sync": None,
|
||||
"last_error": None,
|
||||
# The `jira_api_token` should not be in here.
|
||||
"jira_url": "https://test.atlassian.net",
|
||||
"jira_project_key": "",
|
||||
"jira_username": "test@test.nl",
|
||||
}
|
||||
|
|
|
@ -1216,3 +1216,52 @@ def test_sync_data_sync_table_single_select_get_metadata_delete(
|
|||
assert metadata == {
|
||||
"select_options_mapping": {str(source_option_b.id): target_select_options[0].id}
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_change_source_table_with_changing_synced_fields(
|
||||
enterprise_data_fixture, api_client
|
||||
):
|
||||
enterprise_data_fixture.enable_enterprise()
|
||||
user, token = enterprise_data_fixture.create_user_and_token()
|
||||
database = enterprise_data_fixture.create_database_application(user=user)
|
||||
|
||||
source_table = enterprise_data_fixture.create_database_table(
|
||||
user=user, name="Source", database=database
|
||||
)
|
||||
source_text_field = enterprise_data_fixture.create_text_field(
|
||||
table=source_table, name="Text"
|
||||
)
|
||||
|
||||
source_2_table = enterprise_data_fixture.create_database_table(
|
||||
user=user, name="Source 2", database=database
|
||||
)
|
||||
source_2_text_field = enterprise_data_fixture.create_text_field(
|
||||
table=source_2_table, name="Text"
|
||||
)
|
||||
|
||||
handler = DataSyncHandler()
|
||||
data_sync = handler.create_data_sync_table(
|
||||
user=user,
|
||||
database=database,
|
||||
table_name="Test",
|
||||
type_name="local_baserow_table",
|
||||
synced_properties=["id", f"field_{source_text_field.id}"],
|
||||
source_table_id=source_table.id,
|
||||
)
|
||||
|
||||
url = reverse("api:database:data_sync:item", kwargs={"data_sync_id": data_sync.id})
|
||||
response = api_client.patch(
|
||||
url,
|
||||
{
|
||||
"source_table_id": source_2_table.id,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
response_json = response.json()
|
||||
|
||||
# Expect the other field to be removed.
|
||||
assert len(response_json["synced_properties"]) == 1
|
||||
assert response_json["synced_properties"][0]["key"] == "id"
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
<FormInput
|
||||
v-model="values.github_issues_owner"
|
||||
:error="fieldHasErrors('github_issues_owner')"
|
||||
:disabled="disabled"
|
||||
size="large"
|
||||
@blur="$v.values.github_issues_owner.$touch()"
|
||||
/>
|
||||
|
@ -37,6 +38,7 @@
|
|||
<FormInput
|
||||
v-model="values.github_issues_repo"
|
||||
:error="fieldHasErrors('github_issues_repo')"
|
||||
:disabled="disabled"
|
||||
size="large"
|
||||
@blur="$v.values.github_issues_repo.$touch()"
|
||||
/>
|
||||
|
@ -58,10 +60,22 @@
|
|||
required
|
||||
:helper-text="$t('githubIssuesDataSync.apiTokenHelper')"
|
||||
small-label
|
||||
:protected-edit="update"
|
||||
@enabled-protected-edit="allowedValues.push('github_issues_api_token')"
|
||||
@disable-protected-edit="
|
||||
;[
|
||||
allowedValues.splice(
|
||||
allowedValues.indexOf('github_issues_api_token'),
|
||||
1
|
||||
),
|
||||
delete values['github_issues_api_token'],
|
||||
]
|
||||
"
|
||||
>
|
||||
<FormInput
|
||||
v-model="values.github_issues_api_token"
|
||||
:error="fieldHasErrors('github_issues_api_token')"
|
||||
:disabled="disabled"
|
||||
size="large"
|
||||
@blur="$v.values.github_issues_api_token.$touch()"
|
||||
/>
|
||||
|
@ -80,32 +94,49 @@
|
|||
</template>
|
||||
|
||||
<script>
|
||||
import { required } from 'vuelidate/lib/validators'
|
||||
import { required, requiredIf } from 'vuelidate/lib/validators'
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
export default {
|
||||
name: 'GitHubIssuesDataSyncForm',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const allowedValues = ['github_issues_owner', 'github_issues_repo']
|
||||
if (!this.update) {
|
||||
allowedValues.push('github_issues_api_token')
|
||||
}
|
||||
return {
|
||||
allowedValues: [
|
||||
'github_issues_owner',
|
||||
'github_issues_repo',
|
||||
'github_issues_api_token',
|
||||
],
|
||||
allowedValues: ['github_issues_owner', 'github_issues_repo'],
|
||||
values: {
|
||||
github_issues_owner: '',
|
||||
github_issues_repo: '',
|
||||
github_issues_api_token: '',
|
||||
},
|
||||
}
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
github_issues_owner: { required },
|
||||
github_issues_repo: { required },
|
||||
github_issues_api_token: { required },
|
||||
},
|
||||
validations() {
|
||||
return {
|
||||
values: {
|
||||
github_issues_owner: { required },
|
||||
github_issues_repo: { required },
|
||||
github_issues_api_token: {
|
||||
required: requiredIf(() => {
|
||||
return this.allowedValues.includes('github_issues_api_token')
|
||||
}),
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
v-model="values.gitlab_url"
|
||||
size="large"
|
||||
:error="fieldHasErrors('gitlab_url')"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
@blur="$v.values.gitlab_url.$touch()"
|
||||
/>
|
||||
|
@ -41,6 +42,7 @@
|
|||
<FormInput
|
||||
v-model="values.gitlab_project_id"
|
||||
:error="fieldHasErrors('gitlab_project_id')"
|
||||
:disabled="disabled"
|
||||
size="large"
|
||||
@blur="$v.values.gitlab_project_id.$touch()"
|
||||
/>
|
||||
|
@ -63,10 +65,19 @@
|
|||
class="margin-bottom-2"
|
||||
:helper-text="$t('gitlabIssuesDataSync.accessTokenHelper')"
|
||||
small-label
|
||||
:protected-edit="update"
|
||||
@enabled-protected-edit="allowedValues.push('gitlab_access_token')"
|
||||
@disable-protected-edit="
|
||||
;[
|
||||
allowedValues.splice(allowedValues.indexOf('gitlab_access_token'), 1),
|
||||
delete values['gitlab_access_token'],
|
||||
]
|
||||
"
|
||||
>
|
||||
<FormInput
|
||||
v-model="values.gitlab_access_token"
|
||||
:error="fieldHasErrors('gitlab_access_token')"
|
||||
:disabled="disabled"
|
||||
size="large"
|
||||
@blur="$v.values.gitlab_access_token.$touch()"
|
||||
/>
|
||||
|
@ -85,28 +96,49 @@
|
|||
</template>
|
||||
|
||||
<script>
|
||||
import { required, url } from 'vuelidate/lib/validators'
|
||||
import { required, requiredIf, url } from 'vuelidate/lib/validators'
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
export default {
|
||||
name: 'GitLabIssuesDataSyncForm',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const allowedValues = ['gitlab_url', 'gitlab_project_id']
|
||||
if (!this.update) {
|
||||
allowedValues.push('gitlab_access_token')
|
||||
}
|
||||
return {
|
||||
allowedValues: ['gitlab_url', 'gitlab_project_id', 'gitlab_access_token'],
|
||||
allowedValues,
|
||||
values: {
|
||||
gitlab_url: 'https://gitlab.com',
|
||||
gitlab_project_id: '',
|
||||
gitlab_access_token: '',
|
||||
},
|
||||
}
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
gitlab_url: { required, url },
|
||||
gitlab_project_id: { required },
|
||||
gitlab_access_token: { required },
|
||||
},
|
||||
validations() {
|
||||
return {
|
||||
values: {
|
||||
gitlab_url: { required, url },
|
||||
gitlab_project_id: { required },
|
||||
gitlab_access_token: {
|
||||
required: requiredIf(() => {
|
||||
return this.allowedValues.includes('gitlab_access_token')
|
||||
}),
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
v-model="values.jira_url"
|
||||
size="large"
|
||||
:error="fieldHasErrors('jira_url')"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
@blur="$v.values.jira_url.$touch()"
|
||||
/>
|
||||
|
@ -39,6 +40,7 @@
|
|||
v-model="values.jira_username"
|
||||
size="large"
|
||||
:error="fieldHasErrors('jira_username')"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
@blur="$v.values.jira_username.$touch()"
|
||||
/>
|
||||
|
@ -59,6 +61,14 @@
|
|||
required
|
||||
small-label
|
||||
class="margin-bottom-2"
|
||||
:protected-edit="update"
|
||||
@enabled-protected-edit="allowedValues.push('jira_api_token')"
|
||||
@disable-protected-edit="
|
||||
;[
|
||||
allowedValues.splice(allowedValues.indexOf('jira_api_token'), 1),
|
||||
delete values['jira_api_token'],
|
||||
]
|
||||
"
|
||||
>
|
||||
<template #label>{{ $t('jiraIssuesDataSync.apiToken') }}</template>
|
||||
<FormInput
|
||||
|
@ -66,6 +76,7 @@
|
|||
v-model="values.jira_api_token"
|
||||
size="large"
|
||||
:error="fieldHasErrors('jira_api_token')"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
@blur="$v.values.jira_api_token.$touch()"
|
||||
/>
|
||||
|
@ -83,7 +94,6 @@
|
|||
|
||||
<FormGroup
|
||||
:helper-text="$t('jiraIssuesDataSync.projectKeyHelper')"
|
||||
required
|
||||
small-label
|
||||
>
|
||||
<template #label>{{ $t('jiraIssuesDataSync.projectKey') }}</template>
|
||||
|
@ -91,6 +101,7 @@
|
|||
ref="jira_project_key"
|
||||
v-model="values.jira_project_key"
|
||||
size="large"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
/>
|
||||
</FormGroup>
|
||||
|
@ -98,34 +109,50 @@
|
|||
</template>
|
||||
|
||||
<script>
|
||||
import { required, url } from 'vuelidate/lib/validators'
|
||||
import { required, url, requiredIf } from 'vuelidate/lib/validators'
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
export default {
|
||||
name: 'JiraIssuesDataSyncForm',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const allowedValues = ['jira_url', 'jira_username', 'jira_project_key']
|
||||
if (!this.update) {
|
||||
allowedValues.push('jira_api_token')
|
||||
}
|
||||
return {
|
||||
allowedValues: [
|
||||
'jira_url',
|
||||
'jira_username',
|
||||
'jira_api_token',
|
||||
'jira_project_key',
|
||||
],
|
||||
allowedValues,
|
||||
values: {
|
||||
jira_url: '',
|
||||
jira_username: '',
|
||||
jira_api_token: '',
|
||||
jira_project_key: '',
|
||||
},
|
||||
}
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
jira_url: { required, url },
|
||||
jira_username: { required },
|
||||
jira_api_token: { required },
|
||||
},
|
||||
validations() {
|
||||
return {
|
||||
values: {
|
||||
jira_url: { required, url },
|
||||
jira_username: { required },
|
||||
jira_api_token: {
|
||||
required: requiredIf(() => {
|
||||
return this.allowedValues.includes('jira_api_token')
|
||||
}),
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -15,7 +15,11 @@
|
|||
:label="$t('localBaserowTableDataSync.workspace')"
|
||||
required
|
||||
>
|
||||
<Dropdown :value="selectedWorkspaceId" @input="workspaceChanged">
|
||||
<Dropdown
|
||||
:value="selectedWorkspaceId"
|
||||
:disabled="disabled"
|
||||
@input="workspaceChanged"
|
||||
>
|
||||
<DropdownItem
|
||||
v-for="workspace in workspaces"
|
||||
:key="workspace.id"
|
||||
|
@ -31,7 +35,11 @@
|
|||
:label="$t('localBaserowTableDataSync.database')"
|
||||
required
|
||||
>
|
||||
<Dropdown :value="selectedDatabaseId" @input="databaseChanged">
|
||||
<Dropdown
|
||||
:value="selectedDatabaseId"
|
||||
:disabled="disabled"
|
||||
@input="databaseChanged"
|
||||
>
|
||||
<DropdownItem
|
||||
v-for="database in databases"
|
||||
:key="database.id"
|
||||
|
@ -51,6 +59,7 @@
|
|||
<Dropdown
|
||||
v-model="values.source_table_id"
|
||||
:error="fieldHasErrors('source_table_id')"
|
||||
:disabled="disabled"
|
||||
@input="$v.values.source_table_id.$touch()"
|
||||
>
|
||||
<DropdownItem
|
||||
|
@ -86,6 +95,18 @@ import { DatabaseApplicationType } from '@baserow/modules/database/applicationTy
|
|||
export default {
|
||||
name: 'LocalBaserowTableDataSync',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
allowedValues: ['source_table_id'],
|
||||
|
@ -129,6 +150,28 @@ export default {
|
|||
userName: 'auth/getName',
|
||||
}),
|
||||
},
|
||||
mounted() {
|
||||
// If the source table id is set, the database and workspace ID must be selected
|
||||
// in the dropdown.
|
||||
if (this.values.source_table_id) {
|
||||
const databaseType = DatabaseApplicationType.getType()
|
||||
for (const application of this.$store.getters['application/getAll']) {
|
||||
if (application.type !== databaseType) {
|
||||
continue
|
||||
}
|
||||
|
||||
const foundTable = application.tables.find(
|
||||
({ id }) => id === this.values.source_table_id
|
||||
)
|
||||
|
||||
if (foundTable) {
|
||||
this.selectedWorkspaceId = application.workspace.id
|
||||
this.selectedDatabaseId = application.id
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
source_table_id: { required, numeric },
|
||||
|
|
|
@@ -47,7 +47,8 @@
    "deactivate": "Deactivate",
    "download": "Download",
    "copyToClipboard": "Copy to clipboard",
    "reset": "Reset"
    "reset": "Reset",
    "hide": "Hide"
  },
  "adminType": {
    "settings": "Settings",
@ -45,12 +45,20 @@
|
|||
/></span>
|
||||
</span>
|
||||
|
||||
<div class="control__wrapper">
|
||||
<div v-if="protectedEdit && !protectedEditValue">
|
||||
<a @click="enableProtectedEdit">{{ $t('formGroup.protectedField') }}</a>
|
||||
</div>
|
||||
<div v-else class="control__wrapper">
|
||||
<div
|
||||
class="control__elements"
|
||||
:class="{ 'control__elements--flex': $slots['after-input'] }"
|
||||
>
|
||||
<div class="flex-grow-1"><slot /></div>
|
||||
<div v-if="protectedEdit && protectedEditValue" class="margin-top-1">
|
||||
<a @click="disableProtectedEdit">{{
|
||||
$t('formGroup.cancelProtectedField')
|
||||
}}</a>
|
||||
</div>
|
||||
<slot name="after-input"></slot>
|
||||
</div>
|
||||
|
||||
|
@ -162,6 +170,20 @@ export default {
|
|||
required: false,
|
||||
default: '',
|
||||
},
|
||||
/**
|
||||
* If set to `true`, then it's not possible to change the value unless the user
|
||||
* clicks a link first.
|
||||
*/
|
||||
protectedEdit: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
protectedEditValue: false,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
hasError() {
|
||||
|
@ -194,5 +216,15 @@ export default {
|
|||
)
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
enableProtectedEdit() {
|
||||
this.protectedEditValue = true
|
||||
this.$emit('enabled-protected-edit')
|
||||
},
|
||||
disableProtectedEdit() {
|
||||
this.protectedEditValue = false
|
||||
this.$emit('disable-protected-edit')
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@@ -802,5 +802,9 @@
  },
  "dataExplorerNode": {
    "showMore": "Show more repetitions"
  },
  "formGroup": {
    "protectedField": "This field is protected. Click to change.",
    "cancelProtectedField": "Cancel change"
  }
}
@ -0,0 +1,83 @@
|
|||
<template>
|
||||
<Modal :left-sidebar="true" :left-sidebar-scrollable="true">
|
||||
<template #sidebar>
|
||||
<div class="modal-sidebar__head">
|
||||
<div class="modal-sidebar__head-name">
|
||||
{{ table.name }}
|
||||
</div>
|
||||
</div>
|
||||
<ul class="modal-sidebar__nav">
|
||||
<li v-for="page in pages" :key="page.type">
|
||||
<a
|
||||
class="modal-sidebar__nav-link"
|
||||
:class="{ active: selectedPage === page.type }"
|
||||
@click="setPage(page.type)"
|
||||
>
|
||||
<i class="modal-sidebar__nav-icon" :class="page.iconClass"></i>
|
||||
{{ page.name }}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</template>
|
||||
<template #content>
|
||||
<component
|
||||
:is="selectedPageObject.component"
|
||||
:database="database"
|
||||
:table="table"
|
||||
@hide="hide()"
|
||||
></component>
|
||||
</template>
|
||||
</Modal>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
|
||||
import ConfigureDataSyncVisibleFields from '@baserow/modules/database/components/dataSync/ConfigureDataSyncVisibleFields'
|
||||
import ConfigureDataSyncSettings from '@baserow/modules/database/components/dataSync/ConfigureDataSyncSettings'
|
||||
|
||||
export default {
|
||||
name: 'ConfigureDataSyncModal',
|
||||
components: { ConfigureDataSyncVisibleFields, ConfigureDataSyncSettings },
|
||||
mixins: [modal],
|
||||
props: {
|
||||
database: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
table: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
pages: [
|
||||
{
|
||||
type: 'visible-fields',
|
||||
name: this.$t('configureDataSyncModal.syncedFields'),
|
||||
iconClass: 'iconoir-switch-on',
|
||||
component: ConfigureDataSyncVisibleFields,
|
||||
},
|
||||
{
|
||||
type: 'settings',
|
||||
name: this.$t('configureDataSyncModal.syncSettings'),
|
||||
iconClass: 'iconoir-settings',
|
||||
component: ConfigureDataSyncSettings,
|
||||
},
|
||||
],
|
||||
selectedPage: 'visible-fields',
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
selectedPageObject() {
|
||||
return this.pages.find((page) => page.type === this.selectedPage)
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
setPage(page) {
|
||||
this.selectedPage = page
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -0,0 +1,128 @@
|
|||
<template>
|
||||
<div>
|
||||
<h2 class="box__title">{{ $t('configureDataSyncSettings.title') }}</h2>
|
||||
<div v-if="fetchLoading">
|
||||
<div class="loading"></div>
|
||||
</div>
|
||||
<div v-if="!fetchLoaded">
|
||||
<Error :error="error"></Error>
|
||||
</div>
|
||||
<div v-else-if="fetchLoaded">
|
||||
<component
|
||||
:is="dataSyncComponent"
|
||||
ref="form"
|
||||
:default-values="dataSync"
|
||||
:update="true"
|
||||
:disabled="updateLoading || jobIsRunning"
|
||||
class="margin-bottom-2"
|
||||
@submitted="submitted"
|
||||
@values-changed="completed = false"
|
||||
/>
|
||||
|
||||
<Error :error="error"></Error>
|
||||
<div class="modal-progress__actions">
|
||||
<ProgressBar
|
||||
v-if="jobIsRunning || jobHasSucceeded"
|
||||
:value="job.progress_percentage"
|
||||
:status="jobHumanReadableState"
|
||||
/>
|
||||
<div class="align-right">
|
||||
<div class="flex">
|
||||
<Button
|
||||
v-if="completed"
|
||||
tag="a"
|
||||
type="secondary"
|
||||
size="large"
|
||||
@click="$emit('hide')"
|
||||
>{{ $t('action.hide') }}</Button
|
||||
>
|
||||
<template v-if="!completed">
|
||||
<Checkbox
|
||||
v-model="syncTableValue"
|
||||
:disabled="updateLoading || jobIsRunning"
|
||||
>{{ $t('configureDataSyncSettings.syncTable') }}</Checkbox
|
||||
>
|
||||
<Button
|
||||
type="primary"
|
||||
size="large"
|
||||
:loading="updateLoading || jobIsRunning"
|
||||
:disabled="updateLoading || jobIsRunning"
|
||||
@click="$refs.form.submit()"
|
||||
>
|
||||
{{ $t('action.save') }}
|
||||
</Button>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import dataSync from '@baserow/modules/database/mixins/dataSync'
|
||||
import DataSyncService from '@baserow/modules/database/services/dataSync'
|
||||
import TableForm from '@baserow/modules/database/components/table/TableForm.vue'
|
||||
|
||||
export default {
|
||||
name: 'ConfigureDataSyncSettings',
|
||||
components: { TableForm },
|
||||
mixins: [dataSync],
|
||||
props: {
|
||||
database: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
table: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
fetchLoading: false,
|
||||
fetchLoaded: false,
|
||||
dataSync: null,
|
||||
completed: false,
|
||||
syncTableValue: true,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
dataSyncComponent() {
|
||||
return this.$registry
|
||||
.get('dataSync', this.dataSync.type)
|
||||
.getFormComponent()
|
||||
},
|
||||
},
|
||||
mounted() {
|
||||
this.hideError()
|
||||
this.fetchDataSource(this.table)
|
||||
},
|
||||
methods: {
|
||||
onJobDone() {
|
||||
this.completed = true
|
||||
},
|
||||
async fetchDataSource(table) {
|
||||
this.fetchLoading = true
|
||||
|
||||
try {
|
||||
const { data } = await DataSyncService(this.$client).get(
|
||||
table.data_sync.id
|
||||
)
|
||||
this.dataSync = data
|
||||
this.fetchLoaded = true
|
||||
} catch (error) {
|
||||
this.handleError(error)
|
||||
} finally {
|
||||
this.fetchLoading = false
|
||||
}
|
||||
},
|
||||
async submitted(values) {
|
||||
await this.update(this.table, values, this.syncTableValue)
|
||||
if (!this.syncTableValue) {
|
||||
this.completed = true
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -0,0 +1,115 @@
|
|||
<template>
|
||||
<div>
|
||||
<h2 class="box__title">{{ $t('configureDataSyncVisibleFields.title') }}</h2>
|
||||
<div v-if="loadingProperties">
|
||||
<div class="loading"></div>
|
||||
</div>
|
||||
<div v-if="!loadedProperties">
|
||||
<Error :error="error"></Error>
|
||||
</div>
|
||||
<div v-else-if="loadedProperties">
|
||||
<FormGroup small-label>
|
||||
<template #label>
|
||||
{{ $t('configureDataSyncVisibleFields.fields') }}</template
|
||||
>
|
||||
<SwitchInput
|
||||
v-for="property in properties"
|
||||
:key="property.key"
|
||||
class="margin-top-2"
|
||||
small
|
||||
:value="syncedProperties.includes(property.key)"
|
||||
:disabled="property.unique_primary || updateLoading"
|
||||
@input=";[toggleVisibleField(property.key), (completed = false)]"
|
||||
>
|
||||
<i :class="getFieldTypeIconClass(property.field_type)"></i>
|
||||
{{ property.name }}</SwitchInput
|
||||
>
|
||||
</FormGroup>
|
||||
<Error :error="error"></Error>
|
||||
<div class="modal-progress__actions margin-top-2">
|
||||
<ProgressBar
|
||||
v-if="jobIsRunning || jobHasSucceeded"
|
||||
:value="job.progress_percentage"
|
||||
:status="jobHumanReadableState"
|
||||
/>
|
||||
<div class="align-right">
|
||||
<div class="flex">
|
||||
<Button
|
||||
v-if="completed"
|
||||
tag="a"
|
||||
type="secondary"
|
||||
size="large"
|
||||
@click="$emit('hide')"
|
||||
>{{ $t('action.hide') }}</Button
|
||||
>
|
||||
<template v-if="!completed">
|
||||
<Checkbox
|
||||
v-model="syncTableValue"
|
||||
:disabled="updateLoading || jobIsRunning"
|
||||
>{{ $t('configureDataSyncVisibleFields.syncTable') }}</Checkbox
|
||||
>
|
||||
<Button
|
||||
type="primary"
|
||||
size="large"
|
||||
:loading="updateLoading || jobIsRunning"
|
||||
:disabled="updateLoading || jobIsRunning"
|
||||
@click="submit"
|
||||
>
|
||||
{{ $t('action.save') }}
|
||||
</Button>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import dataSync from '@baserow/modules/database/mixins/dataSync'
|
||||
|
||||
export default {
|
||||
name: 'ConfigureDataSyncVisibleFields',
|
||||
mixins: [dataSync],
|
||||
props: {
|
||||
database: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
table: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
completed: false,
|
||||
syncTableValue: true,
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.hideError()
|
||||
this.syncedProperties = this.table.data_sync.synced_properties.map(
|
||||
(p) => p.key
|
||||
)
|
||||
this.fetchExistingProperties(this.table)
|
||||
},
|
||||
methods: {
|
||||
onJobDone() {
|
||||
this.completed = true
|
||||
},
|
||||
async submit() {
|
||||
await this.update(
|
||||
this.table,
|
||||
{
|
||||
synced_properties: this.syncedProperties,
|
||||
},
|
||||
this.syncTableValue
|
||||
)
|
||||
if (!this.syncTableValue) {
|
||||
this.completed = true
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -13,6 +13,7 @@
|
|||
v-model="values.ical_url"
|
||||
size="large"
|
||||
:error="fieldHasErrors('ical_url')"
|
||||
:disabled="disabled"
|
||||
@focus.once="$event.target.select()"
|
||||
@blur="$v.values.ical_url.$touch()"
|
||||
>
|
||||
|
@ -37,6 +38,18 @@ import form from '@baserow/modules/core/mixins/form'
|
|||
export default {
|
||||
name: 'ICalCalendarDataSync',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
allowedValues: ['ical_url'],
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
name: 'postgresql_password',
|
||||
translationPrefix: 'password',
|
||||
type: 'password',
|
||||
protectedEdit: true,
|
||||
},
|
||||
{
|
||||
name: 'postgresql_database',
|
||||
|
@ -32,7 +33,15 @@
|
|||
:error="fieldHasErrors(field.name)"
|
||||
required
|
||||
small-label
|
||||
:protected-edit="update && field.protectedEdit"
|
||||
class="margin-bottom-2"
|
||||
@enabled-protected-edit="allowedValues.push(field.name)"
|
||||
@disable-protected-edit="
|
||||
;[
|
||||
allowedValues.splice(allowedValues.indexOf(field.name), 1),
|
||||
delete values[field.name],
|
||||
]
|
||||
"
|
||||
>
|
||||
<template #label>{{
|
||||
$t(`postgreSQLDataSync.${field.translationPrefix}`)
|
||||
|
@ -42,6 +51,7 @@
|
|||
size="large"
|
||||
:type="field.type"
|
||||
:error="fieldHasErrors(field.name)"
|
||||
:disabled="disabled"
|
||||
@blur="$v.values[field.name].$touch()"
|
||||
>
|
||||
</FormInput>
|
||||
|
@ -66,6 +76,7 @@
|
|||
v-model="values.postgresql_port"
|
||||
size="large"
|
||||
:error="fieldHasErrors('postgresql_port')"
|
||||
:disabled="disabled"
|
||||
@blur="$v.values.postgresql_port.$touch()"
|
||||
>
|
||||
</FormInput>
|
||||
|
@ -92,7 +103,11 @@
|
|||
<div class="col col-7">
|
||||
<FormGroup required small-label class="margin-bottom-2">
|
||||
<template #label>{{ $t('postgreSQLDataSync.sslMode') }}</template>
|
||||
<Dropdown v-model="values.postgresql_sslmode" size="large">
|
||||
<Dropdown
|
||||
v-model="values.postgresql_sslmode"
|
||||
size="large"
|
||||
:disabled="disabled"
|
||||
>
|
||||
<DropdownItem
|
||||
v-for="option in sslModeOptions"
|
||||
:key="option"
|
||||
|
@ -107,29 +122,43 @@
|
|||
</template>
|
||||
|
||||
<script>
|
||||
import { required, numeric } from 'vuelidate/lib/validators'
|
||||
import { required, numeric, requiredIf } from 'vuelidate/lib/validators'
|
||||
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
export default {
|
||||
name: 'PostgreSQLDataSync',
|
||||
mixins: [form],
|
||||
props: {
|
||||
update: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const allowedValues = [
|
||||
'postgresql_host',
|
||||
'postgresql_username',
|
||||
'postgresql_port',
|
||||
'postgresql_database',
|
||||
'postgresql_schema',
|
||||
'postgresql_table',
|
||||
'postgresql_sslmode',
|
||||
]
|
||||
if (!this.update) {
|
||||
allowedValues.push('postgresql_password')
|
||||
}
|
||||
return {
|
||||
allowedValues: [
|
||||
'postgresql_host',
|
||||
'postgresql_username',
|
||||
'postgresql_password',
|
||||
'postgresql_port',
|
||||
'postgresql_database',
|
||||
'postgresql_schema',
|
||||
'postgresql_table',
|
||||
'postgresql_sslmode',
|
||||
],
|
||||
allowedValues,
|
||||
values: {
|
||||
postgresql_host: '',
|
||||
postgresql_username: '',
|
||||
postgresql_password: '',
|
||||
postgresql_port: '5432',
|
||||
postgresql_database: '',
|
||||
postgresql_schema: 'public',
|
||||
|
@ -146,17 +175,23 @@ export default {
|
|||
],
|
||||
}
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
postgresql_host: { required },
|
||||
postgresql_username: { required },
|
||||
postgresql_password: { required },
|
||||
postgresql_database: { required },
|
||||
postgresql_schema: { required },
|
||||
postgresql_table: { required },
|
||||
postgresql_sslmode: { required },
|
||||
postgresql_port: { required, numeric },
|
||||
},
|
||||
validations() {
|
||||
return {
|
||||
values: {
|
||||
postgresql_host: { required },
|
||||
postgresql_username: { required },
|
||||
postgresql_password: {
|
||||
required: requiredIf(() => {
|
||||
return this.allowedValues.includes('postgresql_password')
|
||||
}),
|
||||
},
|
||||
postgresql_database: { required },
|
||||
postgresql_schema: { required },
|
||||
postgresql_table: { required },
|
||||
postgresql_sslmode: { required },
|
||||
postgresql_port: { required, numeric },
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -21,9 +21,9 @@
|
|||
v-if="!jobHasSucceeded"
|
||||
type="primary"
|
||||
size="large"
|
||||
:disabled="creatingJob || jobIsRunning"
|
||||
:loading="creatingJob || jobIsRunning"
|
||||
@click="sync"
|
||||
:disabled="jobIsRunning"
|
||||
:loading="jobIsRunning"
|
||||
@click="syncTable(table)"
|
||||
>
|
||||
{{ $t('syncTableModal.sync') }}
|
||||
</Button>
|
||||
|
@ -38,14 +38,11 @@
|
|||
|
||||
<script>
|
||||
import modal from '@baserow/modules/core/mixins/modal'
|
||||
import error from '@baserow/modules/core/mixins/error'
|
||||
import jobProgress from '@baserow/modules/core/mixins/jobProgress'
|
||||
import DataSyncService from '@baserow/modules/database/services/dataSync'
|
||||
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
|
||||
import dataSync from '@baserow/modules/database/mixins/dataSync'
|
||||
|
||||
export default {
|
||||
name: 'SyncTableModal',
|
||||
mixins: [modal, error, jobProgress],
|
||||
mixins: [modal, dataSync],
|
||||
props: {
|
||||
table: {
|
||||
type: Object,
|
||||
|
@ -57,9 +54,6 @@ export default {
|
|||
creatingJob: false,
|
||||
}
|
||||
},
|
||||
beforeDestroy() {
|
||||
this.stopPollIfRunning()
|
||||
},
|
||||
methods: {
|
||||
show() {
|
||||
this.job = null
|
||||
|
@ -68,40 +62,6 @@ export default {
|
|||
hidden() {
|
||||
this.stopPollIfRunning()
|
||||
},
|
||||
async sync() {
|
||||
if (this.jobIsRunning) {
|
||||
return
|
||||
}
|
||||
|
||||
this.hideError()
|
||||
this.job = null
|
||||
this.creatingJob = true
|
||||
|
||||
try {
|
||||
const { data: job } = await DataSyncService(this.$client).syncTable(
|
||||
this.table.data_sync.id
|
||||
)
|
||||
this.startJobPoller(job)
|
||||
} catch (error) {
|
||||
this.handleError(error)
|
||||
} finally {
|
||||
this.creatingJob = false
|
||||
}
|
||||
},
|
||||
onJobFailed() {
|
||||
const error = new ResponseErrorMessage(
|
||||
this.$t('createDataSync.error'),
|
||||
this.job.human_readable_error
|
||||
)
|
||||
this.stopPollAndHandleError(error)
|
||||
},
|
||||
onJobPollingError(error) {
|
||||
this.stopPollAndHandleError(error)
|
||||
},
|
||||
stopPollAndHandleError(error) {
|
||||
this.stopPollIfRunning()
|
||||
error.handler ? this.handleError(error) : this.showError(error)
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -103,6 +103,38 @@
|
|||
:name="dataSyncType.getName()"
|
||||
></component>
|
||||
</li>
|
||||
<li
|
||||
v-if="
|
||||
table.data_sync &&
|
||||
$hasPermission(
|
||||
'database.table.update',
|
||||
table,
|
||||
database.workspace.id
|
||||
) &&
|
||||
$hasPermission(
|
||||
'database.data_sync.get',
|
||||
table,
|
||||
database.workspace.id
|
||||
)
|
||||
"
|
||||
class="context__menu-item"
|
||||
>
|
||||
<a
|
||||
class="context__menu-item-link"
|
||||
@click="openConfigureDataSyncModal()"
|
||||
>
|
||||
<i class="context__menu-item-icon iconoir-settings"></i>
|
||||
{{ $t('sidebarItem.updateSyncConfig') }}
|
||||
<div v-if="dataSyncDeactivated" class="deactivated-label">
|
||||
<i class="iconoir-lock"></i>
|
||||
</div>
|
||||
</a>
|
||||
<ConfigureDataSyncModal
|
||||
ref="configureDataSyncModal"
|
||||
:database="database"
|
||||
:table="table"
|
||||
></ConfigureDataSyncModal>
|
||||
</li>
|
||||
<li
|
||||
v-if="
|
||||
$hasPermission(
|
||||
|
@ -173,10 +205,12 @@ import ExportTableModal from '@baserow/modules/database/components/export/Export
|
|||
import WebhookModal from '@baserow/modules/database/components/webhook/WebhookModal'
|
||||
import SidebarDuplicateTableContextItem from '@baserow/modules/database/components/sidebar/table/SidebarDuplicateTableContextItem'
|
||||
import SyncTableModal from '@baserow/modules/database/components/dataSync/SyncTableModal'
|
||||
import ConfigureDataSyncModal from '@baserow/modules/database/components/dataSync/ConfigureDataSyncModal.vue'
|
||||
|
||||
export default {
|
||||
name: 'SidebarItem',
|
||||
components: {
|
||||
ConfigureDataSyncModal,
|
||||
ExportTableModal,
|
||||
WebhookModal,
|
||||
SyncTableModal,
|
||||
|
@ -298,6 +332,10 @@ export default {
|
|||
this.$refs.syncModal.show()
|
||||
}
|
||||
},
|
||||
openConfigureDataSyncModal() {
|
||||
this.$refs.context.hide()
|
||||
this.$refs.configureDataSyncModal.show()
|
||||
},
|
||||
enableRename() {
|
||||
this.$refs.context.hide()
|
||||
this.$refs.rename.edit()
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
:default-name="getDefaultName()"
|
||||
@submitted="submitted"
|
||||
>
|
||||
<component :is="dataSyncComponent" />
|
||||
<component :is="dataSyncComponent" :disabled="loadingProperties" />
|
||||
</TableForm>
|
||||
<Error :error="error"></Error>
|
||||
<div class="align-right">
|
||||
|
@ -60,18 +60,16 @@
|
|||
</template>
|
||||
|
||||
<script>
|
||||
import error from '@baserow/modules/core/mixins/error'
|
||||
import jobProgress from '@baserow/modules/core/mixins/jobProgress'
|
||||
import TableForm from '@baserow/modules/database/components/table/TableForm'
|
||||
import { getNextAvailableNameInSequence } from '@baserow/modules/core/utils/string'
|
||||
import DataSyncService from '@baserow/modules/database/services/dataSync'
|
||||
import { clone } from '@baserow/modules/core/utils/object'
|
||||
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'
|
||||
import dataSync from '@baserow/modules/database/mixins/dataSync'
|
||||
|
||||
export default {
|
||||
name: 'CreateDataSync',
|
||||
components: { TableForm },
|
||||
mixins: [error, jobProgress],
|
||||
mixins: [dataSync],
|
||||
props: {
|
||||
database: {
|
||||
type: Object,
|
||||
|
@ -84,11 +82,8 @@ export default {
|
|||
},
|
||||
data() {
|
||||
return {
|
||||
loadingProperties: false,
|
||||
loadedProperties: false,
|
||||
formValues: null,
|
||||
properties: null,
|
||||
syncedProperties: null,
|
||||
creatingTable: false,
|
||||
createdTable: null,
|
||||
}
|
||||
|
@ -114,9 +109,6 @@ export default {
|
|||
}
|
||||
},
|
||||
},
|
||||
beforeDestroy() {
|
||||
this.stopPollIfRunning()
|
||||
},
|
||||
methods: {
|
||||
hide() {
|
||||
this.stopPollIfRunning()
|
||||
|
@ -126,44 +118,9 @@ export default {
|
|||
const baseName = this.$t('createTableModal.defaultName')
|
||||
return getNextAvailableNameInSequence(baseName, excludeNames)
|
||||
},
|
||||
getFieldTypeIconClass(fieldType) {
|
||||
return this.$registry.get('field', fieldType).getIconClass()
|
||||
},
|
||||
async submitted(formValues) {
|
||||
formValues.type = this.chosenType
|
||||
this.formValues = formValues
|
||||
|
||||
this.loadingProperties = true
|
||||
this.hideError()
|
||||
|
||||
try {
|
||||
const { data } = await DataSyncService(this.$client).fetchProperties(
|
||||
formValues
|
||||
)
|
||||
this.loadedProperties = true
|
||||
this.properties = data
|
||||
this.syncedProperties = data.map((p) => p.key)
|
||||
} catch (error) {
|
||||
if (error.handler && error.handler.code === 'ERROR_SYNC_ERROR') {
|
||||
this.showError(
|
||||
this.$t('dataSyncType.syncError'),
|
||||
error.handler.detail
|
||||
)
|
||||
error.handler.handled()
|
||||
return
|
||||
}
|
||||
this.handleError(error)
|
||||
} finally {
|
||||
this.loadingProperties = false
|
||||
}
|
||||
},
|
||||
toggleVisibleField(key) {
|
||||
const index = this.syncedProperties.findIndex((f) => key === f)
|
||||
if (index > -1) {
|
||||
this.syncedProperties.splice(index, 1)
|
||||
} else {
|
||||
this.syncedProperties.push(key)
|
||||
}
|
||||
await this.fetchNonExistingProperties(this.chosenType, formValues)
|
||||
},
|
||||
async create() {
|
||||
this.hideError()
|
||||
|
@ -187,10 +144,7 @@ export default {
|
|||
database: this.database,
|
||||
data: this.createdTable,
|
||||
})
|
||||
const { data: job } = await DataSyncService(this.$client).syncTable(
|
||||
this.createdTable.data_sync.id
|
||||
)
|
||||
this.startJobPoller(job)
|
||||
await this.syncTable(this.createdTable)
|
||||
} catch (error) {
|
||||
this.handleError(error)
|
||||
} finally {
|
||||
|
@ -207,20 +161,6 @@ export default {
|
|||
})
|
||||
this.$emit('hide')
|
||||
},
|
||||
onJobFailed() {
|
||||
const error = new ResponseErrorMessage(
|
||||
this.$t('createDataSync.error'),
|
||||
this.job.human_readable_error
|
||||
)
|
||||
this.stopPollAndHandleError(error)
|
||||
},
|
||||
onJobPollingError(error) {
|
||||
this.stopPollAndHandleError(error)
|
||||
},
|
||||
stopPollAndHandleError(error) {
|
||||
this.stopPollIfRunning()
|
||||
error.handler ? this.handleError(error) : this.showError(error)
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@@ -67,7 +67,8 @@
    "webhooks": "Webhooks",
    "lastSynced": "Last synced",
    "notSynced": "not synced",
    "sync": "Sync table"
    "sync": "Sync table",
    "updateSyncConfig": "Update sync configuration"
  },
  "duplicateTableJobType": {
    "duplicating": "Duplicating",
@@ -1009,5 +1010,19 @@
    "small": "Small",
    "medium": "Medium",
    "large": "Large"
  },
  "configureDataSyncModal": {
    "title": "Data sync",
    "syncedFields": "Synced fields",
    "syncSettings": "Sync settings"
  },
  "configureDataSyncVisibleFields": {
    "title": "Change synced fields",
    "fields": "Synced fields",
    "syncTable": "Sync when save"
  },
  "configureDataSyncSettings": {
    "title": "Change data sync",
    "syncTable": "Sync when save"
  }
}
144
web-frontend/modules/database/mixins/dataSync.js
Normal file
144
web-frontend/modules/database/mixins/dataSync.js
Normal file
|
@ -0,0 +1,144 @@
import DataSyncService from '@baserow/modules/database/services/dataSync'
import error from '@baserow/modules/core/mixins/error'
import jobProgress from '@baserow/modules/core/mixins/jobProgress'
import { ResponseErrorMessage } from '@baserow/modules/core/plugins/clientHandler'

export default {
  mixins: [error, jobProgress],
  data() {
    return {
      loadingProperties: false,
      loadedProperties: false,
      properties: null,
      syncedProperties: [],
      updateLoading: false,
    }
  },
  beforeDestroy() {
    this.stopPollIfRunning()
  },
  methods: {
    toggleVisibleField(key) {
      const index = this.syncedProperties.findIndex((f) => key === f)
      if (index > -1) {
        this.syncedProperties.splice(index, 1)
      } else {
        this.syncedProperties.push(key)
      }
    },
    getFieldTypeIconClass(fieldType) {
      return this.$registry.get('field', fieldType).getIconClass()
    },
    async fetchExistingProperties(table) {
      this.loadingProperties = true

      try {
        const { data } = await DataSyncService(
          this.$client
        ).fetchPropertiesOfDataSync(table.data_sync.id)
        this.loadedProperties = true
        this.properties = data
      } catch (error) {
        if (error.handler && error.handler.code === 'ERROR_SYNC_ERROR') {
          this.showError(
            this.$t('dataSyncType.syncError'),
            error.handler.detail
          )
          error.handler.handled()
          return
        }
        this.handleError(error)
      } finally {
        this.loadingProperties = false
      }
    },
    async fetchNonExistingProperties(type, values) {
      values.type = type

      this.loadingProperties = true
      this.hideError()

      try {
        const { data } = await DataSyncService(this.$client).fetchProperties(
          values
        )
        this.loadedProperties = true
        this.properties = data
        this.syncedProperties = data.map((p) => p.key)
      } catch (error) {
        if (error.handler && error.handler.code === 'ERROR_SYNC_ERROR') {
          this.showError(
            this.$t('dataSyncType.syncError'),
            error.handler.detail
          )
          error.handler.handled()
          return
        }
        this.handleError(error)
      } finally {
        this.loadingProperties = false
      }
    },
    async syncTable(table) {
      if (this.jobIsRunning) {
        return
      }

      this.hideError()
      this.job = null

      try {
        const { data: job } = await DataSyncService(this.$client).syncTable(
          table.data_sync.id
        )
        this.startJobPoller(job)
      } catch (error) {
        this.handleError(error)
      }
    },
    async update(table, values, syncTable = true) {
      this.updateLoading = true

      try {
        const { data } = await DataSyncService(this.$client).update(
          this.table.data_sync.id,
          values
        )
        await this.$store.dispatch('table/forceUpdate', {
          database: this.database,
          table: this.table,
          values: { data_sync: data },
        })
        if (syncTable) {
          await this.syncTable(this.table)
        }
      } catch (error) {
        if (error.handler && error.handler.code === 'ERROR_SYNC_ERROR') {
          this.showError(
            this.$t('dataSyncType.syncError'),
            error.handler.detail
          )
          error.handler.handled()
          return
        }
        this.handleError(error)
      } finally {
        this.updateLoading = false
      }
    },
    onJobFailed() {
      const error = new ResponseErrorMessage(
        this.$t('createDataSync.error'),
        this.job.human_readable_error
      )
      this.stopPollAndHandleError(error)
    },
    onJobPollingError(error) {
      this.stopPollAndHandleError(error)
    },
    stopPollAndHandleError(error) {
      this.stopPollIfRunning()
      error.handler ? this.handleError(error) : this.showError(error)
    },
  },
}
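For orientation, a minimal sketch of how a settings component might consume this mixin, assuming it receives `database` and `table` props (the mixin's `update` reads `this.database` and `this.table`) and that `table.data_sync.synced_properties` lists the currently synced properties; the component name and the shape of that list are assumptions, not part of this commit:

// ConfigureDataSyncSettings.vue (hypothetical name) -- script section only.
import dataSync from '@baserow/modules/database/mixins/dataSync'

export default {
  name: 'ConfigureDataSyncSettings',
  mixins: [dataSync],
  props: {
    // Assumed to be provided by the parent; the mixin's `update` method
    // dispatches `table/forceUpdate` with `this.database` and `this.table`.
    database: { type: Object, required: true },
    table: { type: Object, required: true },
  },
  async mounted() {
    // Load the properties the existing data sync can expose, so they can be
    // rendered as toggles wired to `toggleVisibleField`.
    await this.fetchExistingProperties(this.table)
    // Pre-select the currently synced properties. The exact shape of
    // `synced_properties` is an assumption here.
    this.syncedProperties = (this.table.data_sync.synced_properties || []).map(
      (p) => p.key
    )
  },
  methods: {
    async save() {
      // Persists the selection and, because the third argument defaults to
      // true, immediately triggers a new sync job afterwards.
      await this.update(this.table, {
        synced_properties: this.syncedProperties,
      })
    },
  },
}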

web-frontend/modules/database/services/dataSync.js

@@ -1,13 +1,22 @@
export default (client) => {
  return {
    get(dataSyncId) {
      return client.get(`/database/data-sync/${dataSyncId}/`)
    },
    create(databaseId, values) {
      return client.post(`/database/data-sync/database/${databaseId}/`, values)
    },
    update(dataSyncId, values) {
      return client.patch(`/database/data-sync/${dataSyncId}/`, values)
    },
    syncTable(dataSyncId) {
      return client.post(`/database/data-sync/${dataSyncId}/sync/async/`)
    },
    fetchProperties(values) {
      return client.post(`/database/data-sync/properties/`, values)
    },
    fetchPropertiesOfDataSync(dataSyncId) {
      return client.get(`/database/data-sync/${dataSyncId}/properties/`)
    },
  }
}
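Outside of the mixin, the same endpoints can be exercised directly through this service. A rough sketch under assumptions: the helper name and the property keys 'id' and 'title' are illustrative, and `client` is Baserow's configured axios instance passed in by the caller.

import DataSyncService from '@baserow/modules/database/services/dataSync'

// Illustrative helper, not part of the commit: narrows an existing data sync
// to a couple of properties and kicks off an asynchronous re-sync.
export async function limitSyncedProperties(client, dataSyncId) {
  const service = DataSyncService(client)

  // GET /database/data-sync/{id}/properties/ lists everything the source
  // currently exposes for this data sync.
  const { data: available } = await service.fetchPropertiesOfDataSync(
    dataSyncId
  )
  const keys = available.map((p) => p.key)

  // PATCH /database/data-sync/{id}/ with a reduced `synced_properties` list.
  // 'id' and 'title' are placeholder keys; real keys depend on the sync type.
  await service.update(dataSyncId, {
    synced_properties: keys.filter((key) => ['id', 'title'].includes(key)),
  })

  // POST /database/data-sync/{id}/sync/async/ returns a job object that can
  // be polled, e.g. with the jobProgress mixin used above.
  const { data: job } = await service.syncTable(dataSyncId)
  return job
}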

@@ -67,6 +67,8 @@ exports[`ChoiceElement as default 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -133,6 +135,8 @@ exports[`ChoiceElement as manual checkboxes 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -254,6 +258,8 @@ exports[`ChoiceElement as manual dropdown 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -320,6 +326,8 @@ exports[`ChoiceElement as manual radio 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -46,6 +46,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -99,6 +101,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -152,6 +156,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -209,6 +215,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -266,6 +274,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -323,6 +333,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -380,6 +392,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -437,6 +451,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -494,6 +510,8 @@ exports[`DateTimePickerElement placeholder corresponds to date and time formats
</div>
</div>

<!---->

</div>

<!---->

@@ -208,6 +208,8 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -423,6 +425,8 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 2`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -638,6 +642,8 @@ exports[`RecordSelectorElement does not paginate if API returns 400/404 3`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -775,6 +781,8 @@ exports[`RecordSelectorElement resolves suffix formulas 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -912,6 +920,8 @@ exports[`RecordSelectorElement resolves suffix formulas 2`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -174,6 +174,8 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -235,6 +237,8 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</ul>
</div>

<!---->

</div>

<!---->

@@ -520,6 +524,8 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -1635,6 +1641,8 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -1744,6 +1752,8 @@ exports[`Preview exportTableModal Modal with no view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -1968,6 +1978,8 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -2029,6 +2041,8 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</ul>
</div>

<!---->

</div>

<!---->

@@ -2314,6 +2328,8 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -3429,6 +3445,8 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</div>
</div>

<!---->

</div>

<!---->

@@ -3538,6 +3556,8 @@ exports[`Preview exportTableModal Modal with view 1`] = `
</div>
</div>

<!---->

</div>

<!---->