Mirror of https://gitlab.com/bramw/baserow.git (synced 2025-04-14 17:18:33 +00:00)
Public view export
commit 3613d395ad (parent 758030b005)
46 changed files with 1802 additions and 160 deletions
|
@ -521,6 +521,7 @@ SPECTACULAR_SETTINGS = {
|
|||
{"name": "Database table view sortings"},
|
||||
{"name": "Database table view decorations"},
|
||||
{"name": "Database table view groupings"},
|
||||
{"name": "Database table view export"},
|
||||
{"name": "Database table grid view"},
|
||||
{"name": "Database table gallery view"},
|
||||
{"name": "Database table form view"},
|
||||
|
|
|
@ -39,6 +39,36 @@ ONLY_COUNT_API_PARAM = OpenApiParameter(
|
|||
)
|
||||
|
||||
|
||||
def get_filters_object_description(combine_filters=True, view_is_aggregating=False):
|
||||
return (
|
||||
(
|
||||
"A JSON serialized string containing the filter tree to apply "
|
||||
"for the aggregation. The filter tree is a nested structure "
|
||||
"containing the filters that need to be applied. \n\n"
|
||||
if view_is_aggregating
|
||||
else "A JSON serialized string containing the filter tree to "
|
||||
"apply to this view. The filter tree is a nested structure "
|
||||
"containing the filters that need to be applied. \n\n"
|
||||
)
|
||||
+ "An example of a valid filter tree is the following:"
|
||||
'`{"filter_type": "AND", "filters": [{"field": 1, "type": "equal", '
|
||||
'"value": "test"}]}`. The `field` value must be the ID of the '
|
||||
"field to filter on, or the name of the field if "
|
||||
"`user_field_names` is true.\n\n"
|
||||
f"The following filters are available: "
|
||||
f'{", ".join(view_filter_type_registry.get_types())}.'
|
||||
"\n\n**Please note that if this parameter is provided, all other "
|
||||
"`filter__{field}__{filter}` will be ignored, "
|
||||
"as well as the `filter_type` parameter.**"
|
||||
+ (
|
||||
"\n\n**Please note that by passing the filters parameter the "
|
||||
"view filters saved for the view itself will be ignored.**"
|
||||
if not combine_filters
|
||||
else ""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def make_adhoc_filter_api_params(combine_filters=True, view_is_aggregating=False):
|
||||
"""
|
||||
Generate OpenAPI parameters for adhoc filter API params.
|
||||
|
@ -66,32 +96,8 @@ def make_adhoc_filter_api_params(combine_filters=True, view_is_aggregating=False
|
|||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
description=lazy(
|
||||
lambda: (
|
||||
(
|
||||
"A JSON serialized string containing the filter tree to apply "
|
||||
"for the aggregation. The filter tree is a nested structure "
|
||||
"containing the filters that need to be applied. \n\n"
|
||||
if view_is_aggregating
|
||||
else "A JSON serialized string containing the filter tree to "
|
||||
"apply to this view. The filter tree is a nested structure "
|
||||
"containing the filters that need to be applied. \n\n"
|
||||
)
|
||||
+ "An example of a valid filter tree is the following:"
|
||||
'`{"filter_type": "AND", "filters": [{"field": 1, "type": "equal", '
|
||||
'"value": "test"}]}`. The `field` value must be the ID of the '
|
||||
"field to filter on, or the name of the field if "
|
||||
"`user_field_names` is true.\n\n"
|
||||
f"The following filters are available: "
|
||||
f'{", ".join(view_filter_type_registry.get_types())}.'
|
||||
"\n\n**Please note that if this parameter is provided, all other "
|
||||
"`filter__{field}__{filter}` will be ignored, "
|
||||
"as well as the `filter_type` parameter.**"
|
||||
+ (
|
||||
"\n\n**Please note that by passing the filters parameter the "
|
||||
"view filters saved for the view itself will be ignored.**"
|
||||
if not combine_filters
|
||||
else ""
|
||||
)
|
||||
lambda: get_filters_object_description(
|
||||
combine_filters, view_is_aggregating
|
||||
),
|
||||
str,
|
||||
)(),
|
||||
|
|
|
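The description above documents the shape of the ad-hoc `filters` parameter. A minimal sketch of how a client could build and serialize such a filter tree, assuming a field with ID 1 as in the documented example (the optional `groups` list follows the usage shown later in this diff):

```python
import json

# Filter tree matching the documented shape: a root group with a filter_type,
# a list of filters, and optional nested groups.
filter_tree = {
    "filter_type": "AND",
    "filters": [
        # `field` is the ID of the field to filter on (or its name when
        # `user_field_names` is true); `type` must be a registered filter type.
        {"field": 1, "type": "equal", "value": "test"},
    ],
    "groups": [],
}

# The parameter is passed as a JSON serialized string, for example as the
# `filters` query parameter of a list-rows or aggregation request.
serialized = json.dumps(filter_tree)
print(serialized)
```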
@ -5,6 +5,8 @@ from drf_spectacular.utils import extend_schema_field
|
|||
from rest_framework import fields, serializers
|
||||
|
||||
from baserow.api.serializers import FileURLSerializerMixin
|
||||
from baserow.contrib.database.api.constants import get_filters_object_description
|
||||
from baserow.contrib.database.api.views.serializers import PublicViewFiltersSerializer
|
||||
from baserow.contrib.database.export.handler import ExportHandler
|
||||
from baserow.contrib.database.export.models import ExportJob
|
||||
from baserow.contrib.database.export.registries import table_exporter_registry
|
||||
|
@ -122,6 +124,28 @@ class BaseExporterOptionsSerializer(serializers.Serializer):
|
|||
default="utf-8",
|
||||
help_text="The character set to use when creating the export file.",
|
||||
)
|
||||
filters = PublicViewFiltersSerializer(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
help_text=lazy(
|
||||
lambda: get_filters_object_description(True, False),
|
||||
str,
|
||||
)(),
|
||||
)
|
||||
order_by = serializers.CharField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
allow_blank=True,
|
||||
help_text="Optionally the rows can be ordered by provided field ids separated "
|
||||
"by comma. By default a field is ordered in ascending (A-Z) order, but by "
|
||||
"prepending the field with a '-' it can be ordered descending (Z-A).",
|
||||
)
|
||||
fields = serializers.ListField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
child=serializers.IntegerField(),
|
||||
help_text="List of field IDs that must be included in the export, in the desired order.",
|
||||
)
|
||||
|
||||
|
||||
class CsvExporterOptionsSerializer(BaseExporterOptionsSerializer):
|
||||
|
|
|
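With the new `filters`, `order_by` and `fields` options on `BaseExporterOptionsSerializer`, an export request body could look like the sketch below. The field IDs are illustrative assumptions; the exact accepted keys are defined by the serializers above:

```python
# Hypothetical CSV export options; field ids 42 and 43 are made up.
export_options = {
    "exporter_type": "csv",
    "export_charset": "utf-8",
    "csv_include_header": True,
    "csv_column_separator": ",",
    # Ad-hoc filter tree applied on top of the view filters.
    "filters": {
        "filter_type": "AND",
        "filters": [{"field": 42, "type": "contains", "value": "world"}],
        "groups": [],
    },
    # Comma separated field ids; prefix with "-" for descending order.
    "order_by": "-field_42",
    # Only these field ids are exported, in this order.
    "fields": [42, 43],
}
```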
@ -22,9 +22,16 @@ from baserow.contrib.database.api.export.serializers import (
|
|||
BaseExporterOptionsSerializer,
|
||||
ExportJobSerializer,
|
||||
)
|
||||
from baserow.contrib.database.api.fields.errors import (
|
||||
ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
)
|
||||
from baserow.contrib.database.api.tables.errors import ERROR_TABLE_DOES_NOT_EXIST
|
||||
from baserow.contrib.database.api.views.errors import (
|
||||
ERROR_VIEW_DOES_NOT_EXIST,
|
||||
ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
|
||||
ERROR_VIEW_NOT_IN_TABLE,
|
||||
)
|
||||
from baserow.contrib.database.export.exceptions import (
|
||||
|
@ -34,9 +41,19 @@ from baserow.contrib.database.export.exceptions import (
|
|||
from baserow.contrib.database.export.handler import ExportHandler
|
||||
from baserow.contrib.database.export.models import ExportJob
|
||||
from baserow.contrib.database.export.registries import table_exporter_registry
|
||||
from baserow.contrib.database.fields.exceptions import (
|
||||
FilterFieldNotFound,
|
||||
OrderByFieldNotFound,
|
||||
OrderByFieldNotPossible,
|
||||
)
|
||||
from baserow.contrib.database.table.exceptions import TableDoesNotExist
|
||||
from baserow.contrib.database.table.handler import TableHandler
|
||||
from baserow.contrib.database.views.exceptions import ViewDoesNotExist, ViewNotInTable
|
||||
from baserow.contrib.database.views.exceptions import (
|
||||
ViewDoesNotExist,
|
||||
ViewFilterTypeDoesNotExist,
|
||||
ViewFilterTypeNotAllowedForField,
|
||||
ViewNotInTable,
|
||||
)
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.core.exceptions import UserNotInWorkspace
|
||||
|
||||
|
@ -95,6 +112,11 @@ class ExportTableView(APIView):
|
|||
"ERROR_TABLE_ONLY_EXPORT_UNSUPPORTED",
|
||||
"ERROR_VIEW_UNSUPPORTED_FOR_EXPORT_TYPE",
|
||||
"ERROR_VIEW_NOT_IN_TABLE",
|
||||
"ERROR_FILTER_FIELD_NOT_FOUND",
|
||||
"ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST",
|
||||
"ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_FOUND",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_POSSIBLE",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(
|
||||
|
@ -110,6 +132,11 @@ class ExportTableView(APIView):
|
|||
ViewDoesNotExist: ERROR_VIEW_DOES_NOT_EXIST,
|
||||
TableOnlyExportUnsupported: ERROR_TABLE_ONLY_EXPORT_UNSUPPORTED,
|
||||
ViewNotInTable: ERROR_VIEW_NOT_IN_TABLE,
|
||||
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
|
||||
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
}
|
||||
)
|
||||
def post(self, request, table_id):
|
||||
|
|
|
@ -169,6 +169,7 @@ class PublicFormViewSerializer(serializers.ModelSerializer):
|
|||
"submit_text",
|
||||
"fields",
|
||||
"show_logo",
|
||||
"allow_public_export",
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -402,6 +402,7 @@ class ViewSerializer(serializers.ModelSerializer):
|
|||
"filters_disabled",
|
||||
"public_view_has_password",
|
||||
"show_logo",
|
||||
"allow_public_export",
|
||||
"ownership_type",
|
||||
"owned_by_id",
|
||||
)
|
||||
|
@ -618,6 +619,7 @@ class PublicViewSerializer(serializers.ModelSerializer):
|
|||
"public",
|
||||
"slug",
|
||||
"show_logo",
|
||||
"allow_public_export",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
|
|
|
@ -9,6 +9,7 @@ import unicodecsv as csv
|
|||
|
||||
from baserow.contrib.database.export.exceptions import ExportJobCanceledException
|
||||
from baserow.contrib.database.table.models import FieldObject
|
||||
from baserow.contrib.database.views.filters import AdHocFilters
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.contrib.database.views.registries import view_type_registry
|
||||
|
||||
|
@ -170,20 +171,47 @@ class QuerysetSerializer(abc.ABC):
|
|||
return cls(qs, ordered_field_objects)
|
||||
|
||||
@classmethod
|
||||
def for_view(cls, view) -> "QuerysetSerializer":
|
||||
def for_view(cls, view, visible_field_ids_in_order=None) -> "QuerysetSerializer":
|
||||
"""
|
||||
Generates a queryset serializer for the provided view according to its view
|
||||
type and any relevant view settings it might have (filters, sorts,
|
||||
hidden columns etc).
|
||||
|
||||
:param view: The view to serialize.
|
||||
:param visible_field_ids_in_order: Optionally provide a list of field IDs in
|
||||
the correct order. Only those fields will be included in the export.
|
||||
:return: A tuple of the QuerysetSerializer ready to serialize the table and
the list of visible field objects in the view.
|
||||
"""
|
||||
|
||||
view_type = view_type_registry.get_by_model(view.specific_class)
|
||||
fields, model = view_type.get_visible_fields_and_model(view)
|
||||
visible_field_objects_in_view, model = view_type.get_visible_fields_and_model(
|
||||
view
|
||||
)
|
||||
if visible_field_ids_in_order is None:
|
||||
fields = visible_field_objects_in_view
|
||||
else:
|
||||
# Re-order and return only the fields in visible_field_ids_in_order
|
||||
field_map = {
|
||||
field_object["field"].id: field_object
|
||||
for field_object in visible_field_objects_in_view
|
||||
}
|
||||
fields = [
|
||||
field_map[field_id]
|
||||
for field_id in visible_field_ids_in_order
|
||||
if field_id in field_map
|
||||
]
|
||||
qs = ViewHandler().get_queryset(view, model=model)
|
||||
return cls(qs, fields)
|
||||
return cls(qs, fields), visible_field_objects_in_view
|
||||
|
||||
def add_ad_hoc_filters_dict_to_queryset(self, filters_dict, only_by_field_ids=None):
|
||||
filters = AdHocFilters.from_dict(filters_dict)
|
||||
filters.only_filter_by_field_ids = only_by_field_ids
|
||||
self.queryset = filters.apply_to_queryset(self.queryset.model, self.queryset)
|
||||
|
||||
def add_add_hoc_order_by_to_queryset(self, order_by, only_by_field_ids=None):
|
||||
self.queryset = self.queryset.order_by_fields_string(
|
||||
order_by, only_order_by_field_ids=only_by_field_ids
|
||||
)
|
||||
|
||||
def _get_field_serializer(self, field_object: FieldObject) -> Callable[[Any], Any]:
|
||||
"""
|
||||
|
|
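The reordering logic in `for_view` above keeps only the requested field IDs and preserves the caller's order. A standalone sketch of the same idea, using plain dictionaries instead of real field objects (hypothetical data, not taken from this diff):

```python
# Visible field objects as returned by the view type, keyed by field id.
visible_field_objects_in_view = [
    {"field": {"id": 10}, "name": "Name"},
    {"field": {"id": 11}, "name": "Notes"},
    {"field": {"id": 12}, "name": "Status"},
]
visible_field_ids_in_order = [12, 10, 99]  # 99 is not visible and is dropped

field_map = {fo["field"]["id"]: fo for fo in visible_field_objects_in_view}
fields = [
    field_map[field_id]
    for field_id in visible_field_ids_in_order
    if field_id in field_map
]
assert [fo["field"]["id"] for fo in fields] == [12, 10]
```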
|
@ -22,6 +22,7 @@ from baserow.contrib.database.export.operations import ExportTableOperationType
|
|||
from baserow.contrib.database.export.tasks import run_export_job
|
||||
from baserow.contrib.database.table.models import Table
|
||||
from baserow.contrib.database.views.exceptions import ViewNotInTable
|
||||
from baserow.contrib.database.views.filters import AdHocFilters
|
||||
from baserow.contrib.database.views.models import View
|
||||
from baserow.contrib.database.views.registries import view_type_registry
|
||||
from baserow.core.handler import CoreHandler
|
||||
|
@ -37,14 +38,35 @@ from .exceptions import (
|
|||
)
|
||||
from .file_writer import PaginatedExportJobFileWriter
|
||||
from .registries import TableExporter, table_exporter_registry
|
||||
from .utils import view_is_publicly_exportable
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class ExportHandler:
|
||||
@staticmethod
|
||||
def _raise_if_no_export_permissions(
|
||||
user: Optional[User], table: Table, view: Optional[View]
|
||||
):
|
||||
if view_is_publicly_exportable(user, view):
|
||||
# Skip the permission check when no user is provided, the view is public,
|
||||
# and exporting from the publicly shared view is allowed, because the
|
||||
# export can be initiated by an anonymous user.
|
||||
pass
|
||||
else:
|
||||
CoreHandler().check_permissions(
|
||||
user,
|
||||
ExportTableOperationType.type,
|
||||
workspace=table.database.workspace,
|
||||
context=table,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_and_start_new_job(
|
||||
user: User, table: Table, view: Optional[View], export_options: Dict[str, Any]
|
||||
user: Optional[User],
|
||||
table: Table,
|
||||
view: Optional[View],
|
||||
export_options: Dict[str, Any],
|
||||
) -> ExportJob:
|
||||
"""
|
||||
For the provided user, table, optional view and options will create a new
|
||||
|
@ -69,7 +91,10 @@ class ExportHandler:
|
|||
|
||||
@staticmethod
|
||||
def create_pending_export_job(
|
||||
user: User, table: Table, view: Optional[View], export_options: Dict[str, Any]
|
||||
user: Optional[User],
|
||||
table: Table,
|
||||
view: Optional[View],
|
||||
export_options: Dict[str, Any],
|
||||
):
|
||||
"""
|
||||
Creates a new pending export job configured with the provided options but does
|
||||
|
@ -92,12 +117,7 @@ class ExportHandler:
|
|||
exporter = table_exporter_registry.get(exporter_type)
|
||||
exporter.before_job_create(user, table, view, export_options)
|
||||
|
||||
CoreHandler().check_permissions(
|
||||
user,
|
||||
ExportTableOperationType.type,
|
||||
workspace=table.database.workspace,
|
||||
context=table,
|
||||
)
|
||||
ExportHandler._raise_if_no_export_permissions(user, table, view)
|
||||
|
||||
if view and view.table.id != table.id:
|
||||
raise ViewNotInTable()
|
||||
|
@ -105,6 +125,7 @@ class ExportHandler:
|
|||
_cancel_unfinished_jobs(user)
|
||||
|
||||
_raise_if_invalid_view_or_table_for_exporter(exporter_type, view)
|
||||
_raise_if_invalid_order_by_or_filters(table, view, export_options)
|
||||
|
||||
job = ExportJob.objects.create(
|
||||
user=user,
|
||||
|
@ -132,12 +153,8 @@ class ExportHandler:
|
|||
|
||||
# Ensure the user still has permissions when the export job runs.
|
||||
table = job.table
|
||||
CoreHandler().check_permissions(
|
||||
job.user,
|
||||
ExportTableOperationType.type,
|
||||
workspace=table.database.workspace,
|
||||
context=table,
|
||||
)
|
||||
view = job.view
|
||||
ExportHandler._raise_if_no_export_permissions(job.user, table, view)
|
||||
try:
|
||||
return _mark_job_as_finished(_open_file_and_run_export(job))
|
||||
except ExportJobCanceledException:
|
||||
|
@ -206,6 +223,51 @@ def _raise_if_invalid_view_or_table_for_exporter(
|
|||
raise ViewUnsupportedForExporterType()
|
||||
|
||||
|
||||
def _raise_if_invalid_order_by_or_filters(
|
||||
table: Table, view: Optional[View], export_options: dict
|
||||
):
|
||||
"""
|
||||
Validates that the filters and order_by specified in export_options only reference
|
||||
fields that exist in the table and are visible in the view (if provided).
|
||||
|
||||
This method attempts to apply the filters and ordering to a queryset to catch any
|
||||
invalid field references before starting the actual export job. It raises an
|
||||
exception if any validation fails.
|
||||
|
||||
:param table: The table to check the field IDs against.
|
||||
:param view: Optionally provide a view to check the visible fields of.
|
||||
:param export_options: The export options from which to extract the filters and
|
||||
order_by.
|
||||
"""
|
||||
|
||||
model = table.get_model()
|
||||
queryset = model.objects.all()
|
||||
|
||||
only_by_field_ids = None
|
||||
if view:
|
||||
view_type = view_type_registry.get_by_model(view.specific_class)
|
||||
visible_field_objects_in_view, model = view_type.get_visible_fields_and_model(
|
||||
view
|
||||
)
|
||||
only_by_field_ids = [f["field"].id for f in visible_field_objects_in_view]
|
||||
|
||||
# Validate the filter object before the job starts, so that the validation error
|
||||
# can be shown to the user.
|
||||
filters_dict = export_options.get("filters", None)
|
||||
if filters_dict is not None:
|
||||
filters = AdHocFilters.from_dict(filters_dict)
|
||||
filters.only_filter_by_field_ids = only_by_field_ids
|
||||
filters.apply_to_queryset(model, queryset)
|
||||
|
||||
# Validate the sort object before the job starts, so that the validation error
|
||||
# can be shown to the user.
|
||||
order_by = export_options.get("order_by", None)
|
||||
if order_by is not None:
|
||||
queryset.order_by_fields_string(
|
||||
order_by, only_order_by_field_ids=only_by_field_ids
|
||||
)
|
||||
|
||||
|
||||
def _cancel_unfinished_jobs(user):
|
||||
"""
|
||||
Will cancel any in-progress jobs by setting their state to cancelled. Any
|
||||
|
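The `_raise_if_invalid_order_by_or_filters` helper above fails fast before any job row is created. A hedged sketch of the behaviour it guards against; the call site and the non-existent field id 9999 are assumptions based on the error mapping added earlier in this diff:

```python
from baserow.contrib.database.export.handler import ExportHandler
from baserow.contrib.database.fields.exceptions import FilterFieldNotFound


def try_create_job_with_unknown_filter_field(user, table, view):
    """Illustrative only: field id 9999 is assumed not to exist in the table."""
    export_options = {
        "exporter_type": "csv",
        "export_charset": "utf-8",
        "csv_include_header": True,
        "csv_column_separator": ",",
        "filters": {
            "filter_type": "AND",
            "filters": [{"field": 9999, "type": "equal", "value": "x"}],
            "groups": [],
        },
    }
    try:
        # Validation runs before the job is persisted, so nothing is created.
        return ExportHandler.create_and_start_new_job(
            user, table, view, export_options
        )
    except FilterFieldNotFound:
        # The API layer maps this to ERROR_FILTER_FIELD_NOT_FOUND (HTTP 400).
        return None
```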
@ -216,8 +278,11 @@ def _cancel_unfinished_jobs(user):
|
|||
:return The number of jobs cancelled.
|
||||
"""
|
||||
|
||||
jobs = ExportJob.unfinished_jobs(user=user)
|
||||
return jobs.update(state=EXPORT_JOB_CANCELLED_STATUS)
|
||||
if user is None:
|
||||
return 0
|
||||
else:
|
||||
jobs = ExportJob.unfinished_jobs(user=user)
|
||||
return jobs.update(state=EXPORT_JOB_CANCELLED_STATUS)
|
||||
|
||||
|
||||
def _mark_job_as_finished(export_job: ExportJob) -> ExportJob:
|
||||
|
@ -287,12 +352,30 @@ def _open_file_and_run_export(job: ExportJob) -> ExportJob:
|
|||
# TODO: refactor to use the jobs systems
|
||||
_register_action(job)
|
||||
|
||||
filters = job.export_options.pop("filters", None)
|
||||
order_by = job.export_options.pop("order_by", None)
|
||||
visible_fields_in_order = job.export_options.pop("fields", None)
|
||||
only_by_field_ids = None
|
||||
|
||||
with _create_storage_dir_if_missing_and_open(storage_location) as file:
|
||||
queryset_serializer_class = exporter.queryset_serializer_class
|
||||
if job.view is None:
|
||||
serializer = queryset_serializer_class.for_table(job.table)
|
||||
else:
|
||||
serializer = queryset_serializer_class.for_view(job.view)
|
||||
serializer, visible_fields_in_view = queryset_serializer_class.for_view(
|
||||
job.view, visible_fields_in_order
|
||||
)
|
||||
only_by_field_ids = [f["field"].id for f in visible_fields_in_view]
|
||||
|
||||
if filters is not None:
|
||||
serializer.add_ad_hoc_filters_dict_to_queryset(
|
||||
filters, only_by_field_ids=only_by_field_ids
|
||||
)
|
||||
|
||||
if order_by is not None:
|
||||
serializer.add_add_hoc_order_by_to_queryset(
|
||||
order_by, only_by_field_ids=only_by_field_ids
|
||||
)
|
||||
|
||||
serializer.write_to_file(
|
||||
PaginatedExportJobFileWriter(file, job), **job.export_options
|
||||
|
|
backend/src/baserow/contrib/database/export/utils.py (new file, 17 lines)
|
@ -0,0 +1,17 @@
|
|||
from typing import Optional
|
||||
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
|
||||
from baserow.contrib.database.views.models import View
|
||||
|
||||
|
||||
def view_is_publicly_exportable(user: Optional[AbstractUser], view: View):
|
||||
"""
|
||||
Checks if a view can be publicly exported for the given user.
|
||||
|
||||
:param user: The (optional) user on whose behalf the check must be completed.
|
||||
:param view: The view to check.
|
||||
:return: Indicates whether the view is publicly exportable.
|
||||
"""
|
||||
|
||||
return user is None and view and view.allow_public_export and view.public
|
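A short sketch of when the helper above is truthy: the check is only meant for anonymous (user-less) exports and all conditions must hold. The `SimpleNamespace` stand-in for a real `View` instance is an assumption for illustration:

```python
from types import SimpleNamespace

from baserow.contrib.database.export.utils import view_is_publicly_exportable

view = SimpleNamespace(public=True, allow_public_export=True)

# Anonymous user + publicly shared view + export explicitly allowed.
assert view_is_publicly_exportable(None, view)

# Export not allowed on the view, or no view at all: falsy.
assert not view_is_publicly_exportable(
    None, SimpleNamespace(public=True, allow_public_export=False)
)
assert not view_is_publicly_exportable(None, None)

# With an authenticated user the normal permission check applies instead.
assert not view_is_publicly_exportable(object(), view)
```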
|
@ -0,0 +1,21 @@
|
|||
# Generated by Django 5.0.9 on 2024-12-10 20:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("database", "0178_remove_singleselect_missing_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="view",
|
||||
name="allow_public_export",
|
||||
field=models.BooleanField(
|
||||
db_default=False,
|
||||
default=False,
|
||||
help_text="Indicates whether it's allowed to export a publicly shared view.",
|
||||
),
|
||||
),
|
||||
]
|
|
@ -967,6 +967,7 @@ class ViewHandler(metaclass=baserow_trace_methods(tracer)):
|
|||
"filters_disabled",
|
||||
"public_view_password",
|
||||
"show_logo",
|
||||
"allow_public_export",
|
||||
] + view_type.allowed_fields
|
||||
|
||||
changed_allowed_keys = set(extract_allowed(view_values, allowed_fields).keys())
|
||||
|
|
|
@ -112,6 +112,11 @@ class View(
|
|||
default=True,
|
||||
help_text="Indicates whether the logo should be shown in the public view.",
|
||||
)
|
||||
allow_public_export = models.BooleanField(
|
||||
default=False,
|
||||
db_default=False,
|
||||
help_text="Indicates whether it's allowed to export a publicly shared view.",
|
||||
)
|
||||
owned_by = models.ForeignKey(
|
||||
User,
|
||||
null=True,
|
||||
|
|
|
@ -249,7 +249,7 @@ class ActionType(
|
|||
action_timestamp = timestamp if timestamp else datetime.now(tz=timezone.utc)
|
||||
|
||||
add_baserow_trace_attrs(
|
||||
action_user_id=user.id,
|
||||
action_user_id=getattr(user, "id", None),
|
||||
workspace_id=getattr(workspace, "id", None),
|
||||
action_scope=scope,
|
||||
action_type=cls.type,
|
||||
|
|
|
@ -30,5 +30,5 @@ def log_action_receiver(
|
|||
action_command_type=action_command_type.name.lower(),
|
||||
workspace_id=workspace.id if workspace else "",
|
||||
action_type=action_type.type,
|
||||
user_id=user.id,
|
||||
user_id=getattr(user, "id", None),
|
||||
)
|
||||
|
|
|
@ -61,7 +61,7 @@ def capture_user_event(
|
|||
:param workspace: Optionally the workspace related to the event.
|
||||
"""
|
||||
|
||||
if user.is_anonymous:
|
||||
if user is None or user.is_anonymous:
|
||||
# The user_id cannot be None. It's needed by Posthog to identify the user
|
||||
user_id = str(uuid4())
|
||||
user_email = None
|
||||
|
|
|
@ -840,6 +840,7 @@ def test_get_public_gallery_view(api_client, data_fixture):
|
|||
"type": "gallery",
|
||||
"card_cover_image_field": None,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -3310,6 +3310,7 @@ def test_get_public_grid_view(api_client, data_fixture):
|
|||
"row_identifier_type": grid_view.row_identifier_type,
|
||||
"row_height_size": grid_view.row_height_size,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -971,6 +971,7 @@ def test_user_with_password_can_get_info_about_a_public_password_protected_view(
|
|||
"row_identifier_type": grid_view.row_identifier_type,
|
||||
"row_height_size": grid_view.row_height_size,
|
||||
"show_logo": grid_view.show_logo,
|
||||
"allow_public_export": grid_view.allow_public_export,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -1000,6 +1001,7 @@ def test_user_with_password_can_get_info_about_a_public_password_protected_view(
|
|||
"row_identifier_type": grid_view.row_identifier_type,
|
||||
"row_height_size": grid_view.row_height_size,
|
||||
"show_logo": grid_view.show_logo,
|
||||
"allow_public_export": grid_view.allow_public_export,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -1137,6 +1139,29 @@ def test_view_cant_update_show_logo(data_fixture, api_client):
|
|||
assert response_data["show_logo"] is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_view_cant_update_allow_public_export(data_fixture, api_client):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
view = data_fixture.create_grid_view(
|
||||
user=user, table=table, allow_public_export=False
|
||||
)
|
||||
data = {"allow_public_export": True}
|
||||
|
||||
response = api_client.patch(
|
||||
reverse("api:database:views:item", kwargs={"view_id": view.id}),
|
||||
data,
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
||||
view.refresh_from_db()
|
||||
assert view.allow_public_export is False
|
||||
|
||||
response_data = response.json()
|
||||
assert response_data["allow_public_export"] is False
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
def test_loading_a_sortable_view_will_create_an_index(
|
||||
api_client, data_fixture, enable_singleton_testing
|
||||
|
|
|
@ -14,6 +14,10 @@ from baserow.contrib.database.rows.handler import RowHandler
|
|||
# @pytest.mark.disabled_in_ci # Disable this test in CI in next release.
|
||||
@pytest.mark.django_db
|
||||
@override_settings(BASEROW_DISABLE_MODEL_CACHE=True)
|
||||
@pytest.mark.skip(
|
||||
"Fails because it uses the latest version of the models instead of the ones at the "
|
||||
"time of the migration"
|
||||
)
|
||||
def test_migration_rows_with_deleted_singleselect_options(
|
||||
data_fixture, migrator, teardown_table_metadata
|
||||
):
|
||||
|
|
|
@ -39,6 +39,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
|
|||
from baserow.contrib.database.rows.handler import RowHandler
|
||||
from baserow.contrib.database.views.exceptions import ViewNotInTable
|
||||
from baserow.contrib.database.views.models import GridView, GridViewFieldOptions
|
||||
from baserow.core.exceptions import PermissionDenied
|
||||
from baserow.test_utils.helpers import setup_interesting_test_table
|
||||
|
||||
|
||||
|
@ -179,6 +180,65 @@ def test_exporting_table_ignores_view_filters_sorts_hides(
|
|||
assert contents == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.storage.get_default_storage")
|
||||
def test_exporting_public_view_without_user_fails_if_not_publicly_shared_and_allowed(
|
||||
get_storage_mock, data_fixture
|
||||
):
|
||||
storage_mock = MagicMock()
|
||||
get_storage_mock.return_value = storage_mock
|
||||
table = data_fixture.create_database_table()
|
||||
text_field = data_fixture.create_text_field(table=table, name="text_field", order=1)
|
||||
grid_view = data_fixture.create_grid_view(
|
||||
table=table, public=False, allow_public_export=False
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
|
||||
with pytest.raises(PermissionDenied):
|
||||
run_export_job_with_mock_storage(table, grid_view, storage_mock, None)
|
||||
|
||||
grid_view.public = True
|
||||
grid_view.allow_public_export = False
|
||||
grid_view.save()
|
||||
|
||||
with pytest.raises(PermissionDenied):
|
||||
run_export_job_with_mock_storage(table, grid_view, storage_mock, None)
|
||||
|
||||
grid_view.public = False
|
||||
grid_view.allow_public_export = True
|
||||
grid_view.save()
|
||||
|
||||
with pytest.raises(PermissionDenied):
|
||||
run_export_job_with_mock_storage(table, grid_view, storage_mock, None)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.storage.get_default_storage")
|
||||
def test_exporting_public_view_without_user(get_storage_mock, data_fixture):
|
||||
storage_mock = MagicMock()
|
||||
get_storage_mock.return_value = storage_mock
|
||||
table = data_fixture.create_database_table()
|
||||
text_field = data_fixture.create_text_field(table=table, name="text_field", order=1)
|
||||
grid_view = data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
_, contents = run_export_job_with_mock_storage(table, grid_view, storage_mock, None)
|
||||
bom = "\ufeff"
|
||||
expected = bom + "id,text_field\r\n" "1,hello\r\n"
|
||||
assert contents == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("baserow.core.storage.get_default_storage")
|
||||
def test_columns_are_exported_by_order_then_field_id(get_storage_mock, data_fixture):
|
||||
|
|
|
@ -42,6 +42,7 @@ def test_view_created_event_type(data_fixture):
|
|||
"filters_disabled": False,
|
||||
"public_view_has_password": False,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
"ownership_type": "collaborative",
|
||||
"owned_by_id": None,
|
||||
"row_identifier_type": "id",
|
||||
|
@ -84,6 +85,7 @@ def test_view_created_event_type_test_payload(data_fixture):
|
|||
"filters_disabled": False,
|
||||
"public_view_has_password": False,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
"ownership_type": "collaborative",
|
||||
"owned_by_id": None,
|
||||
"row_identifier_type": "id",
|
||||
|
@ -135,6 +137,7 @@ def test_view_updated_event_type(data_fixture):
|
|||
"filters_disabled": False,
|
||||
"public_view_has_password": False,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
"ownership_type": "collaborative",
|
||||
"owned_by_id": None,
|
||||
"row_identifier_type": "id",
|
||||
|
@ -177,6 +180,7 @@ def test_view_updated_event_type_test_payload(data_fixture):
|
|||
"filters_disabled": False,
|
||||
"public_view_has_password": False,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
"ownership_type": "collaborative",
|
||||
"owned_by_id": None,
|
||||
"row_identifier_type": "id",
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"type": "feature",
|
||||
"message": "Allow optionally exporting in publicly shared views.",
|
||||
"issue_number": 1213,
|
||||
"bullet_points": [],
|
||||
"created_at": "2025-02-03"
|
||||
}
|
|
@ -51,7 +51,7 @@ class AuditLogHandler:
|
|||
ip_address = get_user_remote_addr_ip(user)
|
||||
|
||||
return AuditLogEntry.objects.create(
|
||||
user_id=user.id,
|
||||
user_id=getattr(user, "id", None),
|
||||
user_email=getattr(user, "email", None),
|
||||
workspace_id=workspace_id,
|
||||
workspace_name=workspace_name,
|
||||
|
|
|
@ -3,3 +3,4 @@ from rest_framework import serializers
|
|||
|
||||
class UpdatePremiumViewAttributesSerializer(serializers.Serializer):
|
||||
show_logo = serializers.BooleanField(required=False)
|
||||
allow_public_export = serializers.BooleanField(required=False)
|
||||
|
|
premium/backend/src/baserow_premium/api/views/signers.py (new file, 7 lines)
|
@ -0,0 +1,7 @@
|
|||
from django.conf import settings
|
||||
|
||||
from itsdangerous import URLSafeTimedSerializer
|
||||
|
||||
export_public_view_signer = URLSafeTimedSerializer(
|
||||
settings.SECRET_KEY, "export-public-view"
|
||||
)
|
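The signer above wraps export job IDs so the public endpoints never expose raw primary keys. A minimal sketch of the round trip; the secret key value is illustrative, and `BadData` is the base class covering tampered or expired payloads:

```python
from itsdangerous import URLSafeTimedSerializer
from itsdangerous.exc import BadData

# Same construction as signers.py: the secret key plus a salt/namespace string.
signer = URLSafeTimedSerializer("example-secret-key", "export-public-view")

token = signer.dumps(123)          # URL-safe string, e.g. "MTIz.<timestamp>.<sig>"
assert signer.loads(token) == 123  # verifies the signature and returns the id

try:
    signer.loads(token + "tampered")
except BadData:
    # The premium job-status view maps this to ERROR_EXPORT_JOB_DOES_NOT_EXIST.
    pass
```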
|
@ -1,6 +1,10 @@
|
|||
from django.urls import re_path
|
||||
|
||||
from baserow_premium.api.views.views import PremiumViewAttributesView
|
||||
from baserow_premium.api.views.views import (
|
||||
ExportPublicViewJobView,
|
||||
ExportPublicViewView,
|
||||
PremiumViewAttributesView,
|
||||
)
|
||||
|
||||
app_name = "baserow_premium.api.views"
|
||||
|
||||
|
@ -10,4 +14,14 @@ urlpatterns = [
|
|||
PremiumViewAttributesView.as_view(),
|
||||
name="premium_view_attributes",
|
||||
),
|
||||
re_path(
|
||||
r"(?P<slug>[-\w]+)/export-public-view/$",
|
||||
ExportPublicViewView.as_view(),
|
||||
name="export_public_view",
|
||||
),
|
||||
re_path(
|
||||
r"get-public-view-export/(?P<job_id>[-\w.]+)/$",
|
||||
ExportPublicViewJobView.as_view(),
|
||||
name="get_public_view_export",
|
||||
),
|
||||
]
|
||||
|
|
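For reference, the two premium endpoints registered above can be resolved by name, as the tests later in this diff do. A small sketch; the slug and signed job id values are placeholders:

```python
from django.shortcuts import reverse

# POST here (optionally with the Baserow-View-Authorization header for
# password protected views) to start a public export.
start_url = reverse(
    "api:premium:view:export_public_view", kwargs={"slug": "some-view-slug"}
)

# Poll here with the *signed* job id returned by the endpoint above.
status_url = reverse(
    "api:premium:view:get_public_view_export", kwargs={"job_id": "signed-job-id"}
)
```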
|
@ -1,25 +1,60 @@
|
|||
from typing import Dict
|
||||
from urllib.request import Request
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from baserow_premium.api.views.errors import (
|
||||
ERROR_CANNOT_UPDATE_PREMIUM_ATTRIBUTES_ON_TEMPLATE,
|
||||
)
|
||||
from baserow_premium.api.views.exceptions import CannotUpdatePremiumAttributesOnTemplate
|
||||
from baserow_premium.api.views.serializers import UpdatePremiumViewAttributesSerializer
|
||||
from baserow_premium.api.views.signers import export_public_view_signer
|
||||
from baserow_premium.license.features import PREMIUM
|
||||
from baserow_premium.license.handler import LicenseHandler
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||
from itsdangerous.exc import BadData
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from baserow.api.decorators import map_exceptions, validate_body
|
||||
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.contrib.database.api.views.errors import ERROR_VIEW_DOES_NOT_EXIST
|
||||
from baserow.contrib.database.api.export.errors import ERROR_EXPORT_JOB_DOES_NOT_EXIST
|
||||
from baserow.contrib.database.api.export.serializers import ExportJobSerializer
|
||||
from baserow.contrib.database.api.export.views import (
|
||||
CreateExportJobSerializer,
|
||||
_validate_options,
|
||||
)
|
||||
from baserow.contrib.database.api.fields.errors import (
|
||||
ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
)
|
||||
from baserow.contrib.database.api.views.errors import (
|
||||
ERROR_NO_AUTHORIZATION_TO_PUBLICLY_SHARED_VIEW,
|
||||
ERROR_VIEW_DOES_NOT_EXIST,
|
||||
ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
|
||||
)
|
||||
from baserow.contrib.database.api.views.serializers import ViewSerializer
|
||||
from baserow.contrib.database.api.views.utils import get_public_view_authorization_token
|
||||
from baserow.contrib.database.export.exceptions import ExportJobDoesNotExistException
|
||||
from baserow.contrib.database.export.handler import ExportHandler
|
||||
from baserow.contrib.database.export.models import ExportJob
|
||||
from baserow.contrib.database.fields.exceptions import (
|
||||
FilterFieldNotFound,
|
||||
OrderByFieldNotFound,
|
||||
OrderByFieldNotPossible,
|
||||
)
|
||||
from baserow.contrib.database.views.actions import UpdateViewActionType
|
||||
from baserow.contrib.database.views.exceptions import ViewDoesNotExist
|
||||
from baserow.contrib.database.views.exceptions import (
|
||||
NoAuthorizationToPubliclySharedView,
|
||||
ViewDoesNotExist,
|
||||
ViewFilterTypeDoesNotExist,
|
||||
ViewFilterTypeNotAllowedForField,
|
||||
)
|
||||
from baserow.contrib.database.views.handler import ViewHandler
|
||||
from baserow.contrib.database.views.registries import view_type_registry
|
||||
from baserow.core.action.registries import action_type_registry
|
||||
|
@ -98,3 +133,125 @@ class PremiumViewAttributesView(APIView):
|
|||
view, ViewSerializer, context={"user": request.user}
|
||||
)
|
||||
return Response(serializer.data)
|
||||
|
||||
|
||||
class ExportPublicViewView(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="slug",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.STR,
|
||||
description="Select the view you want to export.",
|
||||
),
|
||||
],
|
||||
tags=["Database table view export"],
|
||||
operation_id="export_publicly_shared_view",
|
||||
description=(
|
||||
"Creates and starts a new export job for a publicly shared view given "
|
||||
"some exporter options. Returns an error if the view doesn't support "
|
||||
"exporting."
|
||||
"\n\nThis is a **premium** feature."
|
||||
),
|
||||
request=CreateExportJobSerializer,
|
||||
responses={
|
||||
200: ExportJobSerializer,
|
||||
400: get_error_schema(
|
||||
[
|
||||
"ERROR_REQUEST_BODY_VALIDATION",
|
||||
"ERROR_TABLE_ONLY_EXPORT_UNSUPPORTED",
|
||||
"ERROR_VIEW_UNSUPPORTED_FOR_EXPORT_TYPE",
|
||||
"ERROR_VIEW_NOT_IN_TABLE",
|
||||
"ERROR_FILTER_FIELD_NOT_FOUND",
|
||||
"ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST",
|
||||
"ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_FOUND",
|
||||
"ERROR_ORDER_BY_FIELD_NOT_POSSIBLE",
|
||||
]
|
||||
),
|
||||
404: get_error_schema(["ERROR_VIEW_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@transaction.atomic
|
||||
@map_exceptions(
|
||||
{
|
||||
UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
|
||||
ViewDoesNotExist: ERROR_VIEW_DOES_NOT_EXIST,
|
||||
NoAuthorizationToPubliclySharedView: ERROR_NO_AUTHORIZATION_TO_PUBLICLY_SHARED_VIEW,
|
||||
FilterFieldNotFound: ERROR_FILTER_FIELD_NOT_FOUND,
|
||||
ViewFilterTypeDoesNotExist: ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST,
|
||||
ViewFilterTypeNotAllowedForField: ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD,
|
||||
OrderByFieldNotFound: ERROR_ORDER_BY_FIELD_NOT_FOUND,
|
||||
OrderByFieldNotPossible: ERROR_ORDER_BY_FIELD_NOT_POSSIBLE,
|
||||
}
|
||||
)
|
||||
def post(self, request, slug):
|
||||
"""
|
||||
Starts a new export job for the provided table, view, export type and options.
|
||||
"""
|
||||
|
||||
view_handler = ViewHandler()
|
||||
authorization_token = get_public_view_authorization_token(request)
|
||||
view = view_handler.get_public_view_by_slug(
|
||||
request.user, slug, authorization_token=authorization_token
|
||||
).specific
|
||||
table = view.table
|
||||
option_data = _validate_options(request.data)
|
||||
|
||||
# Delete the provided view ID because it can be identified using the slug
|
||||
# path parameter.
|
||||
del option_data["view_id"]
|
||||
|
||||
job = ExportHandler.create_and_start_new_job(None, table, view, option_data)
|
||||
serialized_job = ExportJobSerializer(job).data
|
||||
serialized_job["id"] = export_public_view_signer.dumps(serialized_job["id"])
|
||||
return Response(serialized_job)
|
||||
|
||||
|
||||
class ExportPublicViewJobView(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="job_id",
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.STR,
|
||||
description="The signed job id to lookup information about.",
|
||||
)
|
||||
],
|
||||
tags=["Database table view export"],
|
||||
operation_id="get_public_view_export_job",
|
||||
description=(
|
||||
"Returns information such as export progress and state or the url of the "
|
||||
"exported file for the specified export job, only if the requesting user "
|
||||
"has access."
|
||||
"\n\nThis is a **premium** feature."
|
||||
),
|
||||
request=None,
|
||||
responses={
|
||||
200: ExportJobSerializer,
|
||||
404: get_error_schema(["ERROR_EXPORT_JOB_DOES_NOT_EXIST"]),
|
||||
},
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
ExportJobDoesNotExistException: ERROR_EXPORT_JOB_DOES_NOT_EXIST,
|
||||
BadData: ERROR_EXPORT_JOB_DOES_NOT_EXIST,
|
||||
}
|
||||
)
|
||||
def get(self, request, job_id):
|
||||
"""Retrieves the specified export job by serialized id."""
|
||||
|
||||
job_id = export_public_view_signer.loads(job_id)
|
||||
|
||||
try:
|
||||
job = ExportJob.objects.get(id=job_id, user=None)
|
||||
except ExportJob.DoesNotExist:
|
||||
raise ExportJobDoesNotExistException()
|
||||
|
||||
serialized_job = ExportJobSerializer(job).data
|
||||
serialized_job["id"] = export_public_view_signer.dumps(serialized_job["id"])
|
||||
return Response(serialized_job)
|
||||
|
|
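Putting the two views above together, the anonymous export flow is: POST the exporter options to the slug endpoint, receive a job whose `id` is signed, then poll the job endpoint with that signed id until the state is `finished`. A condensed sketch mirroring the tests below; the pytest-style `api_client` fixture and a `grid_view` object are assumed:

```python
from django.shortcuts import reverse


def export_public_grid_view(api_client, grid_view):
    """Illustrative helper: start a public export and fetch its status once."""
    response = api_client.post(
        reverse(
            "api:premium:view:export_public_view", kwargs={"slug": grid_view.slug}
        ),
        data={
            "exporter_type": "csv",
            "export_charset": "utf-8",
            "csv_include_header": "True",
            "csv_column_separator": ",",
        },
        format="json",
    )
    signed_job_id = response.json()["id"]  # signed, not the raw primary key

    status = api_client.get(
        reverse(
            "api:premium:view:get_public_view_export",
            kwargs={"job_id": signed_job_id},
        )
    ).json()
    return status  # contains state/status, progress_percentage and url
```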
|
@ -10,6 +10,7 @@ from baserow.contrib.database.api.export.serializers import (
|
|||
)
|
||||
from baserow.contrib.database.export.file_writer import FileWriter, QuerysetSerializer
|
||||
from baserow.contrib.database.export.registries import TableExporter
|
||||
from baserow.contrib.database.export.utils import view_is_publicly_exportable
|
||||
from baserow.contrib.database.views.view_types import GridViewType
|
||||
|
||||
from ..license.features import PREMIUM
|
||||
|
@ -23,9 +24,15 @@ class PremiumTableExporter(TableExporter):
|
|||
Checks if the related user has access to a valid license before the job is created.
|
||||
"""
|
||||
|
||||
LicenseHandler.raise_if_user_doesnt_have_feature(
|
||||
PREMIUM, user, table.database.workspace
|
||||
)
|
||||
if view_is_publicly_exportable(user, view):
|
||||
# No need to check if the workspace has the license if the view is
|
||||
# publicly exportable because then we should always allow it, regardless
|
||||
# of the license.
|
||||
pass
|
||||
else:
|
||||
LicenseHandler.raise_if_user_doesnt_have_feature(
|
||||
PREMIUM, user, table.database.workspace
|
||||
)
|
||||
super().before_job_create(user, table, view, export_options)
|
||||
|
||||
|
||||
|
|
|
@ -1003,6 +1003,7 @@ def test_get_public_calendar_view_with_single_select_and_cover(
|
|||
"type": "calendar",
|
||||
"date_field": date_field.id,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
"ical_public": False,
|
||||
"ical_feed_url": calendar_view.ical_feed_url,
|
||||
},
|
||||
|
|
|
@ -1587,6 +1587,7 @@ def test_get_public_kanban_without_with_single_select_and_cover(
|
|||
"card_cover_image_field": None,
|
||||
"single_select_field": None,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -1695,6 +1696,7 @@ def test_get_public_kanban_view_with_single_select_and_cover(
|
|||
"card_cover_image_field": cover_field.id,
|
||||
"single_select_field": single_select_field.id,
|
||||
"show_logo": True,
|
||||
"allow_public_export": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,41 @@ def test_premium_view_attributes_view(view_type, api_client, premium_data_fixtur
|
|||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.json()["show_logo"] is False
|
||||
response_json = response.json()
|
||||
assert response_json["show_logo"] is False
|
||||
assert response_json["allow_public_export"] is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
@pytest.mark.parametrize("view_type", view_type_registry.registry.keys())
|
||||
def test_premium_view_attributes_view_allow_public_export(
|
||||
view_type, api_client, premium_data_fixture
|
||||
):
|
||||
user, token = premium_data_fixture.create_user_and_token(
|
||||
email="test@test.nl",
|
||||
password="password",
|
||||
first_name="Test1",
|
||||
has_active_premium_license=True,
|
||||
)
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
view = ViewHandler().create_view(
|
||||
user=user, table=table, type_name=view_type, name=view_type
|
||||
)
|
||||
|
||||
response = api_client.patch(
|
||||
reverse(
|
||||
"api:premium:view:premium_view_attributes", kwargs={"view_id": view.id}
|
||||
),
|
||||
data={"allow_public_export": True},
|
||||
format="json",
|
||||
**{"HTTP_AUTHORIZATION": f"JWT {token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
response_json = response.json()
|
||||
assert response_json["show_logo"] is True
|
||||
assert response_json["allow_public_export"] is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
|
@ -0,0 +1,837 @@
|
|||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.shortcuts import reverse
|
||||
from django.test.utils import override_settings
|
||||
|
||||
import pytest
|
||||
from baserow_premium.api.views.signers import export_public_view_signer
|
||||
from rest_framework.status import (
|
||||
HTTP_200_OK,
|
||||
HTTP_400_BAD_REQUEST,
|
||||
HTTP_401_UNAUTHORIZED,
|
||||
HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
from baserow.contrib.database.export.models import ExportJob
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_of_not_existing_view(
|
||||
api_client, premium_data_fixture
|
||||
):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view", kwargs={"slug": "does_not_exist"}
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response_json["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_of_not_public_view(api_client, premium_data_fixture):
|
||||
grid = premium_data_fixture.create_grid_view(public=False)
|
||||
|
||||
response = api_client.post(
|
||||
reverse("api:premium:view:export_public_view", kwargs={"slug": grid.slug}),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response_json["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_view_password(
|
||||
api_client, premium_data_fixture
|
||||
):
|
||||
(
|
||||
grid,
|
||||
public_view_token,
|
||||
) = premium_data_fixture.create_public_password_protected_grid_view_with_token(
|
||||
password="12345678",
|
||||
allow_public_export=True,
|
||||
)
|
||||
|
||||
response = api_client.post(
|
||||
reverse("api:premium:view:export_public_view", kwargs={"slug": grid.slug}),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
assert response.status_code == HTTP_401_UNAUTHORIZED
|
||||
|
||||
response = api_client.post(
|
||||
reverse("api:premium:view:export_public_view", kwargs={"slug": grid.slug}),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
HTTP_BASEROW_VIEW_AUTHORIZATION=f"JWT {public_view_token}",
|
||||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
grid = premium_data_fixture.create_grid_view(public=True, allow_public_export=True)
|
||||
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view", kwargs={"slug": grid.slug}
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
job = ExportJob.objects.all().first()
|
||||
del response_json["created_at"]
|
||||
|
||||
job_id = response_json.pop("id")
|
||||
assert export_public_view_signer.loads(job_id) == job.id
|
||||
|
||||
assert response_json == {
|
||||
"table": grid.table_id,
|
||||
"view": grid.id,
|
||||
"exporter_type": "csv",
|
||||
"state": "pending",
|
||||
"status": "pending",
|
||||
"exported_file_name": None,
|
||||
"progress_percentage": 0.0,
|
||||
"url": None,
|
||||
}
|
||||
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
|
||||
job_id = response_json.pop("id")
|
||||
del response_json["created_at"]
|
||||
assert export_public_view_signer.loads(job_id) == job.id
|
||||
filename = response_json["exported_file_name"]
|
||||
assert response_json == {
|
||||
"table": grid.table_id,
|
||||
"view": grid.id,
|
||||
"exporter_type": "csv",
|
||||
"state": "finished",
|
||||
"status": "finished",
|
||||
"exported_file_name": filename,
|
||||
"progress_percentage": 100.0,
|
||||
"url": f"http://localhost:8000/media/export_files/{filename}",
|
||||
}
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_view_visible_fields(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
hidden_text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=2
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "Something",
|
||||
f"field_{hidden_text_field.id}": "Should be hidden",
|
||||
},
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=hidden_text_field, hidden=True
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field\n1,Something\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_view_filters(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
premium_data_fixture.create_view_filter(
|
||||
view=grid_view, field=text_field, type="contains", value="world"
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field\n2,world\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_ad_hoc_filters(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"filters": {
|
||||
"filter_type": "AND",
|
||||
"filters": [
|
||||
{
|
||||
"type": "contains",
|
||||
"field": text_field.id,
|
||||
"value": "world",
|
||||
}
|
||||
],
|
||||
"groups": [],
|
||||
},
|
||||
"order_by": "",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field\n2,world\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_malformed_ad_hoc_filters(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"filters": {"test": ""},
|
||||
"order_by": "",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_ad_hoc_order_by(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"filters": None,
|
||||
"order_by": f"-field_{text_field.id}",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
print(response.json())
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field\n2,world\n1,hello\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_malformed_order_by(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"filters": None,
|
||||
"order_by": f"TEST",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_ORDER_BY_FIELD_NOT_FOUND"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_include_visible_fields_in_order(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
text_field_2 = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field2", order=2
|
||||
)
|
||||
text_field_3 = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field3", order=2
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"fields": [text_field_2.id, text_field.id],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field2,text_field\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_respecting_include_visible_fields_in_order_wrong_field_id(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
text_field_2 = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field2", order=2
|
||||
)
|
||||
text_field_3 = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field3", order=2
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"fields": [9999999, text_field_2.id, text_field.id],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
job_id = response.json().pop("id")
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export", kwargs={"job_id": job_id}
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
filename = response_json["exported_file_name"]
|
||||
|
||||
file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
|
||||
assert file_path.isfile()
|
||||
expected = "\ufeff" "id,text_field2,text_field\n"
|
||||
with open(file_path, "r", encoding="utf-8") as written_file:
|
||||
assert written_file.read() == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_error_hidden_fields_in_order_by(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
hidden_text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=2
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=hidden_text_field, hidden=True
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"order_by": f"field_{hidden_text_field.id}",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_ORDER_BY_FIELD_NOT_FOUND"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_can_sort_by_manually_hidden_view(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
hidden_text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=2
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=text_field, hidden=False
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=hidden_text_field, hidden=False
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"order_by": f"field_{hidden_text_field.id}",
|
||||
"fields": [text_field.id],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_create_public_view_export_error_hidden_fields_in_filters(
|
||||
api_client, premium_data_fixture, django_capture_on_commit_callbacks, tmpdir
|
||||
):
|
||||
storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
|
||||
|
||||
user = premium_data_fixture.create_user()
|
||||
table = premium_data_fixture.create_database_table(user=user)
|
||||
text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=1
|
||||
)
|
||||
grid_view = premium_data_fixture.create_grid_view(
|
||||
table=table, public=True, allow_public_export=True
|
||||
)
|
||||
hidden_text_field = premium_data_fixture.create_text_field(
|
||||
table=table, name="text_field", order=2
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=text_field, hidden=False
|
||||
)
|
||||
premium_data_fixture.create_grid_view_field_option(
|
||||
grid_view=grid_view, field=hidden_text_field, hidden=True
|
||||
)
|
||||
model = table.get_model()
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{hidden_text_field.id}": "hello",
|
||||
},
|
||||
)
|
||||
model.objects.create(
|
||||
**{
|
||||
f"field_{hidden_text_field.id}": "world",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("baserow.core.storage.get_default_storage") as get_storage_mock:
|
||||
get_storage_mock.return_value = storage
|
||||
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
response = api_client.post(
|
||||
reverse(
|
||||
"api:premium:view:export_public_view",
|
||||
kwargs={"slug": grid_view.slug},
|
||||
),
|
||||
data={
|
||||
"exporter_type": "csv",
|
||||
"export_charset": "utf-8",
|
||||
"csv_include_header": "True",
|
||||
"csv_column_separator": ",",
|
||||
"filters": {
|
||||
"filter_type": "AND",
|
||||
"filters": [
|
||||
{
|
||||
"type": "contains",
|
||||
"field": hidden_text_field.id,
|
||||
"value": "world",
|
||||
}
|
||||
],
|
||||
"groups": [],
|
||||
},
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert response.json()["error"] == "ERROR_FILTER_FIELD_NOT_FOUND"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_get_public_view_export_job_not_found(api_client, premium_data_fixture):
|
||||
response = api_client.get(
|
||||
reverse(
|
||||
"api:premium:view:get_public_view_export",
|
||||
kwargs={"job_id": export_public_view_signer.dumps(0)},
|
||||
),
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response_json["error"] == "ERROR_EXPORT_JOB_DOES_NOT_EXIST"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(DEBUG=True)
|
||||
def test_get_public_view_export_invalid_signed_id(api_client, premium_data_fixture):
|
||||
response = api_client.get(
|
||||
reverse("api:premium:view:get_public_view_export", kwargs={"job_id": "test"}),
|
||||
)
|
||||
response_json = response.json()
|
||||
assert response.status_code == HTTP_404_NOT_FOUND
|
||||
assert response_json["error"] == "ERROR_EXPORT_JOB_DOES_NOT_EXIST"
|
|
@ -1171,6 +1171,7 @@ def test_get_public_timeline_view(api_client, premium_data_fixture):
                "id": PUBLIC_PLACEHOLDER_ENTITY_ID,
            },
            "show_logo": True,
            "allow_public_export": False,
            "type": "timeline",
            "start_date_field": start_date_field.id,
            "end_date_field": end_date_field.id,
@ -1,88 +0,0 @@
<template>
  <div
    v-tooltip="tooltipText"
    class="view-sharing__option"
    :class="{ 'view-sharing__option--disabled': !hasPremiumFeatures }"
    @click="click"
  >
    <SwitchInput
      small
      :value="!view.show_logo"
      :disabled="!hasPremiumFeatures"
      @input="update"
    >
      <img src="@baserow/modules/core/static/img/baserow-icon.svg" />
      <span>
        {{ $t('shareLinkOptions.baserowLogo.label') }}
      </span>
      <i v-if="!hasPremiumFeatures" class="deactivated-label iconoir-lock" />
    </SwitchInput>

    <PremiumModal
      v-if="!hasPremiumFeatures"
      ref="premiumModal"
      :workspace="workspace"
      :name="$t('shareLinkOptions.baserowLogo.premiumModalName')"
    ></PremiumModal>
  </div>
</template>

<script>
import { mapGetters } from 'vuex'
import ViewPremiumService from '@baserow_premium/services/view'
import { notifyIf } from '@baserow/modules/core/utils/error'
import PremiumModal from '@baserow_premium/components/PremiumModal'
import PremiumFeatures from '@baserow_premium/features'

export default {
  name: 'BaserowLogoShareLinkOption',
  components: { PremiumModal },

  props: {
    view: {
      type: Object,
      required: true,
    },
  },
  computed: {
    ...mapGetters({
      additionalUserData: 'auth/getAdditionalUserData',
    }),
    workspace() {
      return this.$store.getters['application/get'](this.view.table.database_id)
        .workspace
    },
    hasPremiumFeatures() {
      return this.$hasFeature(PremiumFeatures.PREMIUM, this.workspace.id)
    },
    tooltipText() {
      if (this.hasPremiumFeatures) {
        return null
      } else {
        return this.$t('premium.deactivated')
      }
    },
  },
  methods: {
    async update(value) {
      const showLogo = !value
      try {
        // We are being optimistic that the request will succeed.
        this.$emit('update-view', { ...this.view, show_logo: showLogo })
        await ViewPremiumService(this.$client).update(this.view.id, {
          show_logo: showLogo,
        })
      } catch (error) {
        // In case it didn't we will roll back the change.
        this.$emit('update-view', { ...this.view, show_logo: !showLogo })
        notifyIf(error, 'view')
      }
    },
    click() {
      if (!this.hasPremiumFeatures) {
        this.$refs.premiumModal.show()
      }
    },
  },
}
</script>
@ -0,0 +1,113 @@
<template>
  <div>
    <div
      v-tooltip="tooltipText"
      class="view-sharing__option"
      :class="{ 'view-sharing__option--disabled': !hasPremiumFeatures }"
      @click="click"
    >
      <SwitchInput
        small
        :value="!view.show_logo"
        :disabled="!hasPremiumFeatures"
        @input="update('show_logo', !$event)"
      >
        <img src="@baserow/modules/core/static/img/baserow-icon.svg" />
        <span>
          {{ $t('shareLinkOptions.baserowLogo.label') }}
        </span>
        <i v-if="!hasPremiumFeatures" class="deactivated-label iconoir-lock" />
      </SwitchInput>

      <PremiumModal
        v-if="!hasPremiumFeatures"
        ref="premiumModal"
        :workspace="workspace"
        :name="$t('shareLinkOptions.baserowLogo.premiumModalName')"
      ></PremiumModal>
    </div>
    <div
      v-if="hasValidExporter"
      v-tooltip="tooltipText"
      class="view-sharing__option"
      :class="{ 'view-sharing__option--disabled': !hasPremiumFeatures }"
      @click="click"
    >
      <SwitchInput
        small
        :value="view.allow_public_export"
        :disabled="!hasPremiumFeatures"
        @input="update('allow_public_export', $event)"
      >
        <i class="iconoir iconoir-share-ios"></i>
        <span>
          {{ $t('shareLinkOptions.allowPublicExportLabel') }}
        </span>
        <i v-if="!hasPremiumFeatures" class="deactivated-label iconoir-lock" />
      </SwitchInput>
    </div>
  </div>
</template>

<script>
import { mapGetters } from 'vuex'
import ViewPremiumService from '@baserow_premium/services/view'
import { notifyIf } from '@baserow/modules/core/utils/error'
import PremiumModal from '@baserow_premium/components/PremiumModal'
import PremiumFeatures from '@baserow_premium/features'
import viewTypeHasExporterTypes from '@baserow/modules/database/utils/viewTypeHasExporterTypes'

export default {
  name: 'PremiumViewOptions',
  components: { PremiumModal },

  props: {
    view: {
      type: Object,
      required: true,
    },
  },
  computed: {
    ...mapGetters({
      additionalUserData: 'auth/getAdditionalUserData',
    }),
    workspace() {
      return this.$store.getters['application/get'](this.view.table.database_id)
        .workspace
    },
    hasPremiumFeatures() {
      return this.$hasFeature(PremiumFeatures.PREMIUM, this.workspace.id)
    },
    tooltipText() {
      if (this.hasPremiumFeatures) {
        return null
      } else {
        return this.$t('premium.deactivated')
      }
    },
    hasValidExporter() {
      return viewTypeHasExporterTypes(this.view.type, this.$registry)
    },
  },
  methods: {
    async update(key, value) {
      try {
        // We are being optimistic that the request will succeed.
        this.$emit('update-view', { ...this.view, [key]: value })
        await ViewPremiumService(this.$client).update(this.view.id, {
          [key]: value,
        })
      } catch (error) {
        // In case it didn't we will roll back the change.
        this.$emit('update-view', { ...this.view, [key]: !value })
        notifyIf(error, 'view')
      }
    },
    click() {
      if (!this.hasPremiumFeatures) {
        this.$refs.premiumModal.show()
      }
    },
  },
}
</script>
@ -0,0 +1,125 @@
<template>
  <li
    v-if="view.allow_public_export"
    class="header__filter-item header__filter-item--no-margin-left"
  >
    <a
      ref="target"
      class="header__filter-link"
      @click="$refs.context.toggle($event.target, 'bottom', 'left', 4)"
    >
      <i class="header__filter-icon baserow-icon-more-vertical"></i>
    </a>
    <Context ref="context">
      <ul class="context__menu">
        <li class="context__menu-item">
          <a
            class="context__menu-item-link"
            @click=";[$refs.exportModal.show(), $refs.context.hide()]"
          >
            <i class="context__menu-item-icon iconoir-share-ios"></i>
            {{ $t('publicViewExport.export') }}
          </a>
        </li>
      </ul>
    </Context>
    <ExportTableModal
      ref="exportModal"
      :view="view"
      :table="table"
      :database="database"
      :start-export="startExport"
      :get-job="getJob"
      :enable-views-dropdown="false"
      :ad-hoc-filtering="true"
      :ad-hoc-sorting="true"
      :ad-hoc-fields="visibleOrderedFields"
    ></ExportTableModal>
  </li>
</template>

<script>
import ExportTableModal from '@baserow/modules/database/components/export/ExportTableModal'
import PublicViewExportService from '@baserow_premium/services/publicViewExport'
import {
  createFiltersTree,
  getOrderBy,
} from '@baserow/modules/database/utils/view'

export default {
  name: 'PublicViewExport',
  components: { ExportTableModal },
  props: {
    database: {
      type: Object,
      required: true,
    },
    table: {
      type: Object,
      required: true,
    },
    view: {
      type: Object,
      required: true,
    },
    fields: {
      type: Array,
      required: true,
    },
    isPublicView: {
      type: Boolean,
      required: true,
    },
    storePrefix: {
      type: String,
      required: true,
    },
  },
  computed: {
    visibleOrderedFields() {
      const viewType = this.$registry.get('view', this.view.type)
      return viewType.getVisibleFieldsInOrder(
        this,
        this.fields,
        this.view,
        this.storePrefix
      )
    },
  },
  methods: {
    startExport({ view, values, client }) {
      // There is no need to include the `view_id` in the body because we're already
      // providing the slug as a path parameter.
      delete values.view_id

      let filters = null
      const filterTree = createFiltersTree(
        this.view.filter_type,
        this.view.filters,
        this.view.filter_groups
      )
      filters = filterTree.getFiltersTreeSerialized()
      values.filters = filters

      const orderBy = getOrderBy(this.view, true)
      values.order_by = orderBy

      values.fields =
        this.visibleOrderedFields === null
          ? null
          : this.visibleOrderedFields.map((f) => f.id)

      const publicAuthToken =
        this.$store.getters['page/view/public/getAuthToken']
      return PublicViewExportService(client).export({
        slug: view.slug,
        values,
        publicAuthToken,
      })
    },
    getJob(job, client) {
      return PublicViewExportService(client).get(job.id)
    },
  },
}
</script>
@ -276,7 +276,8 @@
      "baserowLogo": {
        "label": "Hide Baserow logo on shared view",
        "premiumModalName": "public logo removal"
      }
      },
      "allowPublicExportLabel": "Allow export on shared view"
    },
    "viewsContext": {
      "personal": "Personal"

@ -337,5 +338,8 @@
    "textDescription": "Generates free text based on the prompt.",
    "choice": "Choice",
    "choiceDescription": "Chooses only one of the field options."
  },
  "publicViewExport": {
    "export": "Export"
  }
}
@ -1,7 +1,8 @@
import { BaserowPlugin } from '@baserow/modules/core/plugins'
import Impersonate from '@baserow_premium/components/sidebar/Impersonate'
import HighestLicenseTypeBadge from '@baserow_premium/components/sidebar/HighestLicenseTypeBadge'
import BaserowLogoShareLinkOption from '@baserow_premium/components/views/BaserowLogoShareLinkOption'
import PremiumViewOptions from '@baserow_premium/components/views/PremiumViewOptions'
import PublicViewExport from '@baserow_premium/components/views/PublicViewExport'

export class PremiumPlugin extends BaserowPlugin {
  static getType() {

@ -17,7 +18,11 @@ export class PremiumPlugin extends BaserowPlugin {
  }

  getAdditionalShareLinkOptions() {
    return [BaserowLogoShareLinkOption]
    return [PremiumViewOptions]
  }

  getAdditionalTableHeaderComponents(view, isPublic) {
    return isPublic ? [PublicViewExport] : []
  }

  hasFeature(feature, forSpecificWorkspace) {
@ -0,0 +1,24 @@
import addPublicAuthTokenHeader from '@baserow/modules/database/utils/publicView'

export default (client) => {
  return {
    export({ slug, values, publicAuthToken = null }) {
      const config = {}

      if (publicAuthToken) {
        addPublicAuthTokenHeader(config, publicAuthToken)
      }

      return client.post(
        `/database/view/${slug}/export-public-view/`,
        {
          ...values,
        },
        config
      )
    },
    get(jobId) {
      return client.get(`/database/view/get-public-view-export/${jobId}/`)
    },
  }
}
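Not part of this commit, but as a rough usage sketch: the service above can be driven from any code that has access to an axios-style client. The helper below is hypothetical, and it assumes the job payload returned by the backend exposes a `state` field alongside `exported_file_name`; the `id` returned by the export call is the signed job id, which is why it can be passed straight back to `service.get()`.

// Hypothetical helper, not part of this commit: starts a public view export
// and polls the signed job id until the backend reports a terminal state.
import PublicViewExportService from '@baserow_premium/services/publicViewExport'

export async function exportPublicView(client, slug, publicAuthToken = null) {
  const service = PublicViewExportService(client)
  // Kick off the CSV export for the publicly shared view.
  const { data: job } = await service.export({
    slug,
    values: {
      exporter_type: 'csv',
      export_charset: 'utf-8',
      csv_include_header: 'True',
      csv_column_separator: ',',
    },
    publicAuthToken,
  })
  // Poll until the job is no longer running (assumes a `state` field).
  let current = job
  while (!['finished', 'failed', 'cancelled'].includes(current.state)) {
    await new Promise((resolve) => setTimeout(resolve, 1000))
    const { data } = await service.get(current.id)
    current = data
  }
  return current
}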
@ -16,7 +16,12 @@ class PremiumTableExporterType extends TableExporterType {
  }

  isDeactivated(workspaceId) {
    return !this.app.$hasFeature(PremiumFeatures.PREMIUM, workspaceId)
    // If the user is looking at a publicly shared view, then the feature must
    // never be deactivated because the check can't be done properly.
    const isPublic = this.app.store.getters['page/view/public/getIsPublic']
    return (
      !this.app.$hasFeature(PremiumFeatures.PREMIUM, workspaceId) && !isPublic
    )
  }
}
@ -114,6 +114,14 @@ export class BaserowPlugin extends Registerable {
    return []
  }

  /**
   * Every registered plugin can display multiple components at the head of the
   * table header. These will be positioned directly next to the name of the view.
   */
  getAdditionalTableHeaderComponents(view, isPublic) {
    return []
  }

  /**
   * Every registered plugin can display multiple additional context items in the
   * application context displayed by the sidebar when opening the context menu of a
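To illustrate the hook documented above (a hedged sketch, not code from this commit): a third-party plugin could contribute its own header component roughly like this, where `MyHeaderButton` and the `@my_module` alias are assumed names used only for the example.

// Hypothetical plugin sketch showing the new getAdditionalTableHeaderComponents
// hook in use; MyHeaderButton is an assumed component, not part of this commit.
import { BaserowPlugin } from '@baserow/modules/core/plugins'
import MyHeaderButton from '@my_module/components/MyHeaderButton'

export class MyPlugin extends BaserowPlugin {
  static getType() {
    return 'my-plugin'
  }

  getAdditionalTableHeaderComponents(view, isPublic) {
    // Only contribute the button on publicly shared views; an empty array
    // means this plugin adds nothing to the table header.
    return isPublic ? [MyHeaderButton] : []
  }
}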
@ -3,6 +3,7 @@
  <div class="row">
    <div class="col col-12">
      <FormGroup
        v-if="enableViewsDropdown"
        small-label
        :label="$t('exportTableForm.viewLabel')"
        required

@ -75,6 +76,11 @@ export default {
      type: Boolean,
      required: true,
    },
    enableViewsDropdown: {
      type: Boolean,
      required: false,
      default: true,
    },
  },
  data() {
    return {
@ -13,6 +13,7 @@
    :view="view"
    :views="views"
    :loading="loading"
    :enable-views-dropdown="enableViewsDropdown"
    @submitted="submitted"
    @values-changed="valuesChanged"
  >

@ -56,6 +57,25 @@ export default {
      required: false,
      default: null,
    },
    startExport: {
      type: Function,
      required: false,
      default: function ({ table, values, client }) {
        return ExporterService(client).export(table.id, values)
      },
    },
    getJob: {
      type: Function,
      required: false,
      default: function (job, client) {
        return ExporterService(client).get(job.id)
      },
    },
    enableViewsDropdown: {
      type: Boolean,
      required: false,
      default: true,
    },
  },
  data() {
    return {

@ -121,10 +141,12 @@ export default {
      this.hideError()

      try {
        const { data } = await ExporterService(this.$client).export(
          this.table.id,
          values
        )
        const { data } = await this.startExport({
          table: this.table,
          view: this.view,
          values,
          client: this.$client,
        })
        this.job = data
        if (this.pollInterval !== null) {
          clearInterval(this.pollInterval)

@ -136,7 +158,7 @@ export default {
    },
    async getLatestJobInfo() {
      try {
        const { data } = await ExporterService(this.$client).get(this.job.id)
        const { data } = await this.getJob(this.job, this.$client)
        this.job = data
        if (!this.jobIsRunning) {
          this.loading = false
@ -33,7 +33,10 @@
          <span class="header__filter-name header__filter-name--forced">
            <EditableViewName ref="rename" :view="view"></EditableViewName>
          </span>
          <i class="header__sub-icon iconoir-nav-arrow-down"></i>
          <i
            v-if="views !== null"
            class="header__sub-icon iconoir-nav-arrow-down"
          ></i>
        </template>
        <template v-else-if="view !== null">
          {{ $t('table.chooseView') }}

@ -77,6 +80,21 @@
        >
        </ViewContext>
      </li>
      <component
        :is="component"
        v-for="(component, index) in getAdditionalTableHeaderComponents(
          view,
          isPublic
        )"
        :key="index"
        :database="database"
        :table="table"
        :view="view"
        :fields="fields"
        :is-public-view="isPublic"
        :store-prefix="storePrefix"
      >
      </component>
      <li
        v-if="
          hasSelectedView &&

@ -446,6 +464,17 @@ export default {
      const type = this.$registry.get('view', view.type)
      return type.getHeaderComponent()
    },
    getAdditionalTableHeaderComponents(view, isPublic) {
      const opts = Object.values(this.$registry.getAll('plugin'))
        .reduce((components, plugin) => {
          components = components.concat(
            plugin.getAdditionalTableHeaderComponents(view, isPublic)
          )
          return components
        }, [])
        .filter((component) => component !== null)
      return opts
    },
    /**
     * When the window resizes, we want to check if the content of the header is
     * overflowing. If that is the case, we want to make some space by removing some
@ -8,9 +8,11 @@ import FormView from '@baserow/modules/database/components/view/form/FormView'
import FormViewHeader from '@baserow/modules/database/components/view/form/FormViewHeader'
import { FileFieldType } from '@baserow/modules/database/fieldTypes'
import {
  filterVisibleFieldsFunction,
  isAdhocFiltering,
  isAdhocSorting,
  newFieldMatchesActiveSearchTerm,
  sortFieldsByOrderAndIdFunction,
} from '@baserow/modules/database/utils/view'
import { clone } from '@baserow/modules/core/utils/object'
import { getDefaultSearchModeFromEnv } from '@baserow/modules/database/utils/search'

@ -483,6 +485,16 @@ export class ViewType extends Registerable {
  isCompatibleWithDataSync(dataSync) {
    return true
  }

  getVisibleFieldsInOrder({ $store: store }, fields, view, storePrefix = '') {
    const fieldOptions =
      store.getters[
        storePrefix + 'view/' + this.getType() + '/getAllFieldOptions'
      ]
    return fields
      .filter(filterVisibleFieldsFunction(fieldOptions))
      .sort(sortFieldsByOrderAndIdFunction(fieldOptions, true))
  }
}

export class GridViewType extends ViewType {
@ -93,16 +93,14 @@ exports[`Public View Page Tests Can see a publicly shared grid view 1`] = `
            </span>
          </span>

          <i
            class="header__sub-icon iconoir-nav-arrow-down"
          />
          <!---->
        </a>

        <!---->
      </li>

      <!---->


      <li
        class="header__filter-item"
      >