Mirror of https://gitlab.com/bramw/baserow.git, synced 2025-04-17 18:32:35 +00:00

Merge branch '1473-make-datefield-timezone-aware' into 'develop'

Resolve "Make DateField timezone-aware"
Closes #1473
See merge request bramw/baserow!1264

Commit f258ba8ca6
74 changed files with 2691 additions and 2287 deletions
backend/
  src/baserow/
    api
    contrib/database/
      airtable
      api
      apps.py
      fields
      file_import
      formula/types
      migrations/
        0102_add_timezone_attrs_for_datetimes.py
        0103_fix_datetimes_timezones.py
        0104_remove_timezone_field.py
      views
    core
    test_utils
  tests/baserow/contrib/database/
    airtable
    api/
      airtable
      fields
      rows
    export
    field/
      test_created_on_field_type.py
      test_field_actions.py
      test_field_types.py
      test_last_modified_field_type.py
    view
changelog/entries/unreleased/
  feature
  refactor
premium/
  backend/tests/baserow_premium_tests/export
  web-frontend/modules/baserow_premium
web-frontend/
  modules/
    core
    database
  test/unit/database
@ -198,6 +198,8 @@ def validate_data_custom_fields(
|
|||
:type type_attribute_name: str
|
||||
:param partial: Whether the data is a partial update.
|
||||
:type partial: bool
|
||||
:param allow_empty_type: Whether the type can be empty.
|
||||
:type allow_empty_type: bool
|
||||
:raises RequestBodyValidationException: When the type is not a valid choice.
|
||||
:return: The validated data.
|
||||
:rtype: dict
|
||||
|
@ -286,6 +288,7 @@ def get_serializer_class(
|
|||
meta_ref_name=None,
|
||||
required_fields=None,
|
||||
base_mixins=None,
|
||||
meta_extra_kwargs=None,
|
||||
):
|
||||
"""
|
||||
Generates a model serializer based on the provided field names and field overrides.
|
||||
|
@ -307,6 +310,9 @@ def get_serializer_class(
|
|||
:type required_fields: list[str]
|
||||
:param base_mixins: An optional list of mixins that must be added to the serializer.
|
||||
:type base_mixins: list[serializers.Serializer]
|
||||
:param meta_extra_kwargs: An optional dict containing extra kwargs for the Meta
|
||||
class.
|
||||
:type meta_extra_kwargs: dict or None
|
||||
:return: The generated model serializer containing the provided fields.
|
||||
:rtype: ModelSerializer
|
||||
"""
|
||||
|
@ -326,15 +332,18 @@ def get_serializer_class(
|
|||
base_class = ModelSerializer
|
||||
|
||||
extends_meta = object
|
||||
meta_extra_kwargs = meta_extra_kwargs or {}
|
||||
|
||||
if hasattr(base_class, "Meta"):
|
||||
extends_meta = getattr(base_class, "Meta")
|
||||
field_names = list(extends_meta.fields) + list(field_names)
|
||||
meta_extra_kwargs.update(getattr(extends_meta, "extra_kwargs", {}))
|
||||
|
||||
class Meta(extends_meta):
|
||||
ref_name = meta_ref_name
|
||||
model = model_
|
||||
fields = list(field_names)
|
||||
extra_kwargs = meta_extra_kwargs
|
||||
|
||||
attrs = {"Meta": Meta}
|
||||
|
||||
|
|
|
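A minimal, standalone sketch of the Meta-merging pattern shown in the get_serializer_class hunk above: the generated serializer's Meta extends the base class's Meta, prepending its fields and merging its extra_kwargs. Django REST Framework is deliberately left out here; the classes and model below are illustrative stand-ins, not Baserow's real API.

# Illustrative stand-ins only; this is not Baserow's real serializer machinery.
def build_serializer_class(base_class, model_, field_names, meta_extra_kwargs=None):
    extends_meta = object
    meta_extra_kwargs = dict(meta_extra_kwargs or {})

    if hasattr(base_class, "Meta"):
        extends_meta = base_class.Meta
        # Fields declared on the base Meta come first, then the requested ones.
        field_names = list(extends_meta.fields) + list(field_names)
        meta_extra_kwargs.update(getattr(extends_meta, "extra_kwargs", {}))

    class Meta(extends_meta):
        model = model_
        fields = list(field_names)
        extra_kwargs = meta_extra_kwargs

    return type(f"{model_.__name__}Serializer", (base_class,), {"Meta": Meta})


class BaseSerializer:
    class Meta:
        fields = ["id"]
        extra_kwargs = {"id": {"read_only": True}}


class Book:
    pass


BookSerializer = build_serializer_class(BaseSerializer, Book, ["title"])
print(BookSerializer.Meta.fields)        # ['id', 'title']
print(BookSerializer.Meta.extra_kwargs)  # {'id': {'read_only': True}}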
@ -6,7 +6,6 @@ from django.core.exceptions import ValidationError
|
|||
|
||||
from loguru import logger
|
||||
from pytz import UTC
|
||||
from pytz import timezone as pytz_timezone
|
||||
|
||||
from baserow.contrib.database.export_serialized import DatabaseExportSerializedStructure
|
||||
from baserow.contrib.database.fields.models import (
|
||||
|
@ -36,7 +35,7 @@ from .registry import AirtableColumnType
|
|||
class TextAirtableColumnType(AirtableColumnType):
|
||||
type = "text"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
validator_name = raw_airtable_column.get("typeOptions", {}).get("validatorName")
|
||||
if validator_name == "url":
|
||||
return URLField()
|
||||
|
@ -51,7 +50,6 @@ class TextAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
if isinstance(baserow_field, (EmailField, URLField)):
|
||||
|
@ -67,14 +65,14 @@ class TextAirtableColumnType(AirtableColumnType):
|
|||
class MultilineTextAirtableColumnType(AirtableColumnType):
|
||||
type = "multilineText"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
return LongTextField()
|
||||
|
||||
|
||||
class RichTextTextAirtableColumnType(AirtableColumnType):
|
||||
type = "richText"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
return LongTextField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
|
@ -83,7 +81,6 @@ class RichTextTextAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
# We don't support rich text formatting yet, so this converts the value to
|
||||
|
@ -94,7 +91,7 @@ class RichTextTextAirtableColumnType(AirtableColumnType):
|
|||
class NumberAirtableColumnType(AirtableColumnType):
|
||||
type = "number"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
decimal_places = 0
|
||||
|
||||
|
@ -115,7 +112,6 @@ class NumberAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
if value is not None:
|
||||
|
@ -130,14 +126,14 @@ class NumberAirtableColumnType(AirtableColumnType):
|
|||
class RatingAirtableColumnType(AirtableColumnType):
|
||||
type = "rating"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, values, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, values):
|
||||
return RatingField(max_value=values.get("typeOptions", {}).get("max", 5))
|
||||
|
||||
|
||||
class CheckboxAirtableColumnType(AirtableColumnType):
|
||||
type = "checkbox"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
return BooleanField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
|
@ -146,7 +142,6 @@ class CheckboxAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
return "true" if value else "false"
|
||||
|
@ -155,9 +150,22 @@ class CheckboxAirtableColumnType(AirtableColumnType):
|
|||
class DateAirtableColumnType(AirtableColumnType):
|
||||
type = "date"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
return DateField(**import_airtable_date_type_options(type_options))
|
||||
# Check if a timezone is provided in the type options; if so, we might want
|
||||
# to use that timezone for the conversion later on.
|
||||
airtable_timezone = type_options.get("timeZone", None)
|
||||
date_show_tzinfo = type_options.get("shouldDisplayTimeZone", False)
|
||||
|
||||
# date_force_timezone=None is the equivalent of airtable_timezone="client".
|
||||
if airtable_timezone == "client":
|
||||
airtable_timezone = None
|
||||
|
||||
return DateField(
|
||||
date_show_tzinfo=date_show_tzinfo,
|
||||
date_force_timezone=airtable_timezone,
|
||||
**import_airtable_date_type_options(type_options),
|
||||
)
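A hedged, standalone sketch of the option mapping performed by the new to_baserow_field above: Airtable's timeZone and shouldDisplayTimeZone type options become date_force_timezone and date_show_tzinfo, and the special "client" timezone becomes None (use the viewer's timezone). The dict-returning helper below is illustrative only.

def airtable_date_options_to_baserow_kwargs(type_options: dict) -> dict:
    # Airtable's "client" timezone means "use the viewer's timezone"; Baserow
    # expresses the same thing with date_force_timezone=None.
    airtable_timezone = type_options.get("timeZone", None)
    if airtable_timezone == "client":
        airtable_timezone = None
    return {
        "date_force_timezone": airtable_timezone,
        "date_show_tzinfo": type_options.get("shouldDisplayTimeZone", False),
    }


print(airtable_date_options_to_baserow_kwargs({"timeZone": "Europe/Amsterdam"}))
# {'date_force_timezone': 'Europe/Amsterdam', 'date_show_tzinfo': False}
print(airtable_date_options_to_baserow_kwargs({"timeZone": "client", "shouldDisplayTimeZone": True}))
# {'date_force_timezone': None, 'date_show_tzinfo': True}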
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
self,
|
||||
|
@ -165,33 +173,14 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
# Check if a timezone is provided in the type options, if so, we might want
|
||||
# to use that timezone for the conversion later on.
|
||||
airtable_timezone = raw_airtable_column.get("typeOptions", {}).get(
|
||||
"timeZone", None
|
||||
)
|
||||
|
||||
# Baserow doesn't support a "client" option for the date field, so if that is
|
||||
# provided, we must fallback on the main timezone chosen during the import.
|
||||
# Otherwise, we can use the timezone of that value.
|
||||
if airtable_timezone is not None and airtable_timezone != "client":
|
||||
timezone = pytz_timezone(airtable_timezone)
|
||||
|
||||
# The provided Airtable date value is always in UTC format. Because Baserow
|
||||
# doesn't support different timezones for the date field, we need to convert
|
||||
# to the given timezone because then it will be visible in the correct
|
||||
# timezone to the user.
|
||||
try:
|
||||
value = (
|
||||
datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
|
||||
.astimezone(timezone)
|
||||
.replace(tzinfo=UTC)
|
||||
value = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ").replace(
|
||||
tzinfo=UTC
|
||||
)
|
||||
except ValueError:
|
||||
tb = traceback.format_exc()
|
||||
|
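For clarity, a standalone sketch of the new parsing path above, assuming the Airtable export format shown in the diff ("%Y-%m-%dT%H:%M:%S.%fZ"): the raw value is parsed and kept as an aware UTC datetime instead of being shifted into an import-wide timezone.

from datetime import datetime

from pytz import UTC


def parse_airtable_datetime(value: str) -> datetime:
    # Airtable serializes cell values in UTC, e.g. "2023-02-10T14:30:00.000Z".
    # The value stays in UTC; a forced timezone only affects how it is shown.
    return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=UTC)


print(parse_airtable_datetime("2023-02-10T14:30:00.000Z").isoformat())
# 2023-02-10T14:30:00+00:00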
@ -216,27 +205,28 @@ class DateAirtableColumnType(AirtableColumnType):
|
|||
class FormulaAirtableColumnType(AirtableColumnType):
|
||||
type = "formula"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
display_type = type_options.get("displayType", "")
|
||||
airtable_timezone = type_options.get("timeZone", None)
|
||||
date_show_tzinfo = type_options.get("shouldDisplayTimeZone", False)
|
||||
|
||||
# Baserow doesn't support a "client" option for the date field, so if that is
|
||||
# provided, we must fallback on the main timezone chosen during the import.
|
||||
# Otherwise, we can use the timezone of that field.
|
||||
if airtable_timezone is not None and airtable_timezone != "client":
|
||||
timezone = pytz_timezone(airtable_timezone)
|
||||
# date_force_timezone=None is the equivalent of airtable_timezone="client".
|
||||
if airtable_timezone == "client":
|
||||
airtable_timezone = None
|
||||
|
||||
# The formula conversion isn't supported yet, but because the Created on and
|
||||
# Last modified fields work as a formula, we can convert those.
|
||||
if display_type == "lastModifiedTime":
|
||||
return LastModifiedField(
|
||||
timezone=str(timezone),
|
||||
date_show_tzinfo=date_show_tzinfo,
|
||||
date_force_timezone=airtable_timezone,
|
||||
**import_airtable_date_type_options(type_options),
|
||||
)
|
||||
elif display_type == "createdTime":
|
||||
return CreatedOnField(
|
||||
timezone=str(timezone),
|
||||
date_show_tzinfo=date_show_tzinfo,
|
||||
date_force_timezone=airtable_timezone,
|
||||
**import_airtable_date_type_options(type_options),
|
||||
)
|
||||
|
||||
|
@ -246,7 +236,6 @@ class FormulaAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
if isinstance(baserow_field, CreatedOnField):
|
||||
|
@ -267,7 +256,7 @@ class FormulaAirtableColumnType(AirtableColumnType):
|
|||
class ForeignKeyAirtableColumnType(AirtableColumnType):
|
||||
type = "foreignKey"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
type_options = raw_airtable_column.get("typeOptions", {})
|
||||
foreign_table_id = type_options.get("foreignTableId")
|
||||
|
||||
|
@ -282,7 +271,6 @@ class ForeignKeyAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
foreign_table_id = raw_airtable_column["typeOptions"]["foreignTableId"]
|
||||
|
@ -292,7 +280,7 @@ class ForeignKeyAirtableColumnType(AirtableColumnType):
|
|||
class MultipleAttachmentAirtableColumnType(AirtableColumnType):
|
||||
type = "multipleAttachment"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
return FileField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
|
@ -301,7 +289,6 @@ class MultipleAttachmentAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
new_value = []
|
||||
|
@ -322,7 +309,7 @@ class MultipleAttachmentAirtableColumnType(AirtableColumnType):
|
|||
class SelectAirtableColumnType(AirtableColumnType):
|
||||
type = "select"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
field = SingleSelectField()
|
||||
field = set_select_options_on_field(
|
||||
field, raw_airtable_column.get("typeOptions", {})
|
||||
|
@ -333,7 +320,7 @@ class SelectAirtableColumnType(AirtableColumnType):
|
|||
class MultiSelectAirtableColumnType(AirtableColumnType):
|
||||
type = "multiSelect"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
field = MultipleSelectField()
|
||||
field = set_select_options_on_field(
|
||||
field, raw_airtable_column.get("typeOptions", {})
|
||||
|
@ -344,7 +331,7 @@ class MultiSelectAirtableColumnType(AirtableColumnType):
|
|||
class PhoneAirtableColumnType(AirtableColumnType):
|
||||
type = "phone"
|
||||
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column, timezone):
|
||||
def to_baserow_field(self, raw_airtable_table, raw_airtable_column):
|
||||
return PhoneNumberField()
|
||||
|
||||
def to_baserow_export_serialized_value(
|
||||
|
@ -353,7 +340,6 @@ class PhoneAirtableColumnType(AirtableColumnType):
|
|||
raw_airtable_column,
|
||||
baserow_field,
|
||||
value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
):
|
||||
try:
|
||||
|
|
|
@ -10,7 +10,7 @@ from django.contrib.auth import get_user_model
|
|||
from django.core.files.storage import Storage
|
||||
|
||||
import requests
|
||||
from pytz import UTC, BaseTzInfo
|
||||
from pytz import UTC
|
||||
from requests import Response
|
||||
|
||||
from baserow.contrib.database.airtable.constants import (
|
||||
|
@ -189,7 +189,6 @@ class AirtableHandler:
|
|||
def to_baserow_field(
|
||||
table: dict,
|
||||
column: dict,
|
||||
timezone: BaseTzInfo,
|
||||
) -> Union[Tuple[None, None, None], Tuple[Field, FieldType, AirtableColumnType]]:
|
||||
"""
|
||||
Converts the provided Airtable column dict to the right Baserow field object.
|
||||
|
@ -198,7 +197,6 @@ class AirtableHandler:
|
|||
field is the primary field.
|
||||
:param column: The Airtable column dict. These values will be converted to
|
||||
Baserow format.
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:return: The converted Baserow field, field type and the Airtable column type.
|
||||
"""
|
||||
|
||||
|
@ -206,7 +204,7 @@ class AirtableHandler:
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
table, column, timezone
|
||||
table, column
|
||||
)
|
||||
|
||||
if baserow_field is None:
|
||||
|
@ -239,7 +237,6 @@ class AirtableHandler:
|
|||
column_mapping: Dict[str, dict],
|
||||
row: dict,
|
||||
index: int,
|
||||
timezone: BaseTzInfo,
|
||||
files_to_download: Dict[str, str],
|
||||
) -> dict:
|
||||
"""
|
||||
|
@ -253,7 +250,6 @@ class AirtableHandler:
|
|||
Baserow field dict.
|
||||
:param row: The Airtable row that must be converted a Baserow row.
|
||||
:param index: The index the row has in the table.
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:param files_to_download: A dict that contains all the user file URLs that must
|
||||
be downloaded. The key is the file name and the value the URL. Additional
|
||||
files can be added to this dict.
|
||||
|
@ -291,7 +287,6 @@ class AirtableHandler:
|
|||
mapping_values["raw_airtable_column"],
|
||||
mapping_values["baserow_field"],
|
||||
column_value,
|
||||
timezone,
|
||||
files_to_download,
|
||||
)
|
||||
exported_row[f"field_{column_id}"] = baserow_serialized_value
|
||||
|
@ -339,7 +334,6 @@ class AirtableHandler:
|
|||
init_data: dict,
|
||||
schema: dict,
|
||||
tables: list,
|
||||
timezone: BaseTzInfo,
|
||||
progress_builder: Optional[ChildProgressBuilder] = None,
|
||||
download_files_buffer: Union[None, IOBase] = None,
|
||||
) -> Tuple[dict, IOBase]:
|
||||
|
@ -354,7 +348,6 @@ class AirtableHandler:
|
|||
shared base.
|
||||
:param schema: An object containing the schema of the Airtable base.
|
||||
:param tables: a list containing the table data.
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:param progress_builder: If provided will be used to build a child progress bar
|
||||
and report on this methods progress to the parent of the progress_builder.
|
||||
:param download_files_buffer: Optionally a file buffer can be provided to store
|
||||
|
@ -414,7 +407,7 @@ class AirtableHandler:
|
|||
baserow_field,
|
||||
baserow_field_type,
|
||||
airtable_column_type,
|
||||
) = cls.to_baserow_field(table, column, timezone)
|
||||
) = cls.to_baserow_field(table, column)
|
||||
converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
|
||||
|
||||
# None means that none of the field types know how to parse this field,
|
||||
|
@ -457,7 +450,7 @@ class AirtableHandler:
|
|||
baserow_field,
|
||||
baserow_field_type,
|
||||
airtable_column_type,
|
||||
) = cls.to_baserow_field(table, airtable_column, timezone)
|
||||
) = cls.to_baserow_field(table, airtable_column)
|
||||
baserow_field.primary = True
|
||||
field_mapping["primary_id"] = {
|
||||
"baserow_field": baserow_field,
|
||||
|
@ -481,12 +474,7 @@ class AirtableHandler:
|
|||
for row_index, row in enumerate(tables[table["id"]]["rows"]):
|
||||
exported_rows.append(
|
||||
cls.to_baserow_row_export(
|
||||
row_id_mapping,
|
||||
field_mapping,
|
||||
row,
|
||||
row_index,
|
||||
timezone,
|
||||
files_to_download,
|
||||
row_id_mapping, field_mapping, row, row_index, files_to_download
|
||||
)
|
||||
)
|
||||
converting_progress.increment(state=AIRTABLE_EXPORT_JOB_CONVERTING)
|
||||
|
@ -539,7 +527,6 @@ class AirtableHandler:
|
|||
cls,
|
||||
group: Group,
|
||||
share_id: str,
|
||||
timezone: BaseTzInfo = UTC,
|
||||
storage: Optional[Storage] = None,
|
||||
progress_builder: Optional[ChildProgressBuilder] = None,
|
||||
download_files_buffer: Union[None, IOBase] = None,
|
||||
|
@ -551,7 +538,6 @@ class AirtableHandler:
|
|||
|
||||
:param group: The group where the copy of the Airtable must be added to.
|
||||
:param share_id: The shared Airtable ID that must be imported.
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:param storage: The storage where the user files must be saved to.
|
||||
:param progress_builder: If provided will be used to build a child progress bar
|
||||
and report on this methods progress to the parent of the progress_builder.
|
||||
|
@ -602,7 +588,6 @@ class AirtableHandler:
|
|||
init_data,
|
||||
schema,
|
||||
tables,
|
||||
timezone,
|
||||
progress.create_child_builder(represents_progress=300),
|
||||
download_files_buffer,
|
||||
)
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
from pytz import all_timezones
|
||||
from pytz import timezone as pytz_timezone
|
||||
from requests.exceptions import RequestException
|
||||
from rest_framework import serializers
|
||||
|
||||
|
@ -46,7 +44,6 @@ class AirtableImportJobType(JobType):
|
|||
request_serializer_field_names = [
|
||||
"group_id",
|
||||
"database_id",
|
||||
"timezone",
|
||||
"airtable_share_url",
|
||||
]
|
||||
|
||||
|
@ -59,20 +56,12 @@ class AirtableImportJobType(JobType):
|
|||
help_text="The publicly shared URL of the Airtable base (e.g. "
|
||||
"https://airtable.com/shrxxxxxxxxxxxxxx)",
|
||||
),
|
||||
"timezone": serializers.ChoiceField(
|
||||
required=False,
|
||||
choices=all_timezones,
|
||||
help_text="Optionally a timezone can be provided that must be respected "
|
||||
"during import. This is for example setting the correct value of the date "
|
||||
"fields.",
|
||||
),
|
||||
}
|
||||
|
||||
serializer_field_names = [
|
||||
"group_id",
|
||||
"database",
|
||||
"airtable_share_id",
|
||||
"timezone",
|
||||
]
|
||||
|
||||
serializer_field_overrides = {
|
||||
|
@ -83,11 +72,6 @@ class AirtableImportJobType(JobType):
|
|||
max_length=18,
|
||||
help_text="Public ID of the shared Airtable base that must be imported.",
|
||||
),
|
||||
"timezone": serializers.CharField(
|
||||
help_text="Optionally a timezone can be provided that must be respected "
|
||||
"during import. This is for example setting the correct value of the date "
|
||||
"fields.",
|
||||
),
|
||||
"database": ApplicationSerializer(),
|
||||
}
|
||||
|
||||
|
@ -99,24 +83,14 @@ class AirtableImportJobType(JobType):
|
|||
)
|
||||
|
||||
airtable_share_id = extract_share_id_from_url(values["airtable_share_url"])
|
||||
timezone = values.get("timezone")
|
||||
|
||||
if timezone is not None:
|
||||
timezone = pytz_timezone(timezone)
|
||||
|
||||
return {
|
||||
"airtable_share_id": airtable_share_id,
|
||||
"timezone": timezone,
|
||||
"group": group,
|
||||
}
|
||||
|
||||
def run(self, job, progress):
|
||||
|
||||
kwargs = {}
|
||||
|
||||
if job.timezone is not None:
|
||||
kwargs["timezone"] = pytz_timezone(job.timezone)
|
||||
|
||||
database = action_type_registry.get(
|
||||
ImportDatabaseFromAirtableActionType.type
|
||||
).do(
|
||||
|
@ -126,7 +100,6 @@ class AirtableImportJobType(JobType):
|
|||
progress_builder=progress.create_child_builder(
|
||||
represents_progress=progress.total
|
||||
),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
application_created.send(self, application=database, user=None)
|
|
@ -15,7 +15,6 @@ class AirtableImportJob(JobWithUserIpAddress, Job):
|
|||
max_length=18,
|
||||
help_text="Public ID of the shared Airtable base that must be imported.",
|
||||
)
|
||||
timezone = models.CharField(null=True, max_length=255)
|
||||
database = models.ForeignKey(
|
||||
Application,
|
||||
null=True,
|
||||
|
|
|
@ -31,7 +31,6 @@ class AirtableColumnType(Instance):
|
|||
raw_airtable_column: dict,
|
||||
baserow_field: Field,
|
||||
value: Any,
|
||||
timezone: BaseTzInfo,
|
||||
files_to_download: Dict[str, str],
|
||||
):
|
||||
"""
|
||||
|
@ -45,7 +44,6 @@ class AirtableColumnType(Instance):
|
|||
:param raw_airtable_column: A dict containing the raw Airtable column values.
|
||||
:param baserow_field: The Baserow field that the column has been converted to.
|
||||
:param value: The raw Airtable value that must be converted.
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:param files_to_download: A dict that contains all the user file URLs that must
|
||||
be downloaded. The key is the file name and the value the URL. Additional
|
||||
files can be added to this dict.
|
||||
|
@ -59,7 +57,7 @@ class AirtableColumnTypeRegistry(Registry):
|
|||
name = "airtable_column"
|
||||
|
||||
def from_airtable_column_to_serialized(
|
||||
self, raw_airtable_table: dict, raw_airtable_column: dict, timezone: BaseTzInfo
|
||||
self, raw_airtable_table: dict, raw_airtable_column: dict
|
||||
) -> Union[Tuple[Field, AirtableColumnType], Tuple[None, None]]:
|
||||
"""
|
||||
Tries to find a Baserow field that matches that raw Airtable column data. If
|
||||
|
@ -67,7 +65,6 @@ class AirtableColumnTypeRegistry(Registry):
|
|||
|
||||
:param raw_airtable_table: The raw Airtable table data related to the column.
|
||||
:param raw_airtable_column: The raw Airtable column data that must be
|
||||
:param timezone: The main timezone used for date conversions if needed.
|
||||
:return: The related Baserow field and AirtableColumnType that should be used
|
||||
for the conversion.
|
||||
"""
|
||||
|
@ -76,7 +73,7 @@ class AirtableColumnTypeRegistry(Registry):
|
|||
type_name = raw_airtable_column.get("type", "")
|
||||
airtable_column_type = self.get(type_name)
|
||||
baserow_field = airtable_column_type.to_baserow_field(
|
||||
raw_airtable_table, raw_airtable_column, timezone
|
||||
raw_airtable_table, raw_airtable_column
|
||||
)
|
||||
|
||||
if baserow_field is None:
|
||||
|
|
|
@ -116,3 +116,10 @@ ERROR_FAILED_TO_LOCK_FIELD_DUE_TO_CONFLICT = (
|
|||
"The requested field is already being updated or used by another operation, "
|
||||
"please try again after other concurrent operations have finished.",
|
||||
)
|
||||
ERROR_DATE_FORCE_TIMEZONE_OFFSET_ERROR = (
|
||||
"ERROR_DATE_FORCE_TIMEZONE_OFFSET_ERROR",
|
||||
HTTP_400_BAD_REQUEST,
|
||||
"The field date should already exists and date_include_time "
|
||||
"must be set to True on the field to convert values with "
|
||||
"the utc_offset provided in date_force_timezone_offset.",
|
||||
)
|
||||
|
|
|
@ -31,7 +31,7 @@ from baserow.contrib.database.fields.exceptions import (
|
|||
MaxFieldNameLengthExceeded,
|
||||
ReservedBaserowFieldNameException,
|
||||
)
|
||||
from baserow.contrib.database.file_import.job_type import FileImportJobType
|
||||
from baserow.contrib.database.file_import.job_types import FileImportJobType
|
||||
from baserow.contrib.database.handler import DatabaseHandler
|
||||
from baserow.contrib.database.operations import (
|
||||
CreateTableDatabaseTableOperationType,
|
||||
|
|
|
@ -472,9 +472,9 @@ class DatabaseConfig(AppConfig):
|
|||
|
||||
from baserow.core.jobs.registries import job_type_registry
|
||||
|
||||
from .airtable.job_type import AirtableImportJobType
|
||||
from .airtable.job_types import AirtableImportJobType
|
||||
from .fields.job_types import DuplicateFieldJobType
|
||||
from .file_import.job_type import FileImportJobType
|
||||
from .file_import.job_types import FileImportJobType
|
||||
from .table.job_types import DuplicateTableJobType
|
||||
|
||||
job_type_registry.register(AirtableImportJobType())
|
||||
|
|
|
@ -89,6 +89,9 @@ class UpdateFieldActionType(UndoableActionCustomCleanupMixin, UndoableActionType
|
|||
original_exported_values = cls._get_prepared_field_attrs(
|
||||
field, kwargs, to_field_type_name
|
||||
)
|
||||
original_exported_values.update(
|
||||
from_field_type.get_request_kwargs_to_backup(field, kwargs)
|
||||
)
|
||||
|
||||
optional_backup_data = cls._backup_field_if_required(
|
||||
field, kwargs, to_field_type_name, backup_uuid
|
||||
|
@ -272,12 +275,17 @@ class UpdateFieldActionType(UndoableActionCustomCleanupMixin, UndoableActionType
|
|||
handler = FieldHandler()
|
||||
field = handler.get_specific_field_for_update(params.field_id)
|
||||
|
||||
from_field_type = field_type_registry.get_by_model(field)
|
||||
from_field_type_name = from_field_type.type
|
||||
|
||||
updated_field_attrs = set(new_field_attributes.keys())
|
||||
request_kwargs = from_field_type.get_request_kwargs_to_backup(
|
||||
field, new_field_attributes
|
||||
)
|
||||
original_field_params = cls._get_prepared_field_attrs(
|
||||
field, updated_field_attrs, to_field_type_name
|
||||
)
|
||||
from_field_type = field_type_registry.get_by_model(field)
|
||||
from_field_type_name = from_field_type.type
|
||||
original_field_params.update(request_kwargs)
|
||||
|
||||
backup_uid = params.backup_uid or action.id
|
||||
optional_backup_data = cls._backup_field_if_required(
|
||||
|
|
|
@ -215,3 +215,9 @@ class FailedToLockFieldDueToConflict(LockConflict):
|
|||
Raised when a user tried to update a field which was locked by another
|
||||
concurrent operation
|
||||
"""
|
||||
|
||||
|
||||
class DateForceTimezoneOffsetValueError(ValueError):
|
||||
"""
|
||||
Raised when the date_force_timezone_offset value cannot be set.
|
||||
"""
|
||||
|
|
|
@ -47,33 +47,54 @@ def construct_all_possible_field_kwargs(
|
|||
"date": [
|
||||
{"name": "datetime_us", "date_include_time": True, "date_format": "US"},
|
||||
{"name": "date_us", "date_include_time": False, "date_format": "US"},
|
||||
{"name": "datetime_eu", "date_include_time": True, "date_format": "EU"},
|
||||
{
|
||||
"name": "datetime_eu",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
},
|
||||
{"name": "date_eu", "date_include_time": False, "date_format": "EU"},
|
||||
{
|
||||
"name": "datetime_eu_tzone_visible",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"date_force_timezone": "Europe/Amsterdam",
|
||||
"date_show_tzinfo": True,
|
||||
},
|
||||
{
|
||||
"name": "datetime_eu_tzone_hidden",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"date_force_timezone": "Europe/Amsterdam",
|
||||
"date_show_tzinfo": False,
|
||||
},
|
||||
],
|
||||
"last_modified": [
|
||||
{
|
||||
"name": "last_modified_datetime_us",
|
||||
"date_include_time": True,
|
||||
"date_format": "US",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "last_modified_date_us",
|
||||
"date_include_time": False,
|
||||
"date_format": "US",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "last_modified_datetime_eu",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "last_modified_date_eu",
|
||||
"date_include_time": False,
|
||||
"date_format": "EU",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "last_modified_datetime_eu_tzone",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"date_force_timezone": "Europe/Amsterdam",
|
||||
"date_show_tzinfo": True,
|
||||
},
|
||||
],
|
||||
"created_on": [
|
||||
|
@ -81,25 +102,28 @@ def construct_all_possible_field_kwargs(
|
|||
"name": "created_on_datetime_us",
|
||||
"date_include_time": True,
|
||||
"date_format": "US",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "created_on_date_us",
|
||||
"date_include_time": False,
|
||||
"date_format": "US",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "created_on_datetime_eu",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "created_on_date_eu",
|
||||
"date_include_time": False,
|
||||
"date_format": "EU",
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
{
|
||||
"name": "created_on_datetime_eu_tzone",
|
||||
"date_include_time": True,
|
||||
"date_format": "EU",
|
||||
"date_force_timezone": "Europe/Amsterdam",
|
||||
"date_show_tzinfo": True,
|
||||
},
|
||||
],
|
||||
"link_row": [
|
||||
|
|
|
@ -2,7 +2,7 @@ import re
|
|||
from abc import ABC, abstractmethod
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import date, datetime
|
||||
from datetime import date, datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from random import randint, randrange, sample
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple
|
||||
|
@ -19,13 +19,15 @@ from django.db.models import CharField, DateTimeField, F, Func, Q, Value
|
|||
from django.db.models.functions import Coalesce
|
||||
from django.utils.timezone import make_aware
|
||||
|
||||
import pytz
|
||||
from dateutil import parser
|
||||
from dateutil.parser import ParserError
|
||||
from loguru import logger
|
||||
from pytz import all_timezones, timezone
|
||||
from pytz import timezone
|
||||
from rest_framework import serializers
|
||||
|
||||
from baserow.contrib.database.api.fields.errors import (
|
||||
ERROR_DATE_FORCE_TIMEZONE_OFFSET_ERROR,
|
||||
ERROR_INCOMPATIBLE_PRIMARY_FIELD_TYPE,
|
||||
ERROR_INVALID_LOOKUP_TARGET_FIELD,
|
||||
ERROR_INVALID_LOOKUP_THROUGH_FIELD,
|
||||
|
@ -81,6 +83,7 @@ from .exceptions import (
|
|||
AllProvidedCollaboratorIdsMustBeValidUsers,
|
||||
AllProvidedMultipleSelectValuesMustBeSelectOption,
|
||||
AllProvidedValuesMustBeIntegersOrStrings,
|
||||
DateForceTimezoneOffsetValueError,
|
||||
FieldDoesNotExist,
|
||||
IncompatiblePrimaryFieldTypeError,
|
||||
InvalidLookupTargetField,
|
||||
|
@ -662,11 +665,105 @@ class BooleanFieldType(FieldType):
|
|||
return BooleanField()
|
||||
|
||||
|
||||
def valid_utc_offset_value_validator(value):
|
||||
if value != 0 and value % 30 != 0:
|
||||
raise serializers.ValidationError(
|
||||
"The UTC offset must be different from 0 and a multiple of 30 minutes."
|
||||
)
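A hedged, dependency-free restatement of the validator above (a plain ValueError instead of rest_framework's ValidationError), to make the accepted values explicit:

def valid_utc_offset(value: int) -> int:
    # Same rule as the validator above: only whole multiples of 30 minutes
    # (0, 30, -60, 90, ...) are accepted.
    if value != 0 and value % 30 != 0:
        raise ValueError("The UTC offset must be a multiple of 30 minutes.")
    return value


valid_utc_offset(120)    # ok
valid_utc_offset(-30)    # ok
# valid_utc_offset(45)   # would raise ValueError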
|
||||
|
||||
|
||||
class DateFieldType(FieldType):
|
||||
type = "date"
|
||||
model_class = DateField
|
||||
allowed_fields = ["date_format", "date_include_time", "date_time_format"]
|
||||
serializer_field_names = ["date_format", "date_include_time", "date_time_format"]
|
||||
allowed_fields = [
|
||||
"date_format",
|
||||
"date_include_time",
|
||||
"date_time_format",
|
||||
"date_show_tzinfo",
|
||||
"date_force_timezone",
|
||||
]
|
||||
serializer_field_names = [
|
||||
"date_format",
|
||||
"date_include_time",
|
||||
"date_time_format",
|
||||
"date_show_tzinfo",
|
||||
"date_force_timezone",
|
||||
]
|
||||
request_serializer_field_names = serializer_field_names + [
|
||||
"date_force_timezone_offset",
|
||||
]
|
||||
request_serializer_field_overrides = {
|
||||
"date_force_timezone_offset": serializers.IntegerField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
help_text=(
|
||||
"A UTC offset in minutes to add to all the field datetimes values.",
|
||||
),
|
||||
)
|
||||
}
|
||||
serializer_extra_kwargs = {"date_force_timezone_offset": {"write_only": True}}
|
||||
api_exceptions_map = {
|
||||
DateForceTimezoneOffsetValueError: ERROR_DATE_FORCE_TIMEZONE_OFFSET_ERROR
|
||||
}
|
||||
|
||||
def get_request_kwargs_to_backup(self, field, kwargs) -> Dict[str, Any]:
|
||||
date_force_timezone_offset = kwargs.get("date_force_timezone_offset", None)
|
||||
if date_force_timezone_offset:
|
||||
return {"date_force_timezone_offset": -date_force_timezone_offset}
|
||||
return {}
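To illustrate why the offset is negated (a sketch, not Baserow code): the backed-up request kwargs are what an undo replays, so a shift of +120 minutes must be backed up as -120 minutes.

def kwargs_to_backup(request_kwargs: dict) -> dict:
    # Mirrors the method above: store the opposite offset so that undoing the
    # field update replays the datetime shift in the other direction.
    offset = request_kwargs.get("date_force_timezone_offset")
    if offset:
        return {"date_force_timezone_offset": -offset}
    return {}


print(kwargs_to_backup({"date_force_timezone_offset": 120}))
# {'date_force_timezone_offset': -120}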
|
||||
|
||||
def before_create(
|
||||
self, table, primary, allowed_field_values, order, user, field_kwargs
|
||||
):
|
||||
force_timezone_offset = field_kwargs.get("date_force_timezone_offset", None)
|
||||
if force_timezone_offset is not None:
|
||||
raise DateForceTimezoneOffsetValueError(
|
||||
"date_force_timezone_offset is not allowed when creating a date field."
|
||||
)
|
||||
|
||||
def before_update(self, from_field, to_field_values, user, field_kwargs):
|
||||
force_timezone_offset = field_kwargs.get("date_force_timezone_offset", None)
|
||||
if not isinstance(from_field, DateField):
|
||||
return
|
||||
|
||||
if force_timezone_offset is not None and not to_field_values.get(
|
||||
"date_include_time", from_field.date_include_time
|
||||
):
|
||||
raise DateForceTimezoneOffsetValueError(
|
||||
"date_include_time must be set to true"
|
||||
)
|
||||
|
||||
def after_update(
|
||||
self,
|
||||
from_field,
|
||||
to_field,
|
||||
from_model,
|
||||
to_model,
|
||||
user,
|
||||
connection,
|
||||
altered_column,
|
||||
before,
|
||||
to_field_kwargs,
|
||||
):
|
||||
"""
|
||||
If the date_force_timezone field is changed and
|
||||
date_force_timezone_offset is set to an integer value, we need to
|
||||
replace the timezone of all the values in the database by adding the
|
||||
utcOffset accordingly.
|
||||
"""
|
||||
|
||||
timezone_offset_to_add_to_replace_tz = to_field_kwargs.get(
|
||||
"date_force_timezone_offset", None
|
||||
)
|
||||
if timezone_offset_to_add_to_replace_tz is None:
|
||||
return
|
||||
|
||||
to_model.objects.filter(**{f"{to_field.db_column}__isnull": False}).update(
|
||||
**{
|
||||
to_field.db_column: models.F(to_field.db_column)
|
||||
+ timedelta(minutes=timezone_offset_to_add_to_replace_tz)
|
||||
}
|
||||
)
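The after_update hook above applies the requested shift to every stored value in a single UPDATE using F() + timedelta. A standalone illustration of the same arithmetic applied to one value (the 60-minute offset is only an example):

from datetime import datetime, timedelta

from pytz import UTC

stored = datetime(2023, 2, 10, 14, 30, tzinfo=UTC)
offset_minutes = 60  # example date_force_timezone_offset value

print((stored + timedelta(minutes=offset_minutes)).isoformat())
# 2023-02-10T15:30:00+00:00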
|
||||
|
||||
def prepare_value_for_db(self, instance, value):
|
||||
"""
|
||||
|
@ -726,8 +823,11 @@ class DateFieldType(FieldType):
|
|||
if value is None:
|
||||
return value if rich_value else ""
|
||||
|
||||
python_format = field_object["field"].get_python_format()
|
||||
return value.strftime(python_format)
|
||||
field = field_object["field"]
|
||||
if isinstance(value, datetime) and field.date_force_timezone is not None:
|
||||
value = value.astimezone(pytz.timezone(field.date_force_timezone))
|
||||
|
||||
return value.strftime(field.get_python_format())
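The updated get_export_value converts the stored UTC value into the field's forced timezone before formatting, so exports show local wall time. A hedged, standalone illustration with example values (Europe/Amsterdam is UTC+1 on this date; the EU pattern comes from the DATE_FORMAT table later in this diff):

from datetime import datetime

import pytz

value = datetime(2023, 2, 10, 14, 30, tzinfo=pytz.UTC)
local = value.astimezone(pytz.timezone("Europe/Amsterdam"))
print(local.strftime("%d/%m/%Y %H:%M"))  # 10/02/2023 15:30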
|
||||
|
||||
def get_serializer_field(self, instance, **kwargs):
|
||||
required = kwargs.get("required", False)
|
||||
|
@ -755,32 +855,24 @@ class DateFieldType(FieldType):
|
|||
else:
|
||||
return fake.date_object()
|
||||
|
||||
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
|
||||
"""
|
||||
If the field type has changed then we want to convert the date or timestamp to
|
||||
a human readable text following the old date format.
|
||||
"""
|
||||
|
||||
to_field_type = field_type_registry.get_by_model(to_field)
|
||||
if to_field_type.type != self.type and connection.vendor == "postgresql":
|
||||
sql_format = from_field.get_psql_format()
|
||||
sql_type = from_field.get_psql_type()
|
||||
return f"""p_in = TO_CHAR(p_in::{sql_type}, '{sql_format}');"""
|
||||
|
||||
return super().get_alter_column_prepare_old_value(
|
||||
connection, from_field, to_field
|
||||
)
|
||||
|
||||
def contains_query(self, field_name, value, model_field, field):
|
||||
value = value.strip()
|
||||
# If an empty value has been provided we do not want to filter at all.
|
||||
if value == "":
|
||||
return Q()
|
||||
|
||||
# No user input goes into the RawSQL, safe to use.
|
||||
return AnnotatedQ(
|
||||
annotation={
|
||||
f"formatted_date_{field_name}": Coalesce(
|
||||
Func(
|
||||
F(field_name),
|
||||
Func(
|
||||
# FIXME: what if date_force_timezone is None(user timezone)?
|
||||
Value(field.date_force_timezone or "UTC"),
|
||||
F(field_name),
|
||||
function="timezone",
|
||||
output_field=DateTimeField(),
|
||||
),
|
||||
Value(field.get_psql_format()),
|
||||
function="to_char",
|
||||
output_field=CharField(),
|
||||
|
@ -791,6 +883,41 @@ class DateFieldType(FieldType):
|
|||
q={f"formatted_date_{field_name}__icontains": value},
|
||||
)
|
||||
|
||||
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
|
||||
"""
|
||||
If the field type has changed then we want to convert the date or timestamp to
|
||||
a human readable text following the old date format.
|
||||
"""
|
||||
|
||||
to_field_type = field_type_registry.get_by_model(to_field)
|
||||
if to_field_type.type != self.type:
|
||||
sql_format = from_field.get_psql_format()
|
||||
variables = {}
|
||||
variable_name = f"{from_field.db_column}_timezone"
|
||||
# FIXME: what if date_force_timezone is None(user timezone)?
|
||||
variables[variable_name] = from_field.date_force_timezone or "UTC"
|
||||
return (
|
||||
f"""p_in = TO_CHAR(p_in::timestamptz at time zone %({variable_name})s,
|
||||
'{sql_format}');""",
|
||||
variables,
|
||||
)
|
||||
|
||||
if (
|
||||
to_field.date_include_time is False
|
||||
and from_field.date_force_timezone is not None
|
||||
):
|
||||
variables = {}
|
||||
variable_name = f"{from_field.db_column}_timezone"
|
||||
variables[variable_name] = from_field.date_force_timezone or "UTC"
|
||||
return (
|
||||
f"""p_in = (p_in::timestamptz at time zone %({variable_name})s)::date;""",
|
||||
variables,
|
||||
)
|
||||
|
||||
return super().get_alter_column_prepare_old_value(
|
||||
connection, from_field, to_field
|
||||
)
|
||||
|
||||
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
|
||||
"""
|
||||
If the field type has changed into a date field then we want to parse the old
|
||||
|
@ -856,6 +983,8 @@ class DateFieldType(FieldType):
|
|||
field.date_format,
|
||||
field.date_include_time,
|
||||
field.date_time_format,
|
||||
date_force_timezone=field.date_force_timezone,
|
||||
date_show_tzinfo=field.date_show_tzinfo,
|
||||
nullable=True,
|
||||
)
|
||||
|
||||
|
@ -866,6 +995,8 @@ class DateFieldType(FieldType):
|
|||
date_format=formula_type.date_format,
|
||||
date_include_time=formula_type.date_include_time,
|
||||
date_time_format=formula_type.date_time_format,
|
||||
date_force_timezone=formula_type.date_force_timezone,
|
||||
date_show_tzinfo=formula_type.date_show_tzinfo,
|
||||
)
|
||||
|
||||
def should_backup_field_data_for_same_type_update(
|
||||
|
@ -881,36 +1012,16 @@ class CreatedOnLastModifiedBaseFieldType(ReadOnlyFieldType, DateFieldType):
|
|||
can_be_in_form_view = False
|
||||
field_data_is_derived_from_attrs = True
|
||||
|
||||
allowed_fields = DateFieldType.allowed_fields + ["timezone"]
|
||||
serializer_field_names = DateFieldType.serializer_field_names + ["timezone"]
|
||||
serializer_field_overrides = {
|
||||
"timezone": serializers.ChoiceField(choices=all_timezones, required=True)
|
||||
}
|
||||
source_field_name = None
|
||||
model_field_class = models.DateTimeField
|
||||
model_field_kwargs = {}
|
||||
populate_from_field = None
|
||||
|
||||
def get_export_value(self, value, field_object, rich_value=False):
|
||||
if value is None:
|
||||
return value if rich_value else ""
|
||||
|
||||
python_format = field_object["field"].get_python_format()
|
||||
field = field_object["field"]
|
||||
field_timezone = timezone(field.get_timezone())
|
||||
return value.astimezone(field_timezone).strftime(python_format)
|
||||
|
||||
def get_serializer_field(self, instance, **kwargs):
|
||||
if not instance.date_include_time:
|
||||
kwargs["format"] = "%Y-%m-%d"
|
||||
kwargs["default_timezone"] = timezone(instance.timezone)
|
||||
|
||||
return serializers.DateTimeField(
|
||||
**{
|
||||
"required": False,
|
||||
**kwargs,
|
||||
}
|
||||
)
|
||||
return serializers.DateTimeField(**{"required": False, **kwargs})
|
||||
|
||||
def get_model_field(self, instance, **kwargs):
|
||||
kwargs["null"] = True
|
||||
|
@ -918,56 +1029,6 @@ class CreatedOnLastModifiedBaseFieldType(ReadOnlyFieldType, DateFieldType):
|
|||
kwargs.update(self.model_field_kwargs)
|
||||
return self.model_field_class(**kwargs)
|
||||
|
||||
def contains_query(self, field_name, value, model_field, field):
|
||||
value = value.strip()
|
||||
# If an empty value has been provided we do not want to filter at all.
|
||||
if value == "":
|
||||
return Q()
|
||||
# No user input goes into the RawSQL, safe to use.
|
||||
return AnnotatedQ(
|
||||
annotation={
|
||||
f"formatted_date_{field_name}": Coalesce(
|
||||
Func(
|
||||
Func(
|
||||
Value(
|
||||
field.get_timezone(),
|
||||
),
|
||||
F(field_name),
|
||||
function="timezone",
|
||||
output_field=DateTimeField(),
|
||||
),
|
||||
Value(field.get_psql_format()),
|
||||
function="to_char",
|
||||
output_field=CharField(),
|
||||
),
|
||||
Value(""),
|
||||
)
|
||||
},
|
||||
q={f"formatted_date_{field_name}__icontains": value},
|
||||
)
|
||||
|
||||
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
|
||||
"""
|
||||
If the field type has changed then we want to convert the date or timestamp to
|
||||
a human readable text following the old date format.
|
||||
"""
|
||||
|
||||
to_field_type = field_type_registry.get_by_model(to_field)
|
||||
if to_field_type.type != self.type:
|
||||
sql_format = from_field.get_psql_format()
|
||||
variables = {}
|
||||
variable_name = f"{from_field.db_column}_timezone"
|
||||
variables[variable_name] = from_field.get_timezone()
|
||||
return (
|
||||
f"""p_in = TO_CHAR(p_in::timestamptz at time zone %({variable_name})s,
|
||||
'{sql_format}');""",
|
||||
variables,
|
||||
)
|
||||
|
||||
return super().get_alter_column_prepare_old_value(
|
||||
connection, from_field, to_field
|
||||
)
|
||||
|
||||
def after_create(self, field, model, user, connection, before, field_kwargs):
|
||||
"""
|
||||
Immediately after the field has been created, we need to populate the values
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
from django.db import models
|
||||
|
||||
import pytz
|
||||
|
||||
DATE_FORMAT = {
|
||||
"EU": {"name": "European (D/M/Y)", "format": "%d/%m/%Y", "sql": "DD/MM/YYYY"},
|
||||
"US": {"name": "US (M/D/Y)", "format": "%m/%d/%Y", "sql": "MM/DD/YYYY"},
|
||||
|
@ -20,12 +18,19 @@ def get_date_time_format(options, format_type):
|
|||
date_format_for_type = DATE_FORMAT[options.date_format][format_type]
|
||||
time_format_for_type = DATE_TIME_FORMAT[options.date_time_format][format_type]
|
||||
if options.date_include_time:
|
||||
return f"{date_format_for_type} {time_format_for_type}"
|
||||
format_time = f"{date_format_for_type} {time_format_for_type}"
|
||||
return format_time
|
||||
else:
|
||||
return date_format_for_type
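As a quick illustration of how these lookup tables combine (the time patterns for "24" and "12" are assumed here; only the date formats are visible in this hunk):

from types import SimpleNamespace

DATE_FORMAT = {"EU": {"format": "%d/%m/%Y"}, "US": {"format": "%m/%d/%Y"}}
DATE_TIME_FORMAT = {"24": {"format": "%H:%M"}, "12": {"format": "%I:%M %p"}}  # assumed values


def combined_format(options, format_type="format"):
    date_part = DATE_FORMAT[options.date_format][format_type]
    if options.date_include_time:
        return f"{date_part} {DATE_TIME_FORMAT[options.date_time_format][format_type]}"
    return date_part


opts = SimpleNamespace(date_format="EU", date_include_time=True, date_time_format="24")
print(combined_format(opts))  # %d/%m/%Y %H:%M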
|
||||
|
||||
|
||||
class BaseDateMixin(models.Model):
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
# Add retro-compatibility for the old timezone field.
|
||||
if (old_timezone := kwargs.pop("timezone", None)) is not None:
|
||||
kwargs["date_force_timezone"] = old_timezone
|
||||
super().__init__(*args, **kwargs)
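The __init__ override above keeps old call sites working by remapping the legacy timezone keyword onto date_force_timezone. A plain-Python stand-in (the real class is a Django model mixin):

class DateSettingsStandIn:
    # Illustrative stand-in, not the real Django model mixin.
    def __init__(self, **kwargs):
        if (old_timezone := kwargs.pop("timezone", None)) is not None:
            kwargs["date_force_timezone"] = old_timezone
        self.date_force_timezone = kwargs.get("date_force_timezone")


print(DateSettingsStandIn(timezone="Europe/Berlin").date_force_timezone)
# Europe/Berlin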
|
||||
|
||||
date_format = models.CharField(
|
||||
choices=DATE_FORMAT_CHOICES,
|
||||
default=DATE_FORMAT_CHOICES[0][0],
|
||||
|
@ -41,6 +46,14 @@ class BaseDateMixin(models.Model):
|
|||
max_length=32,
|
||||
help_text="24 (14:30) or 12 (02:30 PM)",
|
||||
)
|
||||
date_show_tzinfo = models.BooleanField(
|
||||
default=False, help_text="Indicates if the timezone should be shown."
|
||||
)
|
||||
date_force_timezone = models.CharField(
|
||||
max_length=255,
|
||||
null=True,
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
@ -92,25 +105,3 @@ class BaseDateMixin(models.Model):
|
|||
|
||||
def _get_format(self, format_type):
|
||||
return get_date_time_format(self, format_type)
|
||||
|
||||
|
||||
class TimezoneMixin(models.Model):
|
||||
timezone = models.CharField(
|
||||
max_length=255,
|
||||
blank=False,
|
||||
help_text="Timezone of User during field creation.",
|
||||
default="UTC",
|
||||
)
|
||||
|
||||
def get_timezone(self, fallback="UTC"):
|
||||
return self.timezone if self.timezone in pytz.all_timezones else fallback
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Check if the timezone is a valid choice."""
|
||||
|
||||
if self.timezone not in pytz.all_timezones:
|
||||
raise ValueError(f"{self.timezone} is not a valid choice.")
|
||||
super().save(*args, **kwargs)
|
||||
|
|
|
@ -10,7 +10,6 @@ from baserow.contrib.database.fields.mixins import (
|
|||
DATE_FORMAT_CHOICES,
|
||||
DATE_TIME_FORMAT_CHOICES,
|
||||
BaseDateMixin,
|
||||
TimezoneMixin,
|
||||
)
|
||||
from baserow.contrib.database.formula import (
|
||||
BASEROW_FORMULA_ARRAY_TYPE_CHOICES,
|
||||
|
@ -294,11 +293,11 @@ class DateField(Field, BaseDateMixin):
|
|||
pass
|
||||
|
||||
|
||||
class LastModifiedField(Field, BaseDateMixin, TimezoneMixin):
|
||||
class LastModifiedField(Field, BaseDateMixin):
|
||||
pass
|
||||
|
||||
|
||||
class CreatedOnField(Field, BaseDateMixin, TimezoneMixin):
|
||||
class CreatedOnField(Field, BaseDateMixin):
|
||||
pass
|
||||
|
||||
|
||||
|
@ -422,6 +421,16 @@ class FormulaField(Field):
|
|||
max_length=32,
|
||||
help_text="24 (14:30) or 12 (02:30 PM)",
|
||||
)
|
||||
date_show_tzinfo = models.BooleanField(
|
||||
default=None,
|
||||
null=True,
|
||||
help_text="Indicates if the time zone should be shown.",
|
||||
)
|
||||
date_force_timezone = models.CharField(
|
||||
max_length=255,
|
||||
null=True,
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def cached_untyped_expression(self):
|
||||
|
@ -482,6 +491,7 @@ class FormulaField(Field):
|
|||
recalculate = kwargs.pop("recalculate", not self.trashed)
|
||||
field_cache = kwargs.pop("field_cache", None)
|
||||
raise_if_invalid = kwargs.pop("raise_if_invalid", False)
|
||||
|
||||
if recalculate:
|
||||
self.recalculate_internal_fields(
|
||||
field_cache=field_cache, raise_if_invalid=raise_if_invalid
|
||||
|
|
|
@ -455,6 +455,23 @@ class FieldType(
|
|||
|
||||
return values
|
||||
|
||||
def get_request_kwargs_to_backup(
|
||||
self, field: Field, kwargs: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Returns a dict of attributes that should be backed up when the field is
|
||||
updated. These attributes are sent in the request body but are not
|
||||
stored in the database field model. This is for example used by the
|
||||
DateField to replace the timezone by adding/subtracting the corresponding
|
||||
timedelta.
|
||||
|
||||
:param field: The field to update.
|
||||
:param kwargs: The kwargs that are passed to the update request.
|
||||
:return: A dict of attributes that should be backed up.
|
||||
"""
|
||||
|
||||
return {}
|
||||
|
||||
def export_prepared_values(self, field: Field):
|
||||
"""
|
||||
Returns a serializable dict of prepared values for the fields attributes.
|
||||
|
|
|
@ -427,15 +427,25 @@ class BaserowFormulaDateType(BaserowFormulaValidType):
|
|||
"date_format",
|
||||
"date_include_time",
|
||||
"date_time_format",
|
||||
"date_show_tzinfo",
|
||||
"date_force_timezone",
|
||||
]
|
||||
|
||||
def __init__(
|
||||
self, date_format: str, date_include_time: bool, date_time_format: str, **kwargs
|
||||
self,
|
||||
date_format: str,
|
||||
date_include_time: bool,
|
||||
date_time_format: str,
|
||||
date_show_tzinfo: bool = False,
|
||||
date_force_timezone: Optional[str] = None,
|
||||
**kwargs,
|
||||
):
|
||||
super().__init__(**kwargs)
|
||||
self.date_format = date_format
|
||||
self.date_include_time = date_include_time
|
||||
self.date_time_format = date_time_format
|
||||
self.date_show_tzinfo = date_show_tzinfo
|
||||
self.date_force_timezone = date_force_timezone
|
||||
|
||||
@property
|
||||
def comparable_types(self) -> List[Type["BaserowFormulaValidType"]]:
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
# Generated by Django 3.2.13 on 2023-02-10 16:12
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("database", "0101_formulafield_nullable"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="createdonfield",
|
||||
name="date_force_timezone",
|
||||
field=models.CharField(
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="createdonfield",
|
||||
name="date_show_tzinfo",
|
||||
field=models.BooleanField(
|
||||
default=False, help_text="Indicates if the timezone should be shown."
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="datefield",
|
||||
name="date_force_timezone",
|
||||
field=models.CharField(
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="datefield",
|
||||
name="date_show_tzinfo",
|
||||
field=models.BooleanField(
|
||||
default=False, help_text="Indicates if the timezone should be shown."
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="formulafield",
|
||||
name="date_force_timezone",
|
||||
field=models.CharField(
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="formulafield",
|
||||
name="date_show_tzinfo",
|
||||
field=models.BooleanField(
|
||||
default=None,
|
||||
help_text="Indicates if the time zone should be shown.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lastmodifiedfield",
|
||||
name="date_force_timezone",
|
||||
field=models.CharField(
|
||||
help_text="Force a timezone for the field overriding user profile settings.",
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lastmodifiedfield",
|
||||
name="date_show_tzinfo",
|
||||
field=models.BooleanField(
|
||||
default=False, help_text="Indicates if the timezone should be shown."
|
||||
),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,48 @@
|
|||
# Generated by Django 3.2.13 on 2023-02-10 16:12
|
||||
|
||||
from django.db import connection, migrations
|
||||
|
||||
from psycopg2 import sql
|
||||
|
||||
from baserow.contrib.database.fields.models import (
|
||||
CreatedOnField,
|
||||
DateField,
|
||||
FormulaField,
|
||||
LastModifiedField,
|
||||
)
|
||||
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
# since all the datetimes saved in the database are in UTC, we need to set the
|
||||
# `date_force_timezone` to UTC for all the fields and set `date_show_tzinfo` to
|
||||
# True so the user can be aware of the timezone.
|
||||
for qs in [
|
||||
DateField.objects.filter(date_include_time=True),
|
||||
FormulaField.objects.filter(formula_type="date", date_include_time=True),
|
||||
]:
|
||||
qs.update(date_force_timezone="UTC")
|
||||
|
||||
# for the created_on and last_modified fields we need to set the
|
||||
# `date_force_timezone` to the timezone saved in the field
|
||||
cursor = connection.cursor()
|
||||
for Field in [CreatedOnField, LastModifiedField]:
|
||||
cursor.execute(
|
||||
sql.SQL(
|
||||
"UPDATE {table_name} SET "
|
||||
"date_force_timezone = timezone "
|
||||
"WHERE date_include_time = true"
|
||||
).format(
|
||||
table_name=sql.Identifier(Field._meta.db_table),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("database", "0102_add_timezone_attrs_for_datetimes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forward, migrations.RunPython.noop),
|
||||
]
|
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 3.2.13 on 2023-02-10 16:28
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("database", "0103_fix_datetimes_timezones"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="airtableimportjob",
|
||||
name="timezone",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="createdonfield",
|
||||
name="timezone",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="lastmodifiedfield",
|
||||
name="timezone",
|
||||
),
|
||||
]
|
|
@ -1,17 +1,16 @@
|
|||
from collections import defaultdict
|
||||
from datetime import datetime, time, timedelta
|
||||
from datetime import date, datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from math import ceil, floor
|
||||
from typing import Dict, Union
|
||||
from typing import Any, Dict, Optional, Tuple, Union
|
||||
|
||||
from django.contrib.postgres.aggregates.general import ArrayAgg
|
||||
from django.db.models import DateTimeField, IntegerField, Q
|
||||
from django.db.models.functions import Cast, Length
|
||||
from django.db.models import DateField, DateTimeField, IntegerField, Q
|
||||
from django.db.models.functions import Cast, Extract, Length, TruncDate
|
||||
|
||||
import pytz
|
||||
from dateutil import parser
|
||||
from dateutil.parser import ParserError
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from pytz import all_timezones, timezone
|
||||
|
||||
from baserow.contrib.database.fields.field_filters import (
|
||||
FILTER_TYPE_AND,
|
||||
|
@ -39,6 +38,7 @@ from baserow.contrib.database.fields.field_types import (
|
|||
TextFieldType,
|
||||
URLFieldType,
|
||||
)
|
||||
from baserow.contrib.database.fields.models import Field
|
||||
from baserow.contrib.database.fields.registries import field_type_registry
|
||||
from baserow.contrib.database.formula import (
|
||||
BaserowFormulaBooleanType,
|
||||
|
@ -47,11 +47,13 @@ from baserow.contrib.database.formula import (
|
|||
BaserowFormulaNumberType,
|
||||
BaserowFormulaTextType,
|
||||
)
|
||||
from baserow.core.expressions import Timezone
|
||||
from baserow.core.models import GroupUser
|
||||
|
||||
from .registries import ViewFilterType
|
||||
|
||||
DATE_FILTER_EMPTY_VALUE = ""
|
||||
DATE_FILTER_TIMEZONE_SEPARATOR = "?"
|
||||
|
||||
|
||||
class NotViewFilterTypeMixin:
|
||||
def default_filter_on_exception(self):
|
||||
|
@ -318,7 +320,160 @@ class LowerThanViewFilterType(ViewFilterType):
|
|||
return self.default_filter_on_exception()
|
||||
|
||||
|
||||
class DateEqualViewFilterType(ViewFilterType):
|
||||
class TimezoneAwareDateViewFilterType(ViewFilterType):
|
||||
|
||||
compatible_field_types = [
|
||||
DateFieldType.type,
|
||||
LastModifiedFieldType.type,
|
||||
CreatedOnFieldType.type,
|
||||
FormulaFieldType.compatible_with_formula_types(BaserowFormulaDateType.type),
|
||||
]
|
||||
|
||||
def is_empty_filter(self, filter_value: str) -> bool:
|
||||
return filter_value == DATE_FILTER_EMPTY_VALUE
|
||||
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
"""
|
||||
Parses the provided filter value and returns a date or datetime object
|
||||
that can be used to compare with the field value.
|
||||
|
||||
:param filter_value: The value that has been provided by the user.
|
||||
:param timezone: The timezone that should be used to convert the date to
|
||||
an aware date.
|
||||
:return: a date or an aware datetime that should be used to compare with
|
||||
the field value.
|
||||
:raises ValueError: If the provided value is not valid.
|
||||
:raises OverflowError: If the provided value is out of range.
:raises ParserError: If the provided value is not a valid date to parse.
|
||||
"""
|
||||
|
||||
try:
|
||||
return parser.isoparser().parse_isodate(filter_value)
|
||||
except ValueError:
|
||||
datetime_value = parser.isoparse(filter_value)
|
||||
if datetime_value.tzinfo is None:
|
||||
return timezone.localize(datetime_value)
|
||||
return datetime_value.astimezone(timezone)
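
The parsing above first tries a strict date-only parse and only falls back to a full datetime parse when that fails. A quick sketch of the resulting behaviour, assuming `pytz` and `python-dateutil` are available:

import pytz
from dateutil import parser

amsterdam = pytz.timezone("Europe/Amsterdam")

# A date-only value stays a plain date object.
parser.isoparser().parse_isodate("2023-01-15")            # -> date(2023, 1, 15)

# A naive datetime is localized to the requested timezone...
amsterdam.localize(parser.isoparse("2023-01-15T10:00"))   # -> 10:00 CET

# ...while an aware datetime is converted into it.
parser.isoparse("2023-01-15T10:00+00:00").astimezone(amsterdam)  # -> 11:00 CET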
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[datetime, date]
|
||||
) -> Dict:
|
||||
"""
|
||||
Returns a dictionary that can be used to create a Q object.
|
||||
|
||||
:param field_name: The name of the field that should be used in the query.
|
||||
:param aware_filter_date: The date that should be used to compare with the
|
||||
field value.
|
||||
"""
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
def _split_optional_timezone_and_filter_value(
|
||||
self, field, filter_value, separator
|
||||
) -> Tuple[Optional[str], Optional[str]]:
|
||||
if separator in filter_value: # specific timezone provided by the filter
|
||||
try:
|
||||
timezone_value, filter_value = filter_value.split(separator)
|
||||
return timezone_value, filter_value
|
||||
except ValueError:
|
||||
# the separator appears more than once; we can't tell which part is the timezone
|
||||
return None, None
|
||||
elif filter_value in pytz.all_timezones:
|
||||
# only a timezone value was provided with no filter value
|
||||
return filter_value, None
|
||||
else:
|
||||
# default to the field's timezone (if any) and the provided value
|
||||
return field.date_force_timezone, filter_value
|
||||
|
||||
def split_timezone_and_filter_value(
|
||||
self, field, filter_value, separator=DATE_FILTER_TIMEZONE_SEPARATOR
|
||||
) -> Tuple[pytz.BaseTzInfo, str]:
|
||||
"""
|
||||
Splits the timezone and the filter value from the provided value. If the value
does not contain a timezone, the field's timezone is used, falling back to UTC.
|
||||
|
||||
:param field: The field that is being filtered.
|
||||
:param filter_value: The value that has been provided by the user.
|
||||
:param separator: The separator that is used to split the timezone and
|
||||
the value.
|
||||
:return: A tuple containing the timezone and the filter_value string.
|
||||
"""
|
||||
|
||||
(
|
||||
user_timezone_str,
|
||||
parsed_filter_value,
|
||||
) = self._split_optional_timezone_and_filter_value(
|
||||
field, filter_value, separator
|
||||
)
|
||||
|
||||
python_timezone = (
|
||||
pytz.timezone(user_timezone_str)
|
||||
if user_timezone_str is not None
|
||||
else pytz.UTC
|
||||
)
|
||||
|
||||
validated_filter_value = (
|
||||
parsed_filter_value
|
||||
if parsed_filter_value is not None
|
||||
else DATE_FILTER_EMPTY_VALUE
|
||||
)
|
||||
|
||||
return python_timezone, validated_filter_value
|
||||
|
||||
def get_filter(
|
||||
self, field_name: str, value: str, model_field, field: Field
|
||||
) -> Union[Q, AnnotatedQ]:
|
||||
"""
|
||||
Returns a Q object that can be used to filter the provided field.
|
||||
|
||||
:param field_name: The name of the field that should be used in the
|
||||
query.
|
||||
:param value: The value that has been provided by the user.
|
||||
:param model_field: The Django model field of the database table that is
|
||||
being filtered.
|
||||
:param field: The Baserow field instance containing the metadata related
|
||||
to the field.
|
||||
:return: A Q object that can be used to filter the provided field.
|
||||
"""
|
||||
|
||||
try:
|
||||
timezone, filter_value = self.split_timezone_and_filter_value(
|
||||
field, value.strip()
|
||||
)
|
||||
if self.is_empty_filter(filter_value):
|
||||
return Q()
|
||||
|
||||
filter_date = self.get_filter_date(filter_value, timezone)
|
||||
except (
|
||||
OverflowError,
|
||||
ValueError,
|
||||
parser.ParserError,
|
||||
pytz.UnknownTimeZoneError,
|
||||
):
|
||||
return Q(pk__in=[])
|
||||
|
||||
annotation = {}
|
||||
query_field_name = field_name
|
||||
|
||||
if isinstance(model_field, DateTimeField):
|
||||
|
||||
if not isinstance(filter_date, datetime):
|
||||
query_field_name = f"{field_name}_tzdate"
|
||||
annotation[query_field_name] = TruncDate(field_name, tzinfo=timezone)
|
||||
|
||||
elif isinstance(model_field, DateField) and isinstance(filter_date, datetime):
|
||||
filter_date = filter_date.date()
|
||||
|
||||
query_dict = {
|
||||
f"{field_name}__isnull": False, # makes `NotViewFilterTypeMixin` work with timezones
|
||||
**self.get_filter_query_dict(query_field_name, filter_date),
|
||||
}
|
||||
return AnnotatedQ(annotation=annotation, q=query_dict)
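
Putting the pieces together: for a date-only filter value applied to a `DateTimeField`, the method above ends up building roughly the following query. This is a sketch with a hypothetical column name `datetime_col`:

from datetime import date
from django.db.models.functions import TruncDate

AnnotatedQ(
    annotation={"datetime_col_tzdate": TruncDate("datetime_col", tzinfo=timezone)},
    q={
        "datetime_col__isnull": False,
        # the subclass' get_filter_query_dict, e.g. an exact match for date_equal
        "datetime_col_tzdate": date(2023, 1, 15),
    },
)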
|
||||
|
||||
|
||||
class DateEqualViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The date filter parses the provided value as date and checks if the field value is
|
||||
the same date. It only works if a valid ISO date is provided as value and it is
|
||||
|
@ -326,158 +481,14 @@ class DateEqualViewFilterType(ViewFilterType):
|
|||
"""
|
||||
|
||||
type = "date_equal"
|
||||
compatible_field_types = [
|
||||
DateFieldType.type,
|
||||
LastModifiedFieldType.type,
|
||||
CreatedOnFieldType.type,
|
||||
FormulaFieldType.compatible_with_formula_types(
|
||||
BaserowFormulaDateType.type,
|
||||
),
|
||||
]
|
||||
|
||||
def get_filter(self, field_name, value, model_field, field):
|
||||
"""
|
||||
Parses the provided value string and converts it to an aware datetime object.
|
||||
That object will used to make a comparison with the provided field name.
|
||||
"""
|
||||
|
||||
value = value.strip()
|
||||
|
||||
if value == "":
|
||||
return Q()
|
||||
|
||||
utc = timezone("UTC")
|
||||
|
||||
try:
|
||||
parsed_datetime = parser.isoparse(value).astimezone(utc)
|
||||
except (ParserError, ValueError):
|
||||
return Q()
|
||||
|
||||
# If the length of the string value is lower than 10 characters we know it is
|
||||
# only a date so we can match only on year, month and day level. This way if a
|
||||
# date is provided, but if it tries to compare with a models.DateTimeField it
|
||||
# will still give back accurate results.
|
||||
# Since the LastModified and CreateOn fields are stored for a specific timezone
|
||||
# we need to make sure to take this timezone into account when comparing to
|
||||
# the "equals_date"
|
||||
has_timezone = hasattr(field, "timezone")
|
||||
if len(value) <= 10:
|
||||
|
||||
def query_dict(query_field_name):
|
||||
return {
|
||||
f"{query_field_name}__year": parsed_datetime.year,
|
||||
f"{query_field_name}__month": parsed_datetime.month,
|
||||
f"{query_field_name}__day": parsed_datetime.day,
|
||||
}
|
||||
|
||||
if has_timezone:
|
||||
timezone_string = field.get_timezone()
|
||||
tmp_field_name = f"{field_name}_timezone_{timezone_string}"
|
||||
return AnnotatedQ(
|
||||
annotation={
|
||||
f"{tmp_field_name}": Timezone(field_name, timezone_string)
|
||||
},
|
||||
q=query_dict(tmp_field_name),
|
||||
)
|
||||
else:
|
||||
return Q(**query_dict(field_name))
|
||||
else:
|
||||
return Q(**{field_name: parsed_datetime})
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {field_name: aware_filter_date}
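
With the shared base class, the `date_equal` implementation shrinks to the single hook above. A new timezone-aware date filter would follow the same pattern; for instance, a hypothetical "on or before" filter (not part of this merge request) might only need:

class DateOnOrBeforeViewFilterType(TimezoneAwareDateViewFilterType):
    type = "date_on_or_before"  # hypothetical type name

    def get_filter_query_dict(
        self, field_name: str, aware_filter_date: Union[date, datetime]
    ) -> Dict:
        return {f"{field_name}__lte": aware_filter_date}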
|
||||
|
||||
|
||||
class BaseDateFieldLookupFilterType(ViewFilterType):
|
||||
"""
|
||||
The base date field lookup filter serves as a base class for DateViewFilters.
|
||||
With it a valid ISO date can be parsed into a date object which subsequently can
|
||||
be used to filter a model.DateField or model.DateTimeField.
|
||||
If the model field in question is a DateTimeField then the get_filter function
|
||||
makes sure to only use the date part of the datetime in order to filter. This means
|
||||
that the time part of a DateTimeField gets completely ignored.
|
||||
|
||||
The 'query_field_lookup' needs to be set on the deriving classes to something like
|
||||
'__lt'
|
||||
'__lte'
|
||||
'__gt'
|
||||
'__gte'
|
||||
"""
|
||||
|
||||
type = "base_date_field_lookup_type"
|
||||
query_field_lookup = ""
|
||||
query_date_lookup = ""
|
||||
compatible_field_types = [
|
||||
DateFieldType.type,
|
||||
LastModifiedFieldType.type,
|
||||
CreatedOnFieldType.type,
|
||||
FormulaFieldType.compatible_with_formula_types(
|
||||
BaserowFormulaDateType.type,
|
||||
),
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def parse_date(value: str) -> Union[datetime.date, datetime]:
|
||||
"""
|
||||
Parses the provided value string and converts it to a date object.
|
||||
Raises an error if the provided value is an empty string or cannot be parsed
|
||||
to a date object
|
||||
"""
|
||||
|
||||
value = value.strip()
|
||||
|
||||
if value == "":
|
||||
raise ValueError
|
||||
|
||||
utc = timezone("UTC")
|
||||
|
||||
try:
|
||||
parsed_datetime = parser.isoparse(value).astimezone(utc)
|
||||
return parsed_datetime
|
||||
except ValueError as e:
|
||||
raise e
|
||||
|
||||
@staticmethod
|
||||
def is_date(value: str) -> bool:
|
||||
try:
|
||||
datetime.strptime(value, "%Y-%m-%d")
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
def get_filter(self, field_name, value, model_field, field):
|
||||
# in order to only compare the date part of a datetime field
|
||||
# we need to verify that we are in fact dealing with a datetime field
|
||||
# if so the django query lookup '__date' gets appended to the field_name
|
||||
# otherwise (i.e. it is a date field) nothing gets appended
|
||||
query_date_lookup = self.query_date_lookup
|
||||
if (
|
||||
isinstance(model_field, DateTimeField)
|
||||
and self.is_date(value)
|
||||
and not query_date_lookup
|
||||
):
|
||||
query_date_lookup = "__date"
|
||||
try:
|
||||
parsed_date = self.parse_date(value)
|
||||
has_timezone = hasattr(field, "timezone")
|
||||
field_key = f"{field_name}{query_date_lookup}{self.query_field_lookup}"
|
||||
if has_timezone:
|
||||
timezone_string = field.get_timezone()
|
||||
tmp_field_name = f"{field_name}_timezone_{timezone_string}"
|
||||
field_key = (
|
||||
f"{tmp_field_name}{query_date_lookup}{self.query_field_lookup}"
|
||||
)
|
||||
|
||||
return AnnotatedQ(
|
||||
annotation={
|
||||
f"{tmp_field_name}": Timezone(field_name, timezone_string)
|
||||
},
|
||||
q={field_key: parsed_date},
|
||||
)
|
||||
else:
|
||||
return Q(**{field_key: parsed_date})
|
||||
except (ParserError, ValueError):
|
||||
return Q()
|
||||
|
||||
|
||||
class DateBeforeViewFilterType(BaseDateFieldLookupFilterType):
|
||||
class DateBeforeViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The date before filter parses the provided filter value as date and checks if the
|
||||
field value is before this date (lower than).
|
||||
|
@ -485,10 +496,14 @@ class DateBeforeViewFilterType(BaseDateFieldLookupFilterType):
|
|||
"""
|
||||
|
||||
type = "date_before"
|
||||
query_field_lookup = "__lt"
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict[str, Any]:
|
||||
return {f"{field_name}__lt": aware_filter_date}
|
||||
|
||||
|
||||
class DateAfterViewFilterType(BaseDateFieldLookupFilterType):
|
||||
class DateAfterViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The after date filter parses the provided filter value as date and checks if
|
||||
the field value is after this date (greater than).
|
||||
|
@ -496,178 +511,184 @@ class DateAfterViewFilterType(BaseDateFieldLookupFilterType):
|
|||
"""
|
||||
|
||||
type = "date_after"
|
||||
query_field_lookup = "__gt"
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict[str, Any]:
|
||||
return {f"{field_name}__gt": aware_filter_date}
|
||||
|
||||
|
||||
class DateCompareTodayViewFilterType(ViewFilterType):
|
||||
class DateEqualsDayOfMonthViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The today filter checks if the field value matches the defined operator with
|
||||
today's date.
|
||||
The day of month filter checks if the day component of the date field value
matches the provided day of the month number.
|
||||
"""
|
||||
|
||||
@property
|
||||
def type(self) -> str:
|
||||
"""
|
||||
Returns the type of the filter (e.g. 'date_equals_today' for a
|
||||
view_filter that filters for today).
|
||||
"""
|
||||
type = "date_equals_day_of_month"
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def make_query_dict(self, field_name: str, now: datetime) -> Dict:
|
||||
"""
|
||||
Creates a query dict for the specific view_filter, given the field name
|
||||
based on today's date.
|
||||
|
||||
:param field_name: The field name to use in the query dict.
|
||||
:param now: The current date.
|
||||
:return: The query dict.
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
compatible_field_types = [
|
||||
DateFieldType.type,
|
||||
LastModifiedFieldType.type,
|
||||
CreatedOnFieldType.type,
|
||||
FormulaFieldType.compatible_with_formula_types(
|
||||
BaserowFormulaDateType.type,
|
||||
),
|
||||
]
|
||||
|
||||
def get_filter(self, field_name, value, model_field, field):
|
||||
timezone_string = value if value in all_timezones else "UTC"
|
||||
timezone_object = timezone(timezone_string)
|
||||
field_has_timezone = hasattr(field, "timezone")
|
||||
now = datetime.utcnow().astimezone(timezone_object)
|
||||
|
||||
if field_has_timezone:
|
||||
tmp_field_name = f"{field_name}_timezone_{timezone_string}"
|
||||
return AnnotatedQ(
|
||||
annotation={f"{tmp_field_name}": Timezone(field_name, timezone_string)},
|
||||
q=self.make_query_dict(tmp_field_name, now),
|
||||
def get_filter(
|
||||
self, field_name: str, value: str, model_field, field: Field
|
||||
) -> Union[Q, AnnotatedQ]:
|
||||
try:
|
||||
timezone, filter_value = self.split_timezone_and_filter_value(
|
||||
field, value.strip()
|
||||
)
|
||||
else:
|
||||
return Q(**self.make_query_dict(field_name, now))
|
||||
if self.is_empty_filter(filter_value):
|
||||
return Q()
|
||||
|
||||
day_of_month = int(filter_value)
|
||||
if day_of_month < 1 or day_of_month > 31:
|
||||
raise ValueError
|
||||
|
||||
except (ValueError, pytz.UnknownTimeZoneError):
|
||||
return Q(pk__in=[])
|
||||
|
||||
if field.date_include_time: # filter on a datetime field
|
||||
annotated_field_name = f"{field_name}_day_of_month_tz"
|
||||
return AnnotatedQ(
|
||||
annotation={annotated_field_name: Extract(field_name, "day", timezone)},
|
||||
q={annotated_field_name: day_of_month},
|
||||
)
|
||||
else: # filter on a date field
|
||||
return Q(**{f"{field_name}__day": day_of_month})
|
||||
|
||||
|
||||
class DateEqualsTodayViewFilterType(DateCompareTodayViewFilterType):
|
||||
class EmptyFilterValueMixin:
|
||||
def is_empty_filter(self, filter_value: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
class DateEqualsTodayViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
The today filter checks if the field value matches today's date.
|
||||
"""
|
||||
|
||||
type = "date_equals_today"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
return {
|
||||
f"{field_name}__day": now.day,
|
||||
f"{field_name}__month": now.month,
|
||||
f"{field_name}__year": now.year,
|
||||
}
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return datetime.now(tz=timezone).date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {field_name: aware_filter_date}
|
||||
|
||||
|
||||
class DateBeforeTodayViewFilterType(DateCompareTodayViewFilterType):
|
||||
class DateBeforeTodayViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
The before today filter checks if the field value is before today's date.
|
||||
"""
|
||||
|
||||
type = "date_before_today"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
min_today = datetime.combine(now, time.min)
|
||||
return {f"{field_name}__lt": min_today}
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return (datetime.now(tz=timezone) - timedelta(days=1)).date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {f"{field_name}__lte": aware_filter_date}
|
||||
|
||||
|
||||
class DateAfterTodayViewFilterType(DateCompareTodayViewFilterType):
|
||||
class DateAfterTodayViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
The after today filter checks if the field value is after today's date.
|
||||
"""
|
||||
|
||||
type = "date_after_today"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
max_today = datetime.combine(now, time.max)
|
||||
return {f"{field_name}__gt": max_today}
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return (datetime.now(tz=timezone) + timedelta(days=1)).date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {f"{field_name}__gte": aware_filter_date}
|
||||
|
||||
|
||||
class DateEqualsXAgoViewFilterType(ViewFilterType):
|
||||
class DateEqualsCurrentWeekViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
Base class for is days, months, years ago filter.
|
||||
The current week filter works as a subset of the today filter and checks if the
field value falls into the current week.
|
||||
"""
|
||||
|
||||
query_for = ["year", "month", "day"]
|
||||
type = "date_equals_week"
|
||||
|
||||
compatible_field_types = [
|
||||
DateFieldType.type,
|
||||
LastModifiedFieldType.type,
|
||||
CreatedOnFieldType.type,
|
||||
FormulaFieldType.compatible_with_formula_types(
|
||||
BaserowFormulaDateType.type,
|
||||
),
|
||||
]
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return datetime.now(tz=timezone).date()
|
||||
|
||||
def _extract_values(self, value, separator="?"):
|
||||
try:
|
||||
tzone, time_unit_ago = value.split(separator)
|
||||
time_unit_ago = int(time_unit_ago)
|
||||
except ValueError:
|
||||
return None, None
|
||||
|
||||
timezone_string = tzone if tzone in all_timezones else "UTC"
|
||||
return timezone_string, time_unit_ago
|
||||
|
||||
def get_date_to_compare(now: datetime, x_units_ago: int) -> datetime:
|
||||
"""
|
||||
Should be overriden in subclasses and return computed date
|
||||
that will be used to compare year, month and day portions
|
||||
in get_filter.
|
||||
|
||||
:param now: Datetime in the specified timezone.
|
||||
:param x_units_ago: Number of days/months/years that the
|
||||
date needs to shift in the past.
|
||||
"""
|
||||
|
||||
raise NotImplementedError(
|
||||
"Each subclass must have its own get_date_to_compare method."
|
||||
)
|
||||
|
||||
def get_filter(self, field_name, value, model_field, field):
|
||||
timezone_string, x_units_ago = self._extract_values(value)
|
||||
if x_units_ago is None:
|
||||
# invalid x_units_ago value will result in an empty filter
|
||||
return Q()
|
||||
|
||||
timezone_object = timezone(timezone_string)
|
||||
field_has_timezone = hasattr(field, "timezone")
|
||||
now = datetime.utcnow().astimezone(timezone_object)
|
||||
try:
|
||||
when = self.get_date_to_compare(now, x_units_ago)
|
||||
except Exception:
|
||||
# return nothing when the filter can't be computed
|
||||
return Q(pk__in=[])
|
||||
|
||||
def make_query_dict(query_field_name):
|
||||
query_dict = dict()
|
||||
if "year" in self.query_for:
|
||||
query_dict[f"{query_field_name}__year"] = when.year
|
||||
if "month" in self.query_for:
|
||||
query_dict[f"{query_field_name}__month"] = when.month
|
||||
if "day" in self.query_for:
|
||||
query_dict[f"{query_field_name}__day"] = when.day
|
||||
|
||||
return query_dict
|
||||
|
||||
if field_has_timezone:
|
||||
tmp_field_name = f"{field_name}_timezone_{timezone_string}"
|
||||
return AnnotatedQ(
|
||||
annotation={f"{tmp_field_name}": Timezone(field_name, timezone_string)},
|
||||
q=make_query_dict(tmp_field_name),
|
||||
)
|
||||
else:
|
||||
return Q(**make_query_dict(field_name))
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
week_of_year = aware_filter_date.isocalendar().week
|
||||
return {
|
||||
f"{field_name}__week": week_of_year,
|
||||
f"{field_name}__year": aware_filter_date.year,
|
||||
}
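
Note that `isocalendar()` only exposes the `.week` attribute on Python 3.9+, where it returns a named tuple; the same value is available by index on older interpreters. A small sketch:

from datetime import date

d = date(2023, 1, 15)
d.isocalendar().week  # Python 3.9+: named-tuple attribute access
d.isocalendar()[1]    # equivalent on older Python versions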
|
||||
|
||||
|
||||
class DateEqualsDaysAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
||||
class DateEqualsCurrentMonthViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
The current month filter works as a subset of the today filter and checks if the
field value falls into the current month.
|
||||
"""
|
||||
|
||||
type = "date_equals_month"
|
||||
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return datetime.now(tz=timezone).date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {
|
||||
f"{field_name}__month": aware_filter_date.month,
|
||||
f"{field_name}__year": aware_filter_date.year,
|
||||
}
|
||||
|
||||
|
||||
class DateEqualsCurrentYearViewFilterType(
|
||||
EmptyFilterValueMixin, TimezoneAwareDateViewFilterType
|
||||
):
|
||||
"""
|
||||
The current year filter works as a subset of the today filter and checks if the
field value falls into the current year.
|
||||
"""
|
||||
|
||||
type = "date_equals_year"
|
||||
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
return datetime.now(tz=timezone).date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {f"{field_name}__year": aware_filter_date.year}
|
||||
|
||||
|
||||
class DateEqualsDaysAgoViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The "number of days ago" filter checks if the field value matches with today's
|
||||
date minus the specified number of days.
|
||||
|
@ -678,11 +699,19 @@ class DateEqualsDaysAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
|||
|
||||
type = "date_equals_days_ago"
|
||||
|
||||
def get_date_to_compare(self, now, x_units_ago):
|
||||
return now - timedelta(days=x_units_ago)
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
filter_date = datetime.now(tz=timezone) - timedelta(days=int(filter_value))
|
||||
return filter_date.date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {field_name: aware_filter_date}
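
As an example of the resulting comparison: with "today" being 2023-02-10 in the requested timezone, a filter value of "3" (or "Europe/Rome?3") resolves to 2023-02-07 and produces an exact date match. A sketch:

from datetime import datetime, timedelta
import pytz

now = datetime(2023, 2, 10, 12, 0, tzinfo=pytz.UTC)
filter_date = (now - timedelta(days=3)).date()   # -> date(2023, 2, 7)
# the generated query dict is then {field_name: date(2023, 2, 7)}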
|
||||
|
||||
|
||||
class DateEqualsMonthsAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
||||
class DateEqualsMonthsAgoViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The "number of months ago" filter checks if the field value's month is within
|
||||
the specified "months ago" based on the current date.
|
||||
|
@ -692,13 +721,25 @@ class DateEqualsMonthsAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
|||
"""
|
||||
|
||||
type = "date_equals_months_ago"
|
||||
query_for = ["year", "month"]
|
||||
|
||||
def get_date_to_compare(self, now, x_units_ago):
|
||||
return now + relativedelta(months=-x_units_ago)
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
filter_date = datetime.now(tz=timezone) + relativedelta(
|
||||
months=-int(filter_value)
|
||||
)
|
||||
return filter_date.date()
|
||||
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {
|
||||
f"{field_name}__year": aware_filter_date.year,
|
||||
f"{field_name}__month": aware_filter_date.month,
|
||||
}
|
||||
|
||||
|
||||
class DateEqualsYearsAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
||||
class DateEqualsYearsAgoViewFilterType(TimezoneAwareDateViewFilterType):
|
||||
"""
|
||||
The "is years ago" filter checks if the field value's year is within
|
||||
the specified "years ago" based on the current date.
|
||||
|
@ -708,83 +749,25 @@ class DateEqualsYearsAgoViewFilterType(DateEqualsXAgoViewFilterType):
|
|||
"""
|
||||
|
||||
type = "date_equals_years_ago"
|
||||
query_for = ["year"]
|
||||
|
||||
def get_date_to_compare(self, now, x_units_ago):
|
||||
return now + relativedelta(years=-x_units_ago)
|
||||
def get_filter_date(
|
||||
self, filter_value: str, timezone: pytz.BaseTzInfo
|
||||
) -> Union[datetime, date]:
|
||||
filter_date = datetime.now(tz=timezone) + relativedelta(
|
||||
years=-int(filter_value)
|
||||
)
|
||||
return filter_date.date()
|
||||
|
||||
|
||||
class DateEqualsCurrentWeekViewFilterType(DateCompareTodayViewFilterType):
|
||||
"""
|
||||
The current week filter works as a subset of today filter and checks if the
|
||||
field value falls into current week.
|
||||
"""
|
||||
|
||||
type = "date_equals_week"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
week_of_year = now.isocalendar()[1]
|
||||
return {
|
||||
f"{field_name}__week": week_of_year,
|
||||
f"{field_name}__year": now.year,
|
||||
}
|
||||
|
||||
|
||||
class DateEqualsCurrentMonthViewFilterType(DateCompareTodayViewFilterType):
|
||||
"""
|
||||
The current month filter works as a subset of today filter and checks if the
|
||||
field value falls into current month.
|
||||
"""
|
||||
|
||||
type = "date_equals_month"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
return {
|
||||
f"{field_name}__month": now.month,
|
||||
f"{field_name}__year": now.year,
|
||||
}
|
||||
|
||||
|
||||
class DateEqualsCurrentYearViewFilterType(DateCompareTodayViewFilterType):
|
||||
"""
|
||||
The current month filter works as a subset of today filter and checks if the
|
||||
field value falls into current year.
|
||||
"""
|
||||
|
||||
type = "date_equals_year"
|
||||
|
||||
def make_query_dict(self, field_name, now):
|
||||
return {
|
||||
f"{field_name}__year": now.year,
|
||||
}
|
||||
def get_filter_query_dict(
|
||||
self, field_name: str, aware_filter_date: Union[date, datetime]
|
||||
) -> Dict:
|
||||
return {f"{field_name}__year": aware_filter_date.year}
|
||||
|
||||
|
||||
class DateNotEqualViewFilterType(NotViewFilterTypeMixin, DateEqualViewFilterType):
|
||||
type = "date_not_equal"
|
||||
|
||||
|
||||
class DateEqualsDayOfMonthViewFilterType(BaseDateFieldLookupFilterType):
|
||||
"""
|
||||
The day of month filter checks if the field number value
|
||||
matches the date's day of the month value.
|
||||
"""
|
||||
|
||||
type = "date_equals_day_of_month"
|
||||
query_date_lookup = "__day"
|
||||
|
||||
@staticmethod
|
||||
def parse_date(value: str) -> str:
|
||||
# Check if the value is a positive number
|
||||
if not value.isdigit():
|
||||
raise ValueError
|
||||
|
||||
# Check if the value is a valid day of the month
|
||||
if int(value) < 1 or int(value) > 31:
|
||||
raise ValueError
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class SingleSelectEqualViewFilterType(ViewFilterType):
|
||||
"""
|
||||
The single select equal filter accepts a select option id as filter value. This
|
||||
|
|
|
@ -112,6 +112,12 @@ class CustomFieldsInstanceMixin:
|
|||
useful if you want to add some custom SerializerMethodField for example.
|
||||
"""
|
||||
|
||||
serializer_extra_kwargs = None
|
||||
"""
|
||||
The extra kwargs that must be added to the serializer fields. This property is
useful if you, for example, want to mark a field as `write_only`.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
:raises ValueError: If the object does not have a `model_class` attribute.
|
||||
|
@ -151,6 +157,7 @@ class CustomFieldsInstanceMixin:
|
|||
field_names,
|
||||
field_overrides=field_overrides,
|
||||
base_mixins=mixins,
|
||||
meta_extra_kwargs=self.serializer_extra_kwargs,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
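
With `serializer_extra_kwargs` wired through to the generated serializer's `Meta.extra_kwargs`, a registry instance can declare per-field kwargs declaratively. A hypothetical usage sketch (the field type name and attribute are made up for illustration):

class ExampleFieldType(FieldType):
    type = "example"
    allowed_fields = ["secret_value"]
    serializer_field_names = ["secret_value"]
    # Ends up in the generated serializer's Meta.extra_kwargs, so the value
    # can be written through the API but is never included in responses.
    serializer_extra_kwargs = {"secret_value": {"write_only": True}}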
|
||||
|
|
|
@ -142,6 +142,9 @@ class FieldFixtures:
|
|||
if "order" not in kwargs:
|
||||
kwargs["order"] = 0
|
||||
|
||||
if "date_show_tzinfo" not in kwargs:
|
||||
kwargs["date_show_tzinfo"] = False
|
||||
|
||||
field = DateField.objects.create(**kwargs)
|
||||
|
||||
if create_field:
|
||||
|
@ -308,8 +311,8 @@ class FieldFixtures:
|
|||
if "date_include_time" not in kwargs:
|
||||
kwargs["date_include_time"] = False
|
||||
|
||||
if "timezone" not in kwargs:
|
||||
kwargs["timezone"] = "Europe/Berlin"
|
||||
if "date_show_tzinfo" not in kwargs:
|
||||
kwargs["date_show_tzinfo"] = False
|
||||
|
||||
field = LastModifiedField.objects.create(**kwargs)
|
||||
|
||||
|
@ -331,8 +334,8 @@ class FieldFixtures:
|
|||
if "date_include_time" not in kwargs:
|
||||
kwargs["date_include_time"] = False
|
||||
|
||||
if "timezone" not in kwargs:
|
||||
kwargs["timezone"] = "Europe/Berlin"
|
||||
if "date_show_tzinfo" not in kwargs:
|
||||
kwargs["date_show_tzinfo"] = False
|
||||
|
||||
field = CreatedOnField.objects.create(**kwargs)
|
||||
|
||||
|
@ -373,6 +376,9 @@ class FieldFixtures:
|
|||
if "nullable" not in kwargs:
|
||||
kwargs["nullable"] = False
|
||||
|
||||
if "date_show_tzinfo" not in kwargs:
|
||||
kwargs["date_show_tzinfo"] = False
|
||||
|
||||
recalculate = kwargs.pop("recalculate", True)
|
||||
|
||||
field = FormulaField(**kwargs)
|
||||
|
|
|
@ -149,14 +149,18 @@ def setup_interesting_test_table(
|
|||
"date_us": date,
|
||||
"datetime_eu": datetime,
|
||||
"date_eu": date,
|
||||
"datetime_eu_tzone_visible": datetime,
|
||||
"datetime_eu_tzone_hidden": datetime,
|
||||
"last_modified_datetime_us": None,
|
||||
"last_modified_date_us": None,
|
||||
"last_modified_datetime_eu": None,
|
||||
"last_modified_date_eu": None,
|
||||
"last_modified_datetime_eu_tzone": None,
|
||||
"created_on_datetime_us": None,
|
||||
"created_on_date_us": None,
|
||||
"created_on_datetime_eu": None,
|
||||
"created_on_date_eu": None,
|
||||
"created_on_datetime_eu_tzone": None,
|
||||
# We will setup link rows manually later
|
||||
"link_row": None,
|
||||
"self_link_row": None,
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import pytest
|
||||
import responses
|
||||
from pytz import UTC, timezone
|
||||
|
||||
from baserow.contrib.database.airtable.airtable_column_types import (
|
||||
CheckboxAirtableColumnType,
|
||||
|
@ -45,7 +44,7 @@ def test_unknown_column_type():
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert baserow_field is None
|
||||
assert baserow_field is None
|
||||
|
@ -60,7 +59,7 @@ def test_unknown_column_type():
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert baserow_field is None
|
||||
assert baserow_field is None
|
||||
|
@ -78,7 +77,7 @@ def test_airtable_import_text_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, TextField)
|
||||
assert isinstance(airtable_column_type, TextAirtableColumnType)
|
||||
|
@ -97,7 +96,7 @@ def test_airtable_import_checkbox_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, BooleanField)
|
||||
assert isinstance(airtable_column_type, CheckboxAirtableColumnType)
|
||||
|
@ -125,14 +124,14 @@ def test_airtable_import_created_on_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, CreatedOnField)
|
||||
assert isinstance(airtable_column_type, FormulaAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is False
|
||||
assert baserow_field.date_time_format == "24"
|
||||
assert baserow_field.timezone == "UTC"
|
||||
assert baserow_field.date_force_timezone is None
|
||||
|
||||
airtable_field = {
|
||||
"id": "fldcTpJuoUVpsDNoszO",
|
||||
|
@ -154,18 +153,18 @@ def test_airtable_import_created_on_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, CreatedOnField)
|
||||
assert isinstance(airtable_column_type, FormulaAirtableColumnType)
|
||||
assert baserow_field.date_format == "EU"
|
||||
assert baserow_field.date_include_time is True
|
||||
assert baserow_field.date_time_format == "12"
|
||||
assert baserow_field.timezone == "Europe/Amsterdam"
|
||||
assert baserow_field.date_force_timezone == "Europe/Amsterdam"
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -184,7 +183,7 @@ def test_airtable_import_date_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
|
@ -194,19 +193,19 @@ def test_airtable_import_date_column(data_fixture, api_client):
|
|||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-03"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "0999-02-04T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "0999-02-04T14:51:00.000Z", {}
|
||||
)
|
||||
== "0999-02-04"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, None, UTC, {}
|
||||
{}, airtable_field, baserow_field, None, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -228,7 +227,7 @@ def test_airtable_import_european_date_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
|
@ -238,19 +237,19 @@ def test_airtable_import_european_date_column(data_fixture, api_client):
|
|||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-03"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2020-08-27T21:10:24.828Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2020-08-27T21:10:24.828Z", {}
|
||||
)
|
||||
== "2020-08-27"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, None, UTC, {}
|
||||
{}, airtable_field, baserow_field, None, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -274,7 +273,7 @@ def test_airtable_import_datetime_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
|
@ -284,19 +283,19 @@ def test_airtable_import_datetime_column(data_fixture, api_client):
|
|||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-03T14:51:00+00:00"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2020-08-27T21:10:24.828Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2020-08-27T21:10:24.828Z", {}
|
||||
)
|
||||
== "2020-08-27T21:10:24.828000+00:00"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, None, UTC, {}
|
||||
{}, airtable_field, baserow_field, None, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -304,74 +303,7 @@ def test_airtable_import_datetime_column(data_fixture, api_client):
|
|||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
def test_airtable_import_date_with_different_default_timezone_column(
|
||||
data_fixture, api_client
|
||||
):
|
||||
airtable_field = {
|
||||
"id": "fldyAXIzheHfugGhuFD",
|
||||
"name": "ISO DATE",
|
||||
"type": "date",
|
||||
"typeOptions": {"isDateTime": False, "dateFormat": "ISO"},
|
||||
}
|
||||
(
|
||||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is False
|
||||
assert baserow_field.date_time_format == "24"
|
||||
|
||||
amsterdam = timezone("Europe/Amsterdam")
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T23:51:00.000Z", amsterdam, {}
|
||||
)
|
||||
== "2022-01-04"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
def test_airtable_import_date_with_different_airtable_timezone_column(
|
||||
data_fixture, api_client
|
||||
):
|
||||
airtable_field = {
|
||||
"id": "fldyAXIzheHfugGhuFD",
|
||||
"name": "ISO DATE",
|
||||
"type": "date",
|
||||
"typeOptions": {
|
||||
"isDateTime": False,
|
||||
"dateFormat": "ISO",
|
||||
"timeZone": "Europe/Amsterdam",
|
||||
},
|
||||
}
|
||||
(
|
||||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is False
|
||||
assert baserow_field.date_time_format == "24"
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T23:51:00.000Z", UTC, {}
|
||||
)
|
||||
== "2022-01-04"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
def test_airtable_import_datetime_with_different_default_timezone_column(
|
||||
def test_airtable_import_datetime_with_default_timezone_column(
|
||||
data_fixture, api_client
|
||||
):
|
||||
airtable_field = {
|
||||
|
@ -389,20 +321,57 @@ def test_airtable_import_datetime_with_different_default_timezone_column(
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is True
|
||||
assert baserow_field.date_time_format == "24"
|
||||
assert baserow_field.date_force_timezone is None
|
||||
|
||||
amsterdam = timezone("Europe/Amsterdam")
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T23:51:00.000Z", amsterdam, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T23:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-04T00:51:00+00:00"
|
||||
== "2022-01-03T23:51:00+00:00"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
def test_airtable_import_datetime_with_different_default_timezone_column(
|
||||
data_fixture, api_client
|
||||
):
|
||||
airtable_field = {
|
||||
"id": "fldEB5dp0mNjVZu0VJI",
|
||||
"name": "Date",
|
||||
"type": "date",
|
||||
"typeOptions": {
|
||||
"isDateTime": True,
|
||||
"dateFormat": "Local",
|
||||
"timeFormat": "24hour",
|
||||
"timeZone": "Europe/Amsterdam",
|
||||
},
|
||||
}
|
||||
(
|
||||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is True
|
||||
assert baserow_field.date_time_format == "24"
|
||||
assert baserow_field.date_force_timezone == "Europe/Amsterdam"
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T23:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-03T23:51:00+00:00"
|
||||
)
|
||||
|
||||
|
||||
|
@ -424,7 +393,7 @@ def test_airtable_import_datetime_edge_case_1(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, DateField)
|
||||
assert isinstance(airtable_column_type, DateAirtableColumnType)
|
||||
|
@ -432,15 +401,9 @@ def test_airtable_import_datetime_edge_case_1(data_fixture, api_client):
|
|||
assert baserow_field.date_include_time is True
|
||||
assert baserow_field.date_time_format == "24"
|
||||
|
||||
amsterdam = timezone("Europe/Amsterdam")
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{},
|
||||
airtable_field,
|
||||
baserow_field,
|
||||
"+020222-03-28T00:00:00.000Z",
|
||||
amsterdam,
|
||||
{},
|
||||
{}, airtable_field, baserow_field, "+020222-03-28T00:00:00.000Z", {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -459,20 +422,20 @@ def test_airtable_import_email_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, EmailField)
|
||||
assert isinstance(airtable_column_type, TextAirtableColumnType)
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "NOT_EMAIL", UTC, {}
|
||||
{}, airtable_field, baserow_field, "NOT_EMAIL", {}
|
||||
)
|
||||
== ""
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "test@test.nl", UTC, {}
|
||||
{}, airtable_field, baserow_field, "test@test.nl", {}
|
||||
)
|
||||
== "test@test.nl"
|
||||
)
|
||||
|
@ -491,7 +454,7 @@ def test_airtable_import_multiple_attachment_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, FileField)
|
||||
assert isinstance(airtable_column_type, MultipleAttachmentAirtableColumnType)
|
||||
|
@ -525,7 +488,6 @@ def test_airtable_import_multiple_attachment_column(data_fixture, api_client):
|
|||
"size": 503296,
|
||||
},
|
||||
],
|
||||
UTC,
|
||||
files_to_download,
|
||||
) == [
|
||||
{
|
||||
|
@ -570,14 +532,14 @@ def test_airtable_import_last_modified_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LastModifiedField)
|
||||
assert isinstance(airtable_column_type, FormulaAirtableColumnType)
|
||||
assert baserow_field.date_format == "ISO"
|
||||
assert baserow_field.date_include_time is False
|
||||
assert baserow_field.date_time_format == "24"
|
||||
assert baserow_field.timezone == "UTC"
|
||||
assert baserow_field.date_force_timezone is None
|
||||
|
||||
airtable_field = {
|
||||
"id": "fldws6n8xdrEJrMxJFJ",
|
||||
|
@ -602,18 +564,18 @@ def test_airtable_import_last_modified_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LastModifiedField)
|
||||
assert isinstance(airtable_column_type, FormulaAirtableColumnType)
|
||||
assert baserow_field.date_format == "US"
|
||||
assert baserow_field.date_include_time is True
|
||||
assert baserow_field.date_time_format == "12"
|
||||
assert baserow_field.timezone == "Europe/Amsterdam"
|
||||
assert baserow_field.date_force_timezone == "Europe/Amsterdam"
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", UTC, {}
|
||||
{}, airtable_field, baserow_field, "2022-01-03T14:51:00.000Z", {}
|
||||
)
|
||||
== "2022-01-03T14:51:00+00:00"
|
||||
)
|
||||
|
@ -637,7 +599,7 @@ def test_airtable_import_foreign_key_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{"id": "tblxxx"}, airtable_field, UTC
|
||||
{"id": "tblxxx"}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LinkRowField)
|
||||
assert isinstance(airtable_column_type, ForeignKeyAirtableColumnType)
|
||||
|
@ -663,7 +625,6 @@ def test_airtable_import_foreign_key_column(data_fixture, api_client):
|
|||
"foreignRowDisplayName": "Bram 2",
|
||||
},
|
||||
],
|
||||
UTC,
|
||||
{},
|
||||
) == [1, 2]
|
||||
|
||||
|
@ -683,7 +644,7 @@ def test_airtable_import_foreign_key_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{"id": "tblRpq315qnnIcg5IjI"}, airtable_field, UTC
|
||||
{"id": "tblRpq315qnnIcg5IjI"}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LinkRowField)
|
||||
assert isinstance(airtable_column_type, ForeignKeyAirtableColumnType)
|
||||
|
@ -703,14 +664,14 @@ def test_airtable_import_multiline_text_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LongTextField)
|
||||
assert isinstance(airtable_column_type, MultilineTextAirtableColumnType)
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "test", UTC, {}
|
||||
{}, airtable_field, baserow_field, "test", {}
|
||||
)
|
||||
== "test"
|
||||
)
|
||||
|
@ -728,7 +689,7 @@ def test_airtable_import_rich_text_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, LongTextField)
|
||||
assert isinstance(airtable_column_type, RichTextTextAirtableColumnType)
|
||||
|
@ -745,7 +706,7 @@ def test_airtable_import_rich_text_column(data_fixture, api_client):
|
|||
}
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, content, UTC, {}
|
||||
{}, airtable_field, baserow_field, content, {}
|
||||
)
|
||||
== "Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere "
|
||||
"cubilia curae; Class aptent taciti sociosqu ad litora."
|
||||
|
@ -782,7 +743,7 @@ def test_airtable_import_multi_select_column(
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, MultipleSelectField)
|
||||
assert isinstance(airtable_column_type, MultiSelectAirtableColumnType)
|
||||
|
@ -818,7 +779,7 @@ def test_airtable_import_number_integer_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, NumberField)
|
||||
assert isinstance(airtable_column_type, NumberAirtableColumnType)
|
||||
|
@ -827,31 +788,31 @@ def test_airtable_import_number_integer_column(data_fixture, api_client):
|
|||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "10", UTC, {}
|
||||
{}, airtable_field, baserow_field, "10", {}
|
||||
)
|
||||
== "10"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, 10, UTC, {}
|
||||
{}, airtable_field, baserow_field, 10, {}
|
||||
)
|
||||
== "10"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "-10", UTC, {}
|
||||
{}, airtable_field, baserow_field, "-10", {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, -10, UTC, {}
|
||||
{}, airtable_field, baserow_field, -10, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, None, UTC, {}
|
||||
{}, airtable_field, baserow_field, None, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -874,7 +835,7 @@ def test_airtable_import_number_decimal_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, NumberField)
|
||||
assert isinstance(airtable_column_type, NumberAirtableColumnType)
|
||||
|
@ -895,7 +856,7 @@ def test_airtable_import_number_decimal_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, NumberField)
|
||||
assert isinstance(airtable_column_type, NumberAirtableColumnType)
|
||||
|
@ -904,31 +865,31 @@ def test_airtable_import_number_decimal_column(data_fixture, api_client):
|
|||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "10.22", UTC, {}
|
||||
{}, airtable_field, baserow_field, "10.22", {}
|
||||
)
|
||||
== "10.22"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, 10, UTC, {}
|
||||
{}, airtable_field, baserow_field, 10, {}
|
||||
)
|
||||
== "10"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "-10.555", UTC, {}
|
||||
{}, airtable_field, baserow_field, "-10.555", {}
|
||||
)
|
||||
== "-10.555"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, -10, UTC, {}
|
||||
{}, airtable_field, baserow_field, -10, {}
|
||||
)
|
||||
== "-10"
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, None, UTC, {}
|
||||
{}, airtable_field, baserow_field, None, {}
|
||||
)
|
||||
is None
|
||||
)
|
||||
|
@ -947,7 +908,7 @@ def test_airtable_import_number_decimal_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, NumberField)
|
||||
assert isinstance(airtable_column_type, NumberAirtableColumnType)
|
||||
|
@ -963,20 +924,20 @@ def test_airtable_import_phone_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, PhoneNumberField)
|
||||
assert isinstance(airtable_column_type, PhoneAirtableColumnType)
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "NOT_PHONE", UTC, {}
|
||||
{}, airtable_field, baserow_field, "NOT_PHONE", {}
|
||||
)
|
||||
== ""
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "1234", UTC, {}
|
||||
{}, airtable_field, baserow_field, "1234", {}
|
||||
)
|
||||
== "1234"
|
||||
)
|
||||
|
@ -995,14 +956,14 @@ def test_airtable_import_rating_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, RatingField)
|
||||
assert isinstance(airtable_column_type, RatingAirtableColumnType)
|
||||
assert baserow_field.max_value == 5
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, 5, UTC, {}
|
||||
{}, airtable_field, baserow_field, 5, {}
|
||||
)
|
||||
== 5
|
||||
)
|
||||
|
@ -1038,7 +999,7 @@ def test_airtable_import_select_column(
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, SingleSelectField)
|
||||
assert isinstance(airtable_column_type, SelectAirtableColumnType)
|
||||
|
@ -1070,20 +1031,20 @@ def test_airtable_import_url_column(data_fixture, api_client):
|
|||
baserow_field,
|
||||
airtable_column_type,
|
||||
) = airtable_column_type_registry.from_airtable_column_to_serialized(
|
||||
{}, airtable_field, UTC
|
||||
{}, airtable_field
|
||||
)
|
||||
assert isinstance(baserow_field, URLField)
|
||||
assert isinstance(airtable_column_type, TextAirtableColumnType)
|
||||
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "NOT_URL", UTC, {}
|
||||
{}, airtable_field, baserow_field, "NOT_URL", {}
|
||||
)
|
||||
== ""
|
||||
)
|
||||
assert (
|
||||
airtable_column_type.to_baserow_export_serialized_value(
|
||||
{}, airtable_field, baserow_field, "https://test.nl", UTC, {}
|
||||
{}, airtable_field, baserow_field, "https://test.nl", {}
|
||||
)
|
||||
== "https://test.nl"
|
||||
)
|
||||
|
|
|
@ -10,8 +10,6 @@ from django.core.files.storage import FileSystemStorage

import pytest
import responses
from pytz import UTC, UnknownTimeZoneError
from pytz import timezone as pytz_timezone

from baserow.contrib.database.airtable.exceptions import AirtableShareIsNotABase
from baserow.contrib.database.airtable.handler import AirtableHandler
@ -182,7 +180,7 @@ def test_to_baserow_database_export():
|
|||
|
||||
schema, tables = AirtableHandler.extract_schema([user_table_json, data_table_json])
|
||||
baserow_database_export, files_buffer = AirtableHandler.to_baserow_database_export(
|
||||
init_data, schema, tables, pytz_timezone("Europe/Amsterdam")
|
||||
init_data, schema, tables
|
||||
)
|
||||
|
||||
with ZipFile(files_buffer, "r", ZIP_DEFLATED, False) as zip_file:
|
||||
|
@ -259,7 +257,7 @@ def test_to_baserow_database_export():
|
|||
}
|
||||
assert (
|
||||
baserow_database_export["tables"][1]["rows"][0]["field_fldEB5dp0mNjVZu0VJI"]
|
||||
== "2022-01-21T01:00:00+00:00"
|
||||
== "2022-01-21T00:00:00+00:00"
|
||||
)
|
||||
assert baserow_database_export["tables"][0]["views"] == [
|
||||
{
|
||||
|
@ -312,14 +310,14 @@ def test_to_baserow_database_export_without_primary_value():
|
|||
|
||||
schema, tables = AirtableHandler.extract_schema(deepcopy([user_table_json]))
|
||||
baserow_database_export, files_buffer = AirtableHandler.to_baserow_database_export(
|
||||
init_data, schema, tables, UTC
|
||||
init_data, schema, tables
|
||||
)
|
||||
assert baserow_database_export["tables"][0]["fields"][0]["primary"] is True
|
||||
|
||||
user_table_json["data"]["tableSchemas"][0]["columns"] = []
|
||||
schema, tables = AirtableHandler.extract_schema(deepcopy([user_table_json]))
|
||||
baserow_database_export, files_buffer = AirtableHandler.to_baserow_database_export(
|
||||
init_data, schema, tables, UTC
|
||||
init_data, schema, tables
|
||||
)
|
||||
assert baserow_database_export["tables"][0]["fields"] == [
|
||||
{
|
||||
|
@ -394,7 +392,6 @@ def test_import_from_airtable_to_group(data_fixture, tmpdir):
|
|||
database = AirtableHandler.import_from_airtable_to_group(
|
||||
group,
|
||||
"shrXxmp0WmqsTkFWTzv",
|
||||
timezone=UTC,
|
||||
storage=storage,
|
||||
progress_builder=progress.create_child_builder(represents_progress=1000),
|
||||
)
|
||||
|
@ -451,7 +448,7 @@ def test_import_unsupported_publicly_shared_view(data_fixture, tmpdir):
|
|||
|
||||
with pytest.raises(AirtableShareIsNotABase):
|
||||
AirtableHandler.import_from_airtable_to_group(
|
||||
group, "shrXxmp0WmqsTkFWTzv", timezone=UTC, storage=storage
|
||||
group, "shrXxmp0WmqsTkFWTzv", storage=storage
|
||||
)
|
||||
|
||||
|
||||
|
@ -481,7 +478,6 @@ def test_create_and_start_airtable_import_job(mock_run_async_job, data_fixture):
|
|||
assert job.group_id == group.id
|
||||
assert job.airtable_share_id == "shrXxmp0WmqsTkFWTz"
|
||||
assert job.progress_percentage == 0
|
||||
assert job.timezone is None
|
||||
assert job.state == "pending"
|
||||
assert job.error == ""
|
||||
|
||||
|
@ -490,37 +486,6 @@ def test_create_and_start_airtable_import_job(mock_run_async_job, data_fixture):
|
|||
assert args[0][0] == job.id
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
@responses.activate
|
||||
@patch("baserow.core.jobs.handler.run_async_job")
|
||||
def test_create_and_start_airtable_import_job_with_timezone(
|
||||
mock_run_async_job, data_fixture
|
||||
):
|
||||
user = data_fixture.create_user()
|
||||
group = data_fixture.create_group(user=user)
|
||||
|
||||
with pytest.raises(UnknownTimeZoneError):
|
||||
JobHandler().create_and_start_job(
|
||||
user,
|
||||
"airtable",
|
||||
group_id=group.id,
|
||||
airtable_share_url="https://airtable.com/shrXxmp0WmqsTkFWTz",
|
||||
timezone="UNKNOWN",
|
||||
)
|
||||
|
||||
assert AirtableImportJob.objects.all().count() == 0
|
||||
|
||||
job = JobHandler().create_and_start_job(
|
||||
user,
|
||||
"airtable",
|
||||
group_id=group.id,
|
||||
airtable_share_url="https://airtable.com/shrXxmp0WmqsTkFWTz",
|
||||
timezone="Europe/Amsterdam",
|
||||
)
|
||||
|
||||
assert job.timezone.zone == "Europe/Amsterdam"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
def test_create_and_start_airtable_import_job_while_other_job_is_running(data_fixture):
|
||||
|
|
|
@ -5,7 +5,6 @@ from django.db import connections

import pytest
import responses
from pytz import BaseTzInfo

from baserow.contrib.database.airtable.exceptions import AirtableShareIsNotABase
from baserow.contrib.database.airtable.models import AirtableImportJob

@ -56,7 +55,6 @@ def test_run_import_from_airtable(
assert args[0][1] == job.airtable_share_id
assert isinstance(args[1]["progress_builder"], ChildProgressBuilder)
assert args[1]["progress_builder"].represents_progress == 100
assert "timezone" not in args[1]

job = AirtableImportJob.objects.get(pk=job.id)
assert job.progress_percentage == 100
@ -103,32 +101,3 @@ def test_run_import_shared_view(mock_import_from_airtable_to_group, data_fixture
|
|||
== "The shared link is not a base. It's probably a view and the Airtable "
|
||||
"import tool only supports shared bases."
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@responses.activate
|
||||
@patch(
|
||||
"baserow.contrib.database.airtable.handler"
|
||||
".AirtableHandler.import_from_airtable_to_group"
|
||||
)
|
||||
def test_run_import_from_airtable_with_timezone(
|
||||
mock_import_from_airtable_to_group, data_fixture
|
||||
):
|
||||
database = data_fixture.create_database_application()
|
||||
mock_import_from_airtable_to_group.return_value = database
|
||||
|
||||
job = data_fixture.create_airtable_import_job(timezone="Europe/Amsterdam")
|
||||
|
||||
with pytest.raises(Job.DoesNotExist):
|
||||
run_async_job(0)
|
||||
|
||||
run_async_job(job.id)
|
||||
|
||||
mock_import_from_airtable_to_group.assert_called_once()
|
||||
args = mock_import_from_airtable_to_group.call_args
|
||||
assert args[0][0].id == job.group.id
|
||||
assert args[0][1] == job.airtable_share_id
|
||||
assert isinstance(args[1]["progress_builder"], ChildProgressBuilder)
|
||||
assert args[1]["progress_builder"].represents_progress == 100
|
||||
assert isinstance(args[1]["timezone"], BaseTzInfo)
|
||||
assert str(args[1]["timezone"]) == "Europe/Amsterdam"
|
||||
|
|
|
@ -66,7 +66,6 @@ def test_create_airtable_import_job(
|
|||
"type": "airtable",
|
||||
"group_id": "not_int",
|
||||
"airtable_share_url": "https://airtable.com/test",
|
||||
"timezone": "UNKNOWN",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
@ -82,9 +81,6 @@ def test_create_airtable_import_job(
|
|||
"code": "invalid",
|
||||
}
|
||||
],
|
||||
"timezone": [
|
||||
{"error": '"UNKNOWN" is not a valid choice.', "code": "invalid_choice"}
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -107,7 +103,6 @@ def test_create_airtable_import_job(
|
|||
"group_id": group.id,
|
||||
"airtable_share_id": "shrxxxxxxxxxxxxxx",
|
||||
"progress_percentage": 0,
|
||||
"timezone": None,
|
||||
"state": "pending",
|
||||
"human_readable_error": "",
|
||||
"database": None,
|
||||
|
@ -121,7 +116,6 @@ def test_create_airtable_import_job(
|
|||
"type": "airtable",
|
||||
"group_id": group.id,
|
||||
"airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx",
|
||||
"timezone": "Europe/Amsterdam",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
|
@ -135,7 +129,6 @@ def test_create_airtable_import_job(
|
|||
"group_id": group.id,
|
||||
"airtable_share_id": "shrxxxxxxxxxxxxxx",
|
||||
"progress_percentage": 0,
|
||||
"timezone": "Europe/Amsterdam",
|
||||
"state": "pending",
|
||||
"human_readable_error": "",
|
||||
"database": None,
|
||||
|
@ -185,7 +178,6 @@ def test_get_airtable_import_job(data_fixture, api_client):
|
|||
"group_id": airtable_job_1.group_id,
|
||||
"airtable_share_id": "test",
|
||||
"progress_percentage": 0,
|
||||
"timezone": None,
|
||||
"state": "pending",
|
||||
"human_readable_error": "",
|
||||
"database": None,
|
||||
|
@ -212,7 +204,6 @@ def test_get_airtable_import_job(data_fixture, api_client):
|
|||
"group_id": airtable_job_1.group_id,
|
||||
"airtable_share_id": "test",
|
||||
"progress_percentage": 50,
|
||||
"timezone": None,
|
||||
"state": "failed",
|
||||
"human_readable_error": "Wrong",
|
||||
"database": {
|
||||
|
|
|
@ -1,4 +1,4 @@
from datetime import date, datetime
from datetime import date, datetime, timedelta
from decimal import Decimal

from django.shortcuts import reverse
|
@ -222,6 +222,98 @@ def test_url_field_type(api_client, data_fixture):
|
|||
assert URLField.objects.all().count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_date_field_type_invalid_force_timezone_offset(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
date_field = data_fixture.create_date_field(table=table)
|
||||
|
||||
response = api_client.post(
|
||||
reverse("api:database:fields:list", kwargs={"table_id": table.id}),
|
||||
{
|
||||
"name": "date",
|
||||
"type": "date",
|
||||
"date_include_time": True,
|
||||
"date_force_timezone_offset": 60,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert "date_force_timezone_offset" in response.json()["detail"]
|
||||
|
||||
response = api_client.patch(
|
||||
reverse("api:database:fields:item", kwargs={"field_id": date_field.id}),
|
||||
{"date_force_timezone_offset": 60},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_400_BAD_REQUEST
|
||||
assert "date_force_timezone_offset" in response.json()["detail"]
|
||||
|
||||
date_field.date_include_time = True
|
||||
date_field.save()
|
||||
|
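The 400 responses above enforce that date_force_timezone_offset is only accepted together with date_include_time. A hedged sketch of the kind of serializer-level check that produces such an error; this is a guess at the shape, not Baserow's actual field serializer:

from rest_framework import serializers


def validate_force_timezone_offset(data):
    # Reject an offset for date-only fields, mirroring the API behaviour above.
    offset = data.get("date_force_timezone_offset")
    if offset is not None and not data.get("date_include_time"):
        raise serializers.ValidationError(
            {"date_force_timezone_offset": "Requires date_include_time to be True."}
        )
    return data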
||||
|
||||
@pytest.mark.django_db
|
||||
def test_date_field_type_force_timezone_offset(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
datetime_field = data_fixture.create_date_field(table=table, date_include_time=True)
|
||||
table_model = table.get_model()
|
||||
|
||||
row_1 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-01 00:00Z"}
|
||||
)
|
||||
row_2 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-01 23:30Z"}
|
||||
)
|
||||
row_3 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-02 15:00Z"}
|
||||
)
|
||||
row_1.refresh_from_db()
|
||||
row_2.refresh_from_db()
|
||||
row_3.refresh_from_db()
|
||||
|
||||
utc_offset = 60
|
||||
response = api_client.patch(
|
||||
reverse("api:database:fields:item", kwargs={"field_id": datetime_field.id}),
|
||||
{
|
||||
"date_force_timezone": "Europe/Rome",
|
||||
"date_force_timezone_offset": utc_offset,
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
def row_datetime_updated(row):
|
||||
prev_datetime = getattr(row, f"field_{datetime_field.id}")
|
||||
row.refresh_from_db()
|
||||
new_datetime = getattr(row, f"field_{datetime_field.id}")
|
||||
return new_datetime == (prev_datetime + timedelta(minutes=utc_offset))
|
||||
|
||||
# all the rows has been updated, adding 60 minutes to the time
|
||||
assert row_datetime_updated(row_1)
|
||||
assert row_datetime_updated(row_2)
|
||||
assert row_datetime_updated(row_3)
|
||||
|
||||
# the offset can be negative
|
||||
utc_offset = -180
|
||||
response = api_client.patch(
|
||||
reverse("api:database:fields:item", kwargs={"field_id": datetime_field.id}),
|
||||
{"date_force_timezone": "Etc/GMT-2", "date_force_timezone_offset": utc_offset},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
|
||||
# all the rows has been updated, adding 60 minutes to the time
|
||||
assert row_datetime_updated(row_1)
|
||||
assert row_datetime_updated(row_2)
|
||||
assert row_datetime_updated(row_3)
|
||||
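The two PATCH calls above pair a new date_force_timezone with a date_force_timezone_offset expressed in minutes, and the stored row values shift by exactly that amount. A minimal sketch, assuming pytz, of one way a client could compute the offset it sends; the helper name is illustrative and not part of this merge request:

from datetime import datetime

from pytz import timezone


def guess_timezone_offset_minutes(old_tz, new_tz, at=None):
    # Difference, in minutes, between the two zones' UTC offsets at the given
    # moment; a client could send this as date_force_timezone_offset.
    at = at or datetime(2022, 1, 1)
    delta = timezone(new_tz).utcoffset(at) - timezone(old_tz).utcoffset(at)
    return int(delta.total_seconds() // 60)


# Moving from UTC to Europe/Rome in winter gives +60, the utc_offset used above.
assert guess_timezone_offset_minutes("UTC", "Europe/Rome") == 60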
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_date_field_type(api_client, data_fixture):
|
||||
user, token = data_fixture.create_user_and_token(
|
||||
|
@ -1015,7 +1107,6 @@ def test_created_on_field_type(api_client, data_fixture):
|
|||
"name": "Create",
|
||||
"type": "created_on",
|
||||
"date_include_time": True,
|
||||
"timezone": "Europe/Berlin",
|
||||
},
|
||||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
|
|
|
@ -562,7 +562,7 @@ def test_changing_type_of_reference_field_to_valid_one_for_formula(
|
|||
format="json",
|
||||
HTTP_AUTHORIZATION=f"JWT {token}",
|
||||
)
|
||||
assert response.status_code == HTTP_200_OK
|
||||
assert response.status_code == HTTP_200_OK, response.json()
|
||||
|
||||
response = api_client.get(
|
||||
reverse("api:database:rows:list", kwargs={"table_id": table.id}),
|
||||
|
@ -958,6 +958,8 @@ def test_can_type_a_valid_formula_field(data_fixture, api_client):
|
|||
"date_format": None,
|
||||
"date_include_time": None,
|
||||
"date_time_format": None,
|
||||
"date_show_tzinfo": None,
|
||||
"date_force_timezone": None,
|
||||
"error": None,
|
||||
"formula": "1+1",
|
||||
"formula_type": "number",
|
||||
|
|
|
@ -1740,7 +1740,7 @@ def test_batch_update_rows_num_of_queries(api_client, data_fixture):

# last modified is readonly but the auto update shouldn't produce n+1 queries
last_modified_field = data_fixture.create_last_modified_field(
table=table_b, date_include_time=True, timezone="Europe/Berlin"
table=table_b, date_include_time=True
)

# setup the tables
|
|
@ -227,14 +227,18 @@ def test_get_row_serializer_with_user_field_names(data_fixture):
|
|||
"date_us": "2020-02-01",
|
||||
"datetime_eu": "2020-02-01T01:23:00Z",
|
||||
"datetime_us": "2020-02-01T01:23:00Z",
|
||||
"datetime_eu_tzone_visible": "2020-02-01T01:23:00Z",
|
||||
"datetime_eu_tzone_hidden": "2020-02-01T01:23:00Z",
|
||||
"last_modified_date_eu": "2021-01-02",
|
||||
"last_modified_date_us": "2021-01-02",
|
||||
"last_modified_datetime_eu": "2021-01-02T12:00:00Z",
|
||||
"last_modified_datetime_us": "2021-01-02T12:00:00Z",
|
||||
"last_modified_datetime_eu_tzone": "2021-01-02T12:00:00Z",
|
||||
"created_on_date_eu": "2021-01-02",
|
||||
"created_on_date_us": "2021-01-02",
|
||||
"created_on_datetime_eu": "2021-01-02T12:00:00Z",
|
||||
"created_on_datetime_us": "2021-01-02T12:00:00Z",
|
||||
"created_on_datetime_eu_tzone": "2021-01-02T12:00:00Z",
|
||||
"decimal_link_row": [
|
||||
{"id": 1, "value": "1.234"},
|
||||
{"id": 2, "value": "-123.456"},
|
||||
|
|
|
@ -222,32 +222,31 @@ def test_can_export_every_interesting_different_field_to_csv(
|
|||
expected = (
|
||||
"\ufeffid,text,long_text,url,email,negative_int,positive_int,"
|
||||
"negative_decimal,positive_decimal,rating,boolean,datetime_us,date_us,"
|
||||
"datetime_eu,date_eu,last_modified_datetime_us,last_modified_date_us,"
|
||||
"last_modified_datetime_eu,last_modified_date_eu,created_on_datetime_us,"
|
||||
"created_on_date_us,created_on_datetime_eu,created_on_date_eu,link_row,"
|
||||
"self_link_row,link_row_without_related,decimal_link_row,file_link_row,file,"
|
||||
"single_select,multiple_select,multiple_collaborators,phone_number,"
|
||||
"formula_text,formula_int,formula_bool,formula_decimal,formula_dateinterval,"
|
||||
"datetime_eu,date_eu,datetime_eu_tzone_visible,datetime_eu_tzone_hidden,"
|
||||
"last_modified_datetime_us,last_modified_date_us,last_modified_datetime_eu,"
|
||||
"last_modified_date_eu,last_modified_datetime_eu_tzone,created_on_datetime_us,"
|
||||
"created_on_date_us,created_on_datetime_eu,created_on_date_eu,created_on_datetime_eu_tzone,"
|
||||
"link_row,self_link_row,link_row_without_related,decimal_link_row,"
|
||||
"file_link_row,file,single_select,multiple_select,multiple_collaborators,"
|
||||
"phone_number,formula_text,formula_int,formula_bool,formula_decimal,formula_dateinterval,"
|
||||
"formula_date,formula_singleselect,formula_email,formula_link_with_label,"
|
||||
"formula_link_url_only,lookup\r\n"
|
||||
"1,,,,,,,,,0,False,,,,,01/02/2021 13:00,01/02/2021,02/01/2021 "
|
||||
"13:00,02/01/2021,01/02/2021 13:00,01/02/2021,02/01/2021 "
|
||||
"13:00,02/01/2021,,,,,,,,,,,test FORMULA,1,True,33.3333333333,1 "
|
||||
"day,2020-01-01,,,label (https://google.com),https://google.com,\r\n"
|
||||
"2,text,long_text,https://www.google.com,test@example.com,-1,1,-1.2,1.2,3,"
|
||||
"True,02/01/2020 "
|
||||
"01:23,02/01/2020,01/02/2020 01:23,01/02/2020,01/02/2021 "
|
||||
"13:00,01/02/2021,02/01/2021 13:00,02/01/2021,01/02/2021 "
|
||||
"13:00,01/02/2021,02/01/2021 "
|
||||
'13:00,02/01/2021,"linked_row_1,linked_row_2,unnamed row 3",unnamed row '
|
||||
'1,"linked_row_1,linked_row_2","1.234,-123.456,unnamed row 3","name.txt '
|
||||
'(http://localhost:8000/media/user_files/test_hash.txt),unnamed row 2","a.txt '
|
||||
"(http://localhost:8000/media/user_files/hashed_name.txt),b.txt "
|
||||
'(http://localhost:8000/media/user_files/other_name.txt)",A,"D,C,E",'
|
||||
'"user2@example.com,user3@example.com",+4412345678,test '
|
||||
"FORMULA,1,True,33.3333333333,1 day,2020-01-01,A,test@example.com,label "
|
||||
'(https://google.com),https://google.com,"linked_row_1,linked_row_2,'
|
||||
'"\r\n'
|
||||
"1,,,,,,,,,0,False,,,,,,,01/02/2021 12:00,01/02/2021,02/01/2021 12:00,02/01/2021,"
|
||||
"02/01/2021 13:00,01/02/2021 12:00,01/02/2021,02/01/2021 12:00,02/01/2021,"
|
||||
"02/01/2021 13:00,,,,,,,,,,,test FORMULA,1,True,33.3333333333,1 day,"
|
||||
"2020-01-01,,,label (https://google.com),https://google.com,\r\n"
|
||||
"2,text,long_text,https://www.google.com,test@example.com,-1,1,-1.2,1.2,3,True,"
|
||||
"02/01/2020 01:23,02/01/2020,01/02/2020 01:23,01/02/2020,01/02/2020 02:23,"
|
||||
"01/02/2020 02:23,01/02/2021 12:00,01/02/2021,02/01/2021 12:00,02/01/2021,"
|
||||
"02/01/2021 13:00,01/02/2021 12:00,01/02/2021,02/01/2021 12:00,02/01/2021,"
|
||||
'02/01/2021 13:00,"linked_row_1,linked_row_2,unnamed row 3",unnamed row 1,'
|
||||
'"linked_row_1,linked_row_2","1.234,-123.456,unnamed row 3",'
|
||||
'"name.txt (http://localhost:8000/media/user_files/test_hash.txt),unnamed row 2",'
|
||||
'"a.txt (http://localhost:8000/media/user_files/hashed_name.txt),'
|
||||
'b.txt (http://localhost:8000/media/user_files/other_name.txt)",A,"D,C,E",'
|
||||
'"user2@example.com,user3@example.com",+4412345678,test FORMULA,1,True,33.3333333333,'
|
||||
"1 day,2020-01-01,A,test@example.com,label (https://google.com),https://google.com,"
|
||||
'"linked_row_1,linked_row_2,"\r\n'
|
||||
)
|
||||
|
||||
assert contents == expected
|
||||
|
|
|
@ -20,8 +20,6 @@ def test_created_on_field_type(data_fixture):
|
|||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
timezone_to_test = "Europe/Berlin"
|
||||
timezone_of_field = timezone(timezone_to_test)
|
||||
time_to_freeze = "2021-08-10 12:00"
|
||||
|
||||
data_fixture.create_text_field(table=table, name="text_field", primary=True)
|
||||
|
@ -30,7 +28,6 @@ def test_created_on_field_type(data_fixture):
|
|||
table=table,
|
||||
type_name="created_on",
|
||||
name="Create Date",
|
||||
timezone=timezone_to_test,
|
||||
)
|
||||
created_on_field_datetime = field_handler.create_field(
|
||||
user=user,
|
||||
|
@ -38,7 +35,6 @@ def test_created_on_field_type(data_fixture):
|
|||
type_name="created_on",
|
||||
name="Create Datetime",
|
||||
date_include_time=True,
|
||||
timezone=timezone_to_test,
|
||||
)
|
||||
assert created_on_field_date.date_include_time is False
|
||||
assert created_on_field_datetime.date_include_time is True
|
||||
|
@ -106,7 +102,6 @@ def test_created_on_field_type(data_fixture):
|
|||
row_create_datetime_before_alter = row.create_datetime
|
||||
|
||||
# changing the field from CreatedOn to Datetime should persist the date
|
||||
# in the corresponding timezone
|
||||
with freeze_time(time_to_freeze):
|
||||
field_handler.update_field(
|
||||
user=user,
|
||||
|
@ -117,15 +112,8 @@ def test_created_on_field_type(data_fixture):
|
|||
|
||||
assert len(CreatedOnField.objects.all()) == 1
|
||||
row.refresh_from_db()
|
||||
field_before_with_timezone = row_create_datetime_before_alter.astimezone(
|
||||
timezone_of_field
|
||||
)
|
||||
assert row.create_datetime.year == field_before_with_timezone.year
|
||||
assert row.create_datetime.month == field_before_with_timezone.month
|
||||
assert row.create_datetime.day == field_before_with_timezone.day
|
||||
assert row.create_datetime.hour == field_before_with_timezone.hour
|
||||
assert row.create_datetime.minute == field_before_with_timezone.minute
|
||||
assert row.create_datetime.second == field_before_with_timezone.second
|
||||
|
||||
assert row.create_datetime == row_create_datetime_before_alter
|
||||
|
||||
# changing the field from LastModified with Datetime to Text Field should persist
|
||||
# the datetime as string
|
||||
|
@ -134,7 +122,7 @@ def test_created_on_field_type(data_fixture):
|
|||
field=created_on_field_datetime,
|
||||
new_type_name="created_on",
|
||||
date_include_time=True,
|
||||
timezone="Europe/Berlin",
|
||||
datetime_force_timezone="Europe/Berlin",
|
||||
)
|
||||
assert len(CreatedOnField.objects.all()) == 2
|
||||
|
||||
|
@ -147,9 +135,9 @@ def test_created_on_field_type(data_fixture):
|
|||
)
|
||||
row.refresh_from_db()
|
||||
assert len(CreatedOnField.objects.all()) == 1
|
||||
assert row.create_datetime == row_create_datetime_before_alter.astimezone(
|
||||
timezone_of_field
|
||||
).strftime("%d/%m/%Y %H:%M")
|
||||
assert row.create_datetime == row_create_datetime_before_alter.strftime(
|
||||
"%d/%m/%Y %H:%M"
|
||||
)
|
||||
|
||||
# deleting the fields
|
||||
field_handler.delete_field(user=user, field=created_on_field_date)
|
||||
|
@ -157,23 +145,6 @@ def test_created_on_field_type(data_fixture):
|
|||
assert len(CreatedOnField.objects.all()) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_created_on_field_type_wrong_timezone(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
field_handler.create_field(
|
||||
user=user,
|
||||
table=table,
|
||||
type_name="created_on",
|
||||
name="Create Date",
|
||||
timezone="SDj",
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
def test_import_export_last_modified_field(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from typing import cast
|
||||
|
||||
|
@ -1411,3 +1412,82 @@ def test_can_undo_redo_duplicate_fields_of_interesting_table(api_client, data_fi
|
|||
row_2_value,
|
||||
field_name=field.name,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_date_field_type_undo_redo_fix_timezone_offset(api_client, data_fixture):
|
||||
session_id = "session-id"
|
||||
user = data_fixture.create_user(session_id=session_id)
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
datetime_field = data_fixture.create_date_field(table=table, date_include_time=True)
|
||||
table_model = table.get_model()
|
||||
|
||||
row_1 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-01 00:00Z"}
|
||||
)
|
||||
row_2 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-01 23:30Z"}
|
||||
)
|
||||
row_3 = table_model.objects.create(
|
||||
**{f"field_{datetime_field.id}": "2022-01-02 15:00Z"}
|
||||
)
|
||||
row_1.refresh_from_db()
|
||||
row_2.refresh_from_db()
|
||||
row_3.refresh_from_db()
|
||||
original_datetime_1 = getattr(row_1, f"field_{datetime_field.id}")
|
||||
original_datetime_2 = getattr(row_2, f"field_{datetime_field.id}")
|
||||
original_datetime_3 = getattr(row_3, f"field_{datetime_field.id}")
|
||||
|
||||
utc_offset = 60
|
||||
action_type_registry.get_by_type(UpdateFieldActionType).do(
|
||||
user,
|
||||
datetime_field,
|
||||
name="test",
|
||||
date_force_timezone="Europe/Rome",
|
||||
date_force_timezone_offset=utc_offset,
|
||||
)
|
||||
|
||||
def row_datetime_updated(row):
|
||||
prev_datetime = getattr(row, f"field_{datetime_field.id}")
|
||||
row.refresh_from_db()
|
||||
new_datetime = getattr(row, f"field_{datetime_field.id}")
|
||||
return new_datetime == (prev_datetime + timedelta(minutes=utc_offset))
|
||||
|
||||
# all the rows has been updated, adding 60 minutes to the time
|
||||
assert row_datetime_updated(row_1)
|
||||
assert row_datetime_updated(row_2)
|
||||
assert row_datetime_updated(row_3)
|
||||
|
||||
actions = ActionHandler.undo(
|
||||
user, [UpdateFieldActionType.scope(table_id=table.id)], session_id
|
||||
)
|
||||
assert len(actions) == 1
|
||||
assert actions[0].type == UpdateFieldActionType.type
|
||||
|
||||
utc_offset = -utc_offset
|
||||
assert row_datetime_updated(row_1)
|
||||
assert row_datetime_updated(row_2)
|
||||
assert row_datetime_updated(row_3)
|
||||
assert getattr(row_1, f"field_{datetime_field.id}") == original_datetime_1
|
||||
assert getattr(row_2, f"field_{datetime_field.id}") == original_datetime_2
|
||||
assert getattr(row_3, f"field_{datetime_field.id}") == original_datetime_3
|
||||
|
||||
actions = ActionHandler.redo(
|
||||
user, [UpdateFieldActionType.scope(table_id=table.id)], session_id
|
||||
)
|
||||
assert len(actions) == 1
|
||||
assert actions[0].type == UpdateFieldActionType.type
|
||||
|
||||
utc_offset = -utc_offset
|
||||
assert row_datetime_updated(row_1)
|
||||
assert row_datetime_updated(row_2)
|
||||
assert row_datetime_updated(row_3)
|
||||
assert getattr(
|
||||
row_1, f"field_{datetime_field.id}"
|
||||
) == original_datetime_1 + timedelta(minutes=utc_offset)
|
||||
assert getattr(
|
||||
row_2, f"field_{datetime_field.id}"
|
||||
) == original_datetime_2 + timedelta(minutes=utc_offset)
|
||||
assert getattr(
|
||||
row_3, f"field_{datetime_field.id}"
|
||||
) == original_datetime_3 + timedelta(minutes=utc_offset)
|
||||
|
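The undo call above shifts every row back by the same 60 minutes and the redo call re-applies them, so the action only needs to remember the offset it applied. A small sketch of that bookkeeping under assumed names; the real UpdateFieldActionType stores considerably more state than this:

from dataclasses import dataclass
from datetime import timedelta


@dataclass
class AppliedTimezoneOffset:
    field_id: int
    minutes: int

    def do(self, value):
        # Applied when the field update is performed or redone.
        return value + timedelta(minutes=self.minutes)

    def undo(self, value):
        # Applied to every stored datetime when the action is undone.
        return value - timedelta(minutes=self.minutes)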
|
|
@ -515,20 +515,24 @@ def test_human_readable_values(data_fixture):
|
|||
)
|
||||
blank_results[field["field"].name] = blank_value
|
||||
|
||||
assert blank_results == {
|
||||
blank_expected = {
|
||||
"boolean": "False",
|
||||
"date_eu": "",
|
||||
"date_us": "",
|
||||
"datetime_eu": "",
|
||||
"datetime_us": "",
|
||||
"datetime_eu_tzone_visible": "",
|
||||
"datetime_eu_tzone_hidden": "",
|
||||
"last_modified_date_eu": "02/01/2021",
|
||||
"last_modified_date_us": "01/02/2021",
|
||||
"last_modified_datetime_eu": "02/01/2021 13:00",
|
||||
"last_modified_datetime_us": "01/02/2021 13:00",
|
||||
"last_modified_datetime_eu": "02/01/2021 12:00",
|
||||
"last_modified_datetime_us": "01/02/2021 12:00",
|
||||
"last_modified_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"created_on_date_eu": "02/01/2021",
|
||||
"created_on_date_us": "01/02/2021",
|
||||
"created_on_datetime_eu": "02/01/2021 13:00",
|
||||
"created_on_datetime_us": "01/02/2021 13:00",
|
||||
"created_on_datetime_eu": "02/01/2021 12:00",
|
||||
"created_on_datetime_us": "01/02/2021 12:00",
|
||||
"created_on_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"decimal_link_row": "",
|
||||
"email": "",
|
||||
"file": "",
|
||||
|
@ -560,20 +564,28 @@ def test_human_readable_values(data_fixture):
|
|||
"formula_link_with_label": "label (https://google.com)",
|
||||
"lookup": "",
|
||||
}
|
||||
assert results == {
|
||||
|
||||
for key, value in blank_expected.items():
|
||||
assert blank_results[key] == value, (key, blank_results[key], value)
|
||||
|
||||
expected = {
|
||||
"boolean": "True",
|
||||
"date_eu": "01/02/2020",
|
||||
"date_us": "02/01/2020",
|
||||
"datetime_eu": "01/02/2020 01:23",
|
||||
"datetime_us": "02/01/2020 01:23",
|
||||
"datetime_eu_tzone_visible": "01/02/2020 02:23",
|
||||
"datetime_eu_tzone_hidden": "01/02/2020 02:23",
|
||||
"last_modified_date_eu": "02/01/2021",
|
||||
"last_modified_date_us": "01/02/2021",
|
||||
"last_modified_datetime_eu": "02/01/2021 13:00",
|
||||
"last_modified_datetime_us": "01/02/2021 13:00",
|
||||
"last_modified_datetime_eu": "02/01/2021 12:00",
|
||||
"last_modified_datetime_us": "01/02/2021 12:00",
|
||||
"last_modified_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"created_on_date_eu": "02/01/2021",
|
||||
"created_on_date_us": "01/02/2021",
|
||||
"created_on_datetime_eu": "02/01/2021 13:00",
|
||||
"created_on_datetime_us": "01/02/2021 13:00",
|
||||
"created_on_datetime_eu": "02/01/2021 12:00",
|
||||
"created_on_datetime_us": "01/02/2021 12:00",
|
||||
"created_on_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"decimal_link_row": "1.234, -123.456, unnamed row 3",
|
||||
"email": "test@example.com",
|
||||
"file": "a.txt, b.txt",
|
||||
|
@ -606,6 +618,9 @@ def test_human_readable_values(data_fixture):
|
|||
"lookup": "linked_row_1, linked_row_2, ",
|
||||
}
|
||||
|
||||
for key, value in expected.items():
|
||||
assert results[key] == value, (key, results[key], value)
|
||||
|
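The updated expectations encode a single display rule: a stored UTC timestamp is rendered in date_force_timezone when the field defines one and in UTC otherwise, which is why the plain EU/US datetime fields now read 12:00 while the *_tzone fields keep reading 13:00. A small sketch of that rule, assuming the Berlin-like +01:00 fixtures used here:

from datetime import datetime

from pytz import UTC, timezone


def to_human_readable(stored_utc, date_force_timezone=None):
    # Render in the forced timezone when set, otherwise keep UTC.
    tz = timezone(date_force_timezone) if date_force_timezone else UTC
    return stored_utc.astimezone(tz).strftime("%d/%m/%Y %H:%M")


stored = UTC.localize(datetime(2021, 1, 2, 12, 0))
assert to_human_readable(stored) == "02/01/2021 12:00"
assert to_human_readable(stored, "Europe/Berlin") == "02/01/2021 13:00"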
||||
|
||||
@pytest.mark.django_db
|
||||
def test_import_export_lookup_field(data_fixture, api_client):
|
||||
|
|
|
@ -20,8 +20,6 @@ def test_last_modified_field_type(data_fixture):
|
|||
|
||||
field_handler = FieldHandler()
|
||||
row_handler = RowHandler()
|
||||
timezone_to_test = "Europe/Berlin"
|
||||
timezone_of_field = timezone(timezone_to_test)
|
||||
time_to_freeze = "2021-08-10 12:00"
|
||||
|
||||
data_fixture.create_text_field(table=table, name="text_field", primary=True)
|
||||
|
@ -31,7 +29,6 @@ def test_last_modified_field_type(data_fixture):
|
|||
table=table,
|
||||
type_name="last_modified",
|
||||
name="Last Date",
|
||||
timezone=timezone_to_test,
|
||||
)
|
||||
last_modified_field_datetime = field_handler.create_field(
|
||||
user=user,
|
||||
|
@ -39,7 +36,6 @@ def test_last_modified_field_type(data_fixture):
|
|||
type_name="last_modified",
|
||||
name="Last Datetime",
|
||||
date_include_time=True,
|
||||
timezone=timezone_to_test,
|
||||
)
|
||||
assert last_modified_field_date.date_include_time is False
|
||||
assert last_modified_field_datetime.date_include_time is True
|
||||
|
@ -119,15 +115,7 @@ def test_last_modified_field_type(data_fixture):
|
|||
|
||||
assert len(LastModifiedField.objects.all()) == 1
|
||||
row.refresh_from_db()
|
||||
field_before_with_timezone = row_last_modified_2_before_alter.astimezone(
|
||||
timezone_of_field
|
||||
)
|
||||
assert row.last_datetime.year == field_before_with_timezone.year
|
||||
assert row.last_datetime.month == field_before_with_timezone.month
|
||||
assert row.last_datetime.day == field_before_with_timezone.day
|
||||
assert row.last_datetime.hour == field_before_with_timezone.hour
|
||||
assert row.last_datetime.minute == field_before_with_timezone.minute
|
||||
assert row.last_datetime.second == field_before_with_timezone.second
|
||||
assert row.last_datetime == row_last_modified_2_before_alter
|
||||
|
||||
# changing the field from LastModified with Datetime to Text Field should persist
|
||||
# the datetime as string
|
||||
|
@ -151,9 +139,9 @@ def test_last_modified_field_type(data_fixture):
|
|||
)
|
||||
row.refresh_from_db()
|
||||
assert len(LastModifiedField.objects.all()) == 1
|
||||
assert row.last_datetime == row_last_modified_2_before_alter.astimezone(
|
||||
timezone_of_field
|
||||
).strftime("%d/%m/%Y %H:%M")
|
||||
assert row.last_datetime == row_last_modified_2_before_alter.strftime(
|
||||
"%d/%m/%Y %H:%M"
|
||||
)
|
||||
|
||||
# deleting the fields
|
||||
field_handler.delete_field(user=user, field=last_modified_field_date)
|
||||
|
@ -161,23 +149,6 @@ def test_last_modified_field_type(data_fixture):
|
|||
assert len(LastModifiedField.objects.all()) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_last_modified_field_type_wrong_timezone(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
table = data_fixture.create_database_table(user=user)
|
||||
|
||||
field_handler = FieldHandler()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
field_handler.create_field(
|
||||
user=user,
|
||||
table=table,
|
||||
type_name="last_modified",
|
||||
name="Last Date",
|
||||
timezone="SDj",
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
def test_import_export_last_modified_field(data_fixture):
|
||||
user = data_fixture.create_user()
|
||||
|
|
|
@ -186,7 +186,7 @@ def test_view_unique_count_aggregation_for_interesting_table(data_fixture):
user, grid_view, aggregation_query, model=model, with_total=True
)

assert len(result.keys()) == 29
assert len(result.keys()) == 33

for field_obj in model._field_objects.values():
field = field_obj["field"]
File diff suppressed because it is too large
|
@ -0,0 +1,7 @@
{
    "type": "feature",
    "message": "Make date fields timezone aware.",
    "issue_number": 1473,
    "bullet_points": [],
    "created_at": "2023-02-22"
}
|
@ -0,0 +1,7 @@
{
    "type": "refactor",
    "message": "Refactor date view filters to consider timezone when filtering results.",
    "issue_number": 1473,
    "bullet_points": [],
    "created_at": "2023-02-22"
}
|
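For the refactor entry above, "consider timezone when filtering" boils down to interpreting the stored UTC timestamp in the filter's timezone before comparing calendar dates. A simplified, assumed illustration of that idea, not the actual view filter code:

from datetime import date, datetime

from pytz import UTC, timezone


def date_equals_in_timezone(stored_utc, wanted_date, tz_name):
    # Convert the stored UTC value to the filter timezone, then compare dates.
    return stored_utc.astimezone(timezone(tz_name)).date() == wanted_date


# 23:30 UTC on 2022-12-31 is already 2023-01-01 in Europe/Amsterdam (UTC+1).
value = UTC.localize(datetime(2022, 12, 31, 23, 30))
assert date_equals_in_timezone(value, date(2023, 1, 1), "Europe/Amsterdam")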
|
@ -52,14 +52,18 @@ def test_can_export_every_interesting_different_field_to_json(
|
|||
"date_us": "",
|
||||
"datetime_eu": "",
|
||||
"date_eu": "",
|
||||
"last_modified_datetime_us": "01/02/2021 13:00",
|
||||
"datetime_eu_tzone_visible": "",
|
||||
"datetime_eu_tzone_hidden": "",
|
||||
"last_modified_datetime_us": "01/02/2021 12:00",
|
||||
"last_modified_date_us": "01/02/2021",
|
||||
"last_modified_datetime_eu": "02/01/2021 13:00",
|
||||
"last_modified_datetime_eu": "02/01/2021 12:00",
|
||||
"last_modified_date_eu": "02/01/2021",
|
||||
"created_on_datetime_us": "01/02/2021 13:00",
|
||||
"last_modified_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"created_on_datetime_us": "01/02/2021 12:00",
|
||||
"created_on_date_us": "01/02/2021",
|
||||
"created_on_datetime_eu": "02/01/2021 13:00",
|
||||
"created_on_datetime_eu": "02/01/2021 12:00",
|
||||
"created_on_date_eu": "02/01/2021",
|
||||
"created_on_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"link_row": [],
|
||||
"self_link_row": [],
|
||||
"link_row_without_related": [],
|
||||
|
@ -103,14 +107,18 @@ def test_can_export_every_interesting_different_field_to_json(
|
|||
"date_us": "02/01/2020",
|
||||
"datetime_eu": "01/02/2020 01:23",
|
||||
"date_eu": "01/02/2020",
|
||||
"last_modified_datetime_us": "01/02/2021 13:00",
|
||||
"datetime_eu_tzone_visible": "01/02/2020 02:23",
|
||||
"datetime_eu_tzone_hidden": "01/02/2020 02:23",
|
||||
"last_modified_datetime_us": "01/02/2021 12:00",
|
||||
"last_modified_date_us": "01/02/2021",
|
||||
"last_modified_datetime_eu": "02/01/2021 13:00",
|
||||
"last_modified_datetime_eu": "02/01/2021 12:00",
|
||||
"last_modified_date_eu": "02/01/2021",
|
||||
"created_on_datetime_us": "01/02/2021 13:00",
|
||||
"last_modified_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"created_on_datetime_us": "01/02/2021 12:00",
|
||||
"created_on_date_us": "01/02/2021",
|
||||
"created_on_datetime_eu": "02/01/2021 13:00",
|
||||
"created_on_datetime_eu": "02/01/2021 12:00",
|
||||
"created_on_date_eu": "02/01/2021",
|
||||
"created_on_datetime_eu_tzone": "02/01/2021 13:00",
|
||||
"link_row": [
|
||||
"linked_row_1",
|
||||
"linked_row_2",
|
||||
|
@ -257,147 +265,157 @@ def test_can_export_every_interesting_different_field_to_xml(
|
|||
)
|
||||
expected_xml = f"""<?xml version="1.0" encoding="utf-8" ?>
|
||||
<rows>
|
||||
<row>
|
||||
<id>1</id>
|
||||
<text/>
|
||||
<long-text/>
|
||||
<url/>
|
||||
<email/>
|
||||
<negative-int/>
|
||||
<positive-int/>
|
||||
<negative-decimal/>
|
||||
<positive-decimal/>
|
||||
<rating>0</rating>
|
||||
<boolean>false</boolean>
|
||||
<datetime-us/>
|
||||
<date-us/>
|
||||
<datetime-eu/>
|
||||
<date-eu/>
|
||||
<last-modified-datetime-us>01/02/2021 13:00</last-modified-datetime-us>
|
||||
<last-modified-date-us>01/02/2021</last-modified-date-us>
|
||||
<last-modified-datetime-eu>02/01/2021 13:00</last-modified-datetime-eu>
|
||||
<last-modified-date-eu>02/01/2021</last-modified-date-eu>
|
||||
<created-on-datetime-us>01/02/2021 13:00</created-on-datetime-us>
|
||||
<created-on-date-us>01/02/2021</created-on-date-us>
|
||||
<created-on-datetime-eu>02/01/2021 13:00</created-on-datetime-eu>
|
||||
<created-on-date-eu>02/01/2021</created-on-date-eu>
|
||||
<link-row/>
|
||||
<self-link-row/>
|
||||
<link-row-without-related/>
|
||||
<decimal-link-row/>
|
||||
<file-link-row/>
|
||||
<file/>
|
||||
<single-select/>
|
||||
<multiple-select/>
|
||||
<multiple-collaborators/>
|
||||
<phone-number/>
|
||||
<formula-text>test FORMULA</formula-text>
|
||||
<formula-int>1</formula-int>
|
||||
<formula-bool>true</formula-bool>
|
||||
<formula-decimal>33.3333333333</formula-decimal>
|
||||
<formula-dateinterval>1 day</formula-dateinterval>
|
||||
<formula-date>2020-01-01</formula-date>
|
||||
<formula-singleselect/>
|
||||
<formula-email/>
|
||||
<formula-link-with-label>
|
||||
<url>https://google.com</url>
|
||||
<label>label</label>
|
||||
</formula-link-with-label>
|
||||
<formula-link-url-only>
|
||||
<url>https://google.com</url>
|
||||
</formula-link-url-only>
|
||||
<lookup/>
|
||||
</row>
|
||||
<row>
|
||||
<id>2</id>
|
||||
<text>text</text>
|
||||
<long-text>long_text</long-text>
|
||||
<url>https://www.google.com</url>
|
||||
<email>test@example.com</email>
|
||||
<negative-int>-1</negative-int>
|
||||
<positive-int>1</positive-int>
|
||||
<negative-decimal>-1.2</negative-decimal>
|
||||
<positive-decimal>1.2</positive-decimal>
|
||||
<rating>3</rating>
|
||||
<boolean>true</boolean>
|
||||
<datetime-us>02/01/2020 01:23</datetime-us>
|
||||
<date-us>02/01/2020</date-us>
|
||||
<datetime-eu>01/02/2020 01:23</datetime-eu>
|
||||
<date-eu>01/02/2020</date-eu>
|
||||
<last-modified-datetime-us>01/02/2021 13:00</last-modified-datetime-us>
|
||||
<last-modified-date-us>01/02/2021</last-modified-date-us>
|
||||
<last-modified-datetime-eu>02/01/2021 13:00</last-modified-datetime-eu>
|
||||
<last-modified-date-eu>02/01/2021</last-modified-date-eu>
|
||||
<created-on-datetime-us>01/02/2021 13:00</created-on-datetime-us>
|
||||
<created-on-date-us>01/02/2021</created-on-date-us>
|
||||
<created-on-datetime-eu>02/01/2021 13:00</created-on-datetime-eu>
|
||||
<created-on-date-eu>02/01/2021</created-on-date-eu>
|
||||
<link-row>
|
||||
<item>linked_row_1</item>
|
||||
<item>linked_row_2</item>
|
||||
<item>unnamed row 3</item>
|
||||
</link-row>
|
||||
<self-link-row>
|
||||
<item>unnamed row 1</item>
|
||||
</self-link-row>
|
||||
<link-row-without-related>
|
||||
<item>linked_row_1</item>
|
||||
<item>linked_row_2</item>
|
||||
</link-row-without-related>
|
||||
<decimal-link-row>
|
||||
<item>1.234</item>
|
||||
<item>-123.456</item>
|
||||
<item>unnamed row 3</item>
|
||||
</decimal-link-row>
|
||||
<file-link-row>
|
||||
<item>
|
||||
<row>
|
||||
<id>1</id>
|
||||
<text/>
|
||||
<long-text/>
|
||||
<url/>
|
||||
<email/>
|
||||
<negative-int/>
|
||||
<positive-int/>
|
||||
<negative-decimal/>
|
||||
<positive-decimal/>
|
||||
<rating>0</rating>
|
||||
<boolean>false</boolean>
|
||||
<datetime-us/>
|
||||
<date-us/>
|
||||
<datetime-eu/>
|
||||
<date-eu/>
|
||||
<datetime-eu-tzone-visible/>
|
||||
<datetime-eu-tzone-hidden/>
|
||||
<last-modified-datetime-us>01/02/2021 12:00</last-modified-datetime-us>
|
||||
<last-modified-date-us>01/02/2021</last-modified-date-us>
|
||||
<last-modified-datetime-eu>02/01/2021 12:00</last-modified-datetime-eu>
|
||||
<last-modified-date-eu>02/01/2021</last-modified-date-eu>
|
||||
<last-modified-datetime-eu-tzone>02/01/2021 13:00</last-modified-datetime-eu-tzone>
|
||||
<created-on-datetime-us>01/02/2021 12:00</created-on-datetime-us>
|
||||
<created-on-date-us>01/02/2021</created-on-date-us>
|
||||
<created-on-datetime-eu>02/01/2021 12:00</created-on-datetime-eu>
|
||||
<created-on-date-eu>02/01/2021</created-on-date-eu>
|
||||
<created-on-datetime-eu-tzone>02/01/2021 13:00</created-on-datetime-eu-tzone>
|
||||
<link-row/>
|
||||
<self-link-row/>
|
||||
<link-row-without-related/>
|
||||
<decimal-link-row/>
|
||||
<file-link-row/>
|
||||
<file/>
|
||||
<single-select/>
|
||||
<multiple-select/>
|
||||
<multiple-collaborators/>
|
||||
<phone-number/>
|
||||
<formula-text>test FORMULA</formula-text>
|
||||
<formula-int>1</formula-int>
|
||||
<formula-bool>true</formula-bool>
|
||||
<formula-decimal>33.3333333333</formula-decimal>
|
||||
<formula-dateinterval>1 day</formula-dateinterval>
|
||||
<formula-date>2020-01-01</formula-date>
|
||||
<formula-singleselect/>
|
||||
<formula-email/>
|
||||
<formula-link-with-label>
|
||||
<url>https://google.com</url>
|
||||
<label>label</label>
|
||||
</formula-link-with-label>
|
||||
<formula-link-url-only>
|
||||
<url>https://google.com</url>
|
||||
</formula-link-url-only>
|
||||
<lookup/>
|
||||
</row>
|
||||
<row>
|
||||
<id>2</id>
|
||||
<text>text</text>
|
||||
<long-text>long_text</long-text>
|
||||
<url>https://www.google.com</url>
|
||||
<email>test@example.com</email>
|
||||
<negative-int>-1</negative-int>
|
||||
<positive-int>1</positive-int>
|
||||
<negative-decimal>-1.2</negative-decimal>
|
||||
<positive-decimal>1.2</positive-decimal>
|
||||
<rating>3</rating>
|
||||
<boolean>true</boolean>
|
||||
<datetime-us>02/01/2020 01:23</datetime-us>
|
||||
<date-us>02/01/2020</date-us>
|
||||
<datetime-eu>01/02/2020 01:23</datetime-eu>
|
||||
<date-eu>01/02/2020</date-eu>
|
||||
<datetime-eu-tzone-visible>01/02/2020 02:23</datetime-eu-tzone-visible>
|
||||
<datetime-eu-tzone-hidden>01/02/2020 02:23</datetime-eu-tzone-hidden>
|
||||
<last-modified-datetime-us>01/02/2021 12:00</last-modified-datetime-us>
|
||||
<last-modified-date-us>01/02/2021</last-modified-date-us>
|
||||
<last-modified-datetime-eu>02/01/2021 12:00</last-modified-datetime-eu>
|
||||
<last-modified-date-eu>02/01/2021</last-modified-date-eu>
|
||||
<last-modified-datetime-eu-tzone>02/01/2021 13:00</last-modified-datetime-eu-tzone>
|
||||
<created-on-datetime-us>01/02/2021 12:00</created-on-datetime-us>
|
||||
<created-on-date-us>01/02/2021</created-on-date-us>
|
||||
<created-on-datetime-eu>02/01/2021 12:00</created-on-datetime-eu>
|
||||
<created-on-date-eu>02/01/2021</created-on-date-eu>
|
||||
<created-on-datetime-eu-tzone>02/01/2021 13:00</created-on-datetime-eu-tzone>
|
||||
<link-row>
|
||||
<item>linked_row_1</item>
|
||||
<item>linked_row_2</item>
|
||||
<item>unnamed row 3</item>
|
||||
</link-row>
|
||||
<self-link-row>
|
||||
<item>unnamed row 1</item>
|
||||
</self-link-row>
|
||||
<link-row-without-related>
|
||||
<item>linked_row_1</item>
|
||||
<item>linked_row_2</item>
|
||||
</link-row-without-related>
|
||||
<decimal-link-row>
|
||||
<item>1.234</item>
|
||||
<item>-123.456</item>
|
||||
<item>unnamed row 3</item>
|
||||
</decimal-link-row>
|
||||
<file-link-row>
|
||||
<item>
|
||||
<item>
|
||||
<visible_name>name.txt</visible_name>
|
||||
<url>http://localhost:8000/media/user_files/test_hash.txt</url>
|
||||
<visible_name>name.txt</visible_name>
|
||||
<url>http://localhost:8000/media/user_files/test_hash.txt</url>
|
||||
</item>
|
||||
</item>
|
||||
<item>
|
||||
unnamed row 2
|
||||
</item>
|
||||
</file-link-row>
|
||||
<file>
|
||||
<item>
|
||||
</item>
|
||||
<item>unnamed row 2</item>
|
||||
</file-link-row>
|
||||
<file>
|
||||
<item>
|
||||
<visible_name>a.txt</visible_name>
|
||||
<url>http://localhost:8000/media/user_files/hashed_name.txt</url>
|
||||
</item>
|
||||
<item>
|
||||
</item>
|
||||
<item>
|
||||
<visible_name>b.txt</visible_name>
|
||||
<url>http://localhost:8000/media/user_files/other_name.txt</url>
|
||||
</item>
|
||||
</file>
|
||||
<single-select>A</single-select>
|
||||
<multiple-select>
|
||||
<item>D</item>
|
||||
<item>C</item>
|
||||
<item>E</item>
|
||||
</multiple-select>
|
||||
<multiple-collaborators>
|
||||
<item>user2@example.com</item>
|
||||
<item>user3@example.com</item>
|
||||
</multiple-collaborators>
|
||||
<phone-number>+4412345678</phone-number>
|
||||
<formula-text>test FORMULA</formula-text>
|
||||
<formula-int>1</formula-int>
|
||||
<formula-bool>true</formula-bool>
|
||||
<formula-decimal>33.3333333333</formula-decimal>
|
||||
<formula-dateinterval>1 day</formula-dateinterval>
|
||||
<formula-date>2020-01-01</formula-date>
|
||||
<formula-singleselect>A</formula-singleselect>
|
||||
<formula-email>test@example.com</formula-email>
|
||||
<formula-link-with-label>
|
||||
<url>https://google.com</url>
|
||||
<label>label</label>
|
||||
</formula-link-with-label>
|
||||
<formula-link-url-only>
|
||||
<url>https://google.com</url>
|
||||
</formula-link-url-only>
|
||||
<lookup><item>linked_row_1</item><item>linked_row_2</item><item/></lookup>
|
||||
</row>
|
||||
</item>
|
||||
</file>
|
||||
<single-select>A</single-select>
|
||||
<multiple-select>
|
||||
<item>D</item>
|
||||
<item>C</item>
|
||||
<item>E</item>
|
||||
</multiple-select>
|
||||
<multiple-collaborators>
|
||||
<item>user2@example.com</item>
|
||||
<item>user3@example.com</item>
|
||||
</multiple-collaborators>
|
||||
<phone-number>+4412345678</phone-number>
|
||||
<formula-text>test FORMULA</formula-text>
|
||||
<formula-int>1</formula-int>
|
||||
<formula-bool>true</formula-bool>
|
||||
<formula-decimal>33.3333333333</formula-decimal>
|
||||
<formula-dateinterval>1 day</formula-dateinterval>
|
||||
<formula-date>2020-01-01</formula-date>
|
||||
<formula-singleselect>A</formula-singleselect>
|
||||
<formula-email>test@example.com</formula-email>
|
||||
<formula-link-with-label>
|
||||
<url>https://google.com</url>
|
||||
<label>label</label>
|
||||
</formula-link-with-label>
|
||||
<formula-link-url-only>
|
||||
<url>https://google.com</url>
|
||||
</formula-link-url-only>
|
||||
<lookup>
|
||||
<item>linked_row_1</item>
|
||||
<item>linked_row_2</item>
|
||||
<item/>
|
||||
</lookup>
|
||||
</row>
|
||||
</rows>
|
||||
"""
|
||||
assert strip_indents_and_newlines(xml) == strip_indents_and_newlines(expected_xml)
|
||||
|
|
|
@ -68,7 +68,7 @@ export class ConditionalColorValueProviderType extends DecoratorValueProviderTyp

filter.type = compatibleType.type
const viewFilterType = registry.get('viewFilter', filter.type)
filter.value = viewFilterType.getDefaultValue()
filter.value = viewFilterType.getDefaultValue(field)
filter.preload_values = {}
filter.id = uuid()
|
|
@ -7,10 +7,22 @@
|
|||
}
|
||||
|
||||
.field-date__time {
|
||||
width: 20%;
|
||||
margin-left: 4%;
|
||||
width: 25%;
|
||||
margin-left: 4px;
|
||||
}
|
||||
|
||||
.field-date__tzinfo {
|
||||
padding-left: 10px;
|
||||
width: 10%;
|
||||
margin-top: auto;
|
||||
margin-bottom: auto;
|
||||
color: $color-neutral-400;
|
||||
}
|
||||
|
||||
.field-date-read-only-timestamp {
|
||||
color: $color-neutral-400;
|
||||
}
|
||||
|
||||
.color--tzinfo {
|
||||
color: $color-neutral-500;
|
||||
}
|
||||
|
|
|
@ -92,6 +92,13 @@
|
|||
}
|
||||
}
|
||||
|
||||
.filters__value-date-timezone {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
.filters__value-timezone {
|
||||
font-size: 11px;
|
||||
color: $color-neutral-400;
|
||||
|
|
|
@ -1,22 +1,24 @@
|
|||
.grid-field-date {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
justify-content: space-between;
|
||||
gap: 8px;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
}
|
||||
|
||||
%grid-field-date__date {
|
||||
flex: 0 0 100%;
|
||||
width: 100%;
|
||||
|
||||
.grid-field-date--has-time & {
|
||||
width: 60%;
|
||||
flex-basis: 60%;
|
||||
}
|
||||
max-width: 75px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
%grid-field-date__time {
|
||||
flex: 0 0 40%;
|
||||
width: 40%;
|
||||
padding-left: 0;
|
||||
width: 100%;
|
||||
max-width: 65px;
|
||||
padding: 0;
|
||||
flex-shrink: 2;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.grid-field-date__date {
|
||||
|
@ -29,16 +31,23 @@
|
|||
@extend %grid-field-date__time;
|
||||
}
|
||||
|
||||
.grid-field-date__tzinfo {
|
||||
@extend .grid-field-text;
|
||||
|
||||
flex-shrink: 5;
|
||||
padding: 0;
|
||||
color: $color-neutral-500;
|
||||
font-size: 0.8em;
|
||||
width: 100%;
|
||||
max-width: 50px;
|
||||
}
|
||||
|
||||
.grid-field-date__date-input {
|
||||
@extend .grid-field-text__input;
|
||||
@extend %grid-field-date__date;
|
||||
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.grid-field-date__time-input {
|
||||
@extend .grid-field-text__input;
|
||||
@extend %grid-field-date__time;
|
||||
|
||||
min-width: 0;
|
||||
}
|
||||
|
|
|
@ -105,6 +105,11 @@ export default {
|
|||
required: false,
|
||||
default: null,
|
||||
},
|
||||
debounceTime: {
|
||||
type: Number,
|
||||
required: false,
|
||||
default: 400,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
|
@ -125,6 +130,12 @@ export default {
|
|||
this.results = await this.fetch(this.page, this.query)
|
||||
}
|
||||
},
|
||||
created() {
|
||||
// Small debounce when searching to prevent a lot of requests to the backend.
|
||||
this._search = debounce(async function () {
|
||||
this.results = await this.fetch(this.page, this.query)
|
||||
}, this.debounceTime)
|
||||
},
|
||||
methods: {
|
||||
clear() {
|
||||
this.displayName = this.initialDisplayName
|
||||
|
@ -165,12 +176,6 @@ export default {
|
|||
this.loading = true
|
||||
this._search()
|
||||
},
|
||||
/**
|
||||
* Small debounce when searching to prevent a lot of requests to the backend.
|
||||
*/
|
||||
_search: debounce(async function () {
|
||||
this.results = await this.fetch(this.page, this.query)
|
||||
}, 400),
|
||||
/**
|
||||
* When the user scrolls in the results, we can check if the user is near the end
|
||||
* and if so a new page will be loaded.
|
||||
|
|
|
@ -1,5 +1,6 @@
// Moment should always be imported from here. This will enforce that the timezone
// is always included. There were some problems when Baserow is installed as a
// dependency and then moment-timezone does not work. Still will resolve that issue.
export { default } from 'moment'
export { tz } from 'moment-timezone'
import moment from 'moment-timezone'

export default moment
@ -71,3 +71,13 @@ Vue.directive('autoOverflowScroll', autoOverflowScroll)
Vue.directive('userFileUpload', userFileUpload)
Vue.directive('autoScroll', autoScroll)
Vue.directive('clickOutside', clickOutside)

Vue.prototype.$super = function (options) {
return new Proxy(options, {
get: (options, name) => {
if (options.methods && name in options.methods) {
return options.methods[name].bind(this)
}
},
})
}
|
|
@ -93,8 +93,7 @@ export default {
try {
const { data } = await AirtableService(this.$client).create(
this.selectedGroupId,
this.airtableUrl,
new Intl.DateTimeFormat().resolvedOptions().timeZone
this.airtableUrl
)
this.startJobPoller(data)
} catch (error) {
|
|
|
@ -4,6 +4,9 @@
|
|||
<template v-if="props.field.date_include_time">{{
|
||||
$options.methods.getTime(props.field, props.value)
|
||||
}}</template>
|
||||
<span v-if="props.field.date_show_tzinfo" class="color--tzinfo">
|
||||
{{ $options.methods.getCellTimezoneAbbr(props.field, props.value) }}
|
||||
</span>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
<template>
|
||||
<div>
|
||||
<FieldDateSubForm
|
||||
:table="table"
|
||||
:default-values="defaultValues"
|
||||
></FieldDateSubForm>
|
||||
<div class="control">
|
||||
<div class="control__elements">
|
||||
<div class="filters__value-timezone">{{ values.timezone }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
import FieldDateSubForm from '@baserow/modules/database/components/field/FieldDateSubForm'
|
||||
import fieldSubForm from '@baserow/modules/database/mixins/fieldSubForm'
|
||||
|
||||
export default {
|
||||
name: 'FieldCreatedOnLastModifiedSubForm',
|
||||
components: { FieldDateSubForm },
|
||||
mixins: [form, fieldSubForm],
|
||||
data() {
|
||||
return {
|
||||
allowedValues: ['timezone'],
|
||||
values: {
|
||||
timezone: this.getCurrentTimezone(),
|
||||
},
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getCurrentTimezone() {
|
||||
return new Intl.DateTimeFormat().resolvedOptions().timeZone
|
||||
},
|
||||
},
|
||||
}
|
||||
</script>
|
|
@ -30,52 +30,132 @@
|
|||
<Checkbox v-model="values.date_include_time">{{
|
||||
$t('fieldDateSubForm.includeTimeLabel')
|
||||
}}</Checkbox>
|
||||
</div>
|
||||
</div>
|
||||
<div v-show="values.date_include_time" class="control">
|
||||
<label class="control__label control__label--small">{{
|
||||
$t('fieldDateSubForm.timeFormatLabel')
|
||||
}}</label>
|
||||
<div class="control__elements">
|
||||
<Dropdown
|
||||
v-model="values.date_time_format"
|
||||
:class="{ 'dropdown--error': $v.values.date_time_format.$error }"
|
||||
@hide="$v.values.date_time_format.$touch()"
|
||||
<div v-show="values.date_include_time" class="control margin-top-2">
|
||||
<label class="control__label control__label--small">{{
|
||||
$t('fieldDateSubForm.timeFormatLabel')
|
||||
}}</label>
|
||||
<div class="control__elements">
|
||||
<Dropdown
|
||||
v-model="values.date_time_format"
|
||||
@hide="$v.values.date_time_format.$touch()"
|
||||
>
|
||||
<DropdownItem
|
||||
:name="$t('fieldDateSubForm.24Hour') + ' (23:00)'"
|
||||
value="24"
|
||||
></DropdownItem>
|
||||
<DropdownItem
|
||||
:name="$t('fieldDateSubForm.12Hour') + ' (11:00 PM)'"
|
||||
value="12"
|
||||
></DropdownItem>
|
||||
</Dropdown>
|
||||
</div>
|
||||
</div>
|
||||
<Checkbox
|
||||
v-show="values.date_include_time"
|
||||
:value="values.date_force_timezone !== null"
|
||||
@input="toggleForceTimezone()"
|
||||
>{{ $t('fieldDateSubForm.forceTimezoneLabel') }}</Checkbox
|
||||
>
|
||||
<DropdownItem
|
||||
:name="$t('fieldDateSubForm.24Hour') + ' (23:00)'"
|
||||
value="24"
|
||||
></DropdownItem>
|
||||
<DropdownItem
|
||||
:name="$t('fieldDateSubForm.12Hour') + ' (11:00 PM)'"
|
||||
value="12"
|
||||
></DropdownItem>
|
||||
</Dropdown>
|
||||
<div
|
||||
v-show="
|
||||
values.date_include_time && values.date_force_timezone !== null
|
||||
"
|
||||
class="control margin-top-2"
|
||||
>
|
||||
<label class="control__label control__label--small">{{
|
||||
$t('fieldDateSubForm.forceTimezoneValue')
|
||||
}}</label>
|
||||
<div class="control__elements">
|
||||
<PaginatedDropdown
|
||||
:value="values.date_force_timezone"
|
||||
:fetch-page="fetchTimezonePage"
|
||||
:add-empty-item="false"
|
||||
:initial-display-name="defaultValues.date_force_timezone"
|
||||
:fetch-on-open="true"
|
||||
:debounce-time="100"
|
||||
@input="(timezone) => (values.date_force_timezone = timezone.id)"
|
||||
></PaginatedDropdown>
|
||||
</div>
|
||||
</div>
|
||||
<Checkbox
|
||||
v-show="
|
||||
!onCreate &&
|
||||
!defaultValues.read_only &&
|
||||
values.date_include_time &&
|
||||
utcOffsetDiff !== 0
|
||||
"
|
||||
:value="values.date_force_timezone_offset !== null"
|
||||
@input="toggleForceTimezoneOffset()"
|
||||
>{{
|
||||
$t(
|
||||
utcOffsetDiff > 0
|
||||
? 'fieldDateSubForm.addTimezoneOffsetLabel'
|
||||
: 'fieldDateSubForm.subTimezoneOffsetLabel',
|
||||
{ utcOffsetDiff: Math.abs(utcOffsetDiff) }
|
||||
)
|
||||
}}</Checkbox
|
||||
>
|
||||
<Checkbox v-model="values.date_show_tzinfo">{{
|
||||
$t('fieldDateSubForm.showTimezoneLabel')
|
||||
}}</Checkbox>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import moment from '@baserow/modules/core/moment'
|
||||
import { required } from 'vuelidate/lib/validators'
|
||||
|
||||
import form from '@baserow/modules/core/mixins/form'
|
||||
|
||||
import fieldSubForm from '@baserow/modules/database/mixins/fieldSubForm'
|
||||
import PaginatedDropdown from '@baserow/modules/core/components/PaginatedDropdown'
|
||||
|
||||
export default {
|
||||
name: 'FieldDateSubForm',
|
||||
components: {
|
||||
PaginatedDropdown,
|
||||
},
|
||||
mixins: [form, fieldSubForm],
|
||||
data() {
|
||||
return {
|
||||
allowedValues: ['date_format', 'date_include_time', 'date_time_format'],
|
||||
allowedValues: [
|
||||
'date_format',
|
||||
'date_include_time',
|
||||
'date_time_format',
|
||||
'date_show_tzinfo',
|
||||
'date_force_timezone',
|
||||
'date_force_timezone_offset',
|
||||
],
|
||||
values: {
|
||||
date_format: 'EU',
|
||||
date_include_time: false,
|
||||
date_time_format: '24',
|
||||
date_show_tzinfo: false,
|
||||
date_force_timezone: null,
|
||||
date_force_timezone_offset: null,
|
||||
},
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
onCreate() {
|
||||
return (
|
||||
this.defaultValues.name === null ||
|
||||
this.defaultValues.name === undefined
|
||||
)
|
||||
},
|
||||
utcOffsetDiff() {
|
||||
if (this.values.date_force_timezone === null) {
|
||||
return 0
|
||||
}
|
||||
const defaultTz = this.defaultValues.date_force_timezone
|
||||
? this.defaultValues.date_force_timezone
|
||||
: moment.tz.guess()
|
||||
const defaultOffset = moment.tz(defaultTz).utcOffset()
|
||||
const offset = moment.tz(this.values.date_force_timezone).utcOffset()
|
||||
return defaultOffset - offset
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
'values.date_time_format'(newValue, oldValue) {
|
||||
// For formula fields date_time_format is nullable, ensure it is set to the
|
||||
|
@ -91,11 +171,61 @@ export default {
|
|||
this.values.date_include_time = false
|
||||
}
|
||||
},
|
||||
'values.date_show_tzinfo'(newValue, oldValue) {
|
||||
// For formula fields date_show_tzinfo is nullable, ensure it is set to the
|
||||
// default, otherwise we would be sending nulls to the server.
|
||||
if (newValue == null) {
|
||||
this.values.date_show_tzinfo = false
|
||||
}
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
fetchTimezonePage(page, search) {
|
||||
const pageSize = 20
|
||||
const start = (page - 1) * pageSize
|
||||
const results = this.filterTimezones(search || '')
|
||||
// The paginated dropdown expects an HTTP response-like object with these properties
|
||||
return {
|
||||
data: {
|
||||
count: results.length,
|
||||
next: results.length > start + pageSize ? page + 1 : null,
|
||||
previous: page > 1 ? page - 1 : null,
|
||||
results: results.slice(start, start + pageSize).map((timezone) => {
|
||||
return {
|
||||
id: timezone,
|
||||
value: timezone,
|
||||
}
|
||||
}),
|
||||
},
|
||||
}
|
||||
},
|
||||
filterTimezones(value) {
|
||||
return moment.tz.names().filter((timezone) => {
|
||||
return timezone.toLowerCase().includes(value.toLowerCase())
|
||||
})
|
||||
},
|
||||
toggleForceTimezone() {
|
||||
if (this.values.date_force_timezone === null) {
|
||||
this.values.date_force_timezone = moment.tz.guess()
|
||||
} else {
|
||||
this.values.date_force_timezone = null
|
||||
}
|
||||
},
|
||||
toggleForceTimezoneOffset() {
|
||||
if (this.values.date_force_timezone_offset === null) {
|
||||
this.values.date_force_timezone_offset = this.utcOffsetDiff
|
||||
} else {
|
||||
this.values.date_force_timezone_offset = null
|
||||
}
|
||||
},
|
||||
},
|
||||
validations: {
|
||||
values: {
|
||||
date_format: { required },
|
||||
date_time_format: { required },
|
||||
date_show_tzinfo: { required },
|
||||
date_force_timezone: {},
|
||||
date_force_timezone_offset: {},
|
||||
},
|
||||
},
|
||||
}
|
||||
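As a rough illustration of the response-like object the PaginatedDropdown consumes from fetchTimezonePage above, here is a hypothetical first page for a 'london' search; the exact names and counts depend on the timezone data bundled with moment-timezone.

import moment from '@baserow/modules/core/moment'

// Hypothetical search; moment.tz.names() provides the full IANA timezone list.
const matches = moment.tz.names().filter((tz) => /london/i.test(tz))

// Shape handed to PaginatedDropdown (page 1, page size 20):
const response = {
  data: {
    count: matches.length, // e.g. 1
    next: null, // would be 2 if more than 20 names matched
    previous: null, // no previous page for page 1
    results: matches.slice(0, 20).map((tz) => ({ id: tz, value: tz })),
  },
}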
|
|
|
@ -27,7 +27,7 @@
|
|||
:inline="true"
|
||||
:monday-first="true"
|
||||
:use-utc="true"
|
||||
:value="copy"
|
||||
:value="pickerDate"
|
||||
:language="datePickerLang[$i18n.locale]"
|
||||
class="datepicker"
|
||||
@input="chooseDate(field, $event)"
|
||||
|
@ -62,6 +62,9 @@
|
|||
@input="chooseTime(field, $event)"
|
||||
></TimeSelectContext>
|
||||
</div>
|
||||
<div class="field-date__tzinfo">
|
||||
{{ getCellTimezoneAbbr(field, value, { force: true }) }}
|
||||
</div>
|
||||
</div>
|
||||
<div v-show="touched && !valid" class="error">
|
||||
{{ error }}
|
||||
|
|
|
@ -190,7 +190,7 @@ export default {
|
|||
* because some filter types are not compatible with certain field types.
|
||||
*/
|
||||
updateFilter(filter, values) {
|
||||
const field = Object.prototype.hasOwnProperty.call(values, 'field')
|
||||
const fieldId = Object.prototype.hasOwnProperty.call(values, 'field')
|
||||
? values.field
|
||||
: filter.field
|
||||
const type = Object.prototype.hasOwnProperty.call(values, 'type')
|
||||
|
@ -206,7 +206,7 @@ export default {
|
|||
const allowedFilterTypes = this.allowedFilters(
|
||||
this.filterTypes,
|
||||
this.fields,
|
||||
field
|
||||
fieldId
|
||||
).map((filter) => filter.type)
|
||||
if (!allowedFilterTypes.includes(type)) {
|
||||
values.type = allowedFilterTypes[0]
|
||||
|
@ -216,11 +216,13 @@ export default {
|
|||
// If the type or value has changed it could be that the value needs to be
|
||||
// formatted or prepared.
|
||||
if (
|
||||
Object.prototype.hasOwnProperty.call(values, 'field') ||
|
||||
Object.prototype.hasOwnProperty.call(values, 'type') ||
|
||||
Object.prototype.hasOwnProperty.call(values, 'value')
|
||||
) {
|
||||
const filterType = this.$registry.get('viewFilter', type)
|
||||
values.value = filterType.prepareValue(value)
|
||||
const field = this.fields.find(({ id }) => id === fieldId)
|
||||
values.value = filterType.prepareValue(value, field, true)
|
||||
}
|
||||
|
||||
this.$emit('updateFilter', { filter, values })
|
||||
|
|
|
@ -1,37 +1,38 @@
|
|||
<template>
|
||||
<div>
|
||||
<input
|
||||
ref="date"
|
||||
v-model="dateString"
|
||||
type="text"
|
||||
class="input filters__value-input"
|
||||
:disabled="disabled"
|
||||
:class="{ 'input--error': $v.copy.$error }"
|
||||
:placeholder="getDatePlaceholder(field)"
|
||||
@focus="$refs.dateContext.toggle($refs.date, 'bottom', 'left', 0)"
|
||||
@blur="$refs.dateContext.hide()"
|
||||
@input="
|
||||
;[setCopyFromDateString(dateString, 'dateString'), delayedUpdate(copy)]
|
||||
"
|
||||
@keydown.enter="delayedUpdate(copy, true)"
|
||||
/>
|
||||
<Context
|
||||
ref="dateContext"
|
||||
:hide-on-click-outside="false"
|
||||
class="datepicker-context"
|
||||
>
|
||||
<client-only>
|
||||
<date-picker
|
||||
:inline="true"
|
||||
:monday-first="true"
|
||||
:use-utc="true"
|
||||
:value="dateObject"
|
||||
:language="datePickerLang[$i18n.locale]"
|
||||
class="datepicker"
|
||||
@input=";[setCopy($event, 'dateObject'), delayedUpdate(copy, true)]"
|
||||
></date-picker>
|
||||
</client-only>
|
||||
</Context>
|
||||
<div class="filters__value-date-timezone">
|
||||
<div>
|
||||
<input
|
||||
ref="date"
|
||||
v-model="dateString"
|
||||
type="text"
|
||||
class="input filters__value-input"
|
||||
:disabled="disabled"
|
||||
:class="{ 'input--error': $v.dateString.$error }"
|
||||
:placeholder="getDatePlaceholder(field)"
|
||||
@focus="$refs.dateContext.toggle($refs.date, 'bottom', 'left', 0)"
|
||||
@blur="$refs.dateContext.hide()"
|
||||
@input=";[setCopyFromDateString(dateString, 'dateString')]"
|
||||
@keydown.enter="delayedUpdate(copy, true)"
|
||||
/>
|
||||
<Context
|
||||
ref="dateContext"
|
||||
:hide-on-click-outside="false"
|
||||
class="datepicker-context"
|
||||
>
|
||||
<client-only>
|
||||
<date-picker
|
||||
:inline="true"
|
||||
:monday-first="true"
|
||||
:use-utc="true"
|
||||
:value="dateObject"
|
||||
:language="datePickerLang[$i18n.locale]"
|
||||
class="datepicker"
|
||||
@input="chooseDate($event)"
|
||||
></date-picker>
|
||||
</client-only>
|
||||
</Context>
|
||||
</div>
|
||||
<div class="filters__value-timezone">{{ getTimezoneAbbr() }}</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
@ -41,15 +42,14 @@ import {
|
|||
getDateMomentFormat,
|
||||
getDateHumanReadableFormat,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
import filterTypeInput from '@baserow/modules/database/mixins/filterTypeInput'
|
||||
import filterTypeDateInput from '@baserow/modules/database/mixins/filterTypeDateInput'
|
||||
import { en, fr } from 'vuejs-datepicker/dist/locale'
|
||||
|
||||
export default {
|
||||
name: 'ViewFilterTypeDate',
|
||||
mixins: [filterTypeInput],
|
||||
mixins: [filterTypeDateInput],
|
||||
data() {
|
||||
return {
|
||||
copy: '',
|
||||
dateString: '',
|
||||
dateObject: '',
|
||||
datePickerLang: {
|
||||
|
@ -58,26 +58,42 @@ export default {
|
|||
},
|
||||
}
|
||||
},
|
||||
watch: {
|
||||
'filter.value'(value) {
|
||||
this.setCopy(value)
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.setCopy(this.filter.value)
|
||||
},
|
||||
mounted() {
|
||||
this.$v.$touch()
|
||||
},
|
||||
methods: {
|
||||
isInputValid() {
|
||||
return !this.$v.dateString.$error
|
||||
},
|
||||
chooseDate(value) {
|
||||
const timezone = this.getTimezone()
|
||||
const pickerDate = moment.utc(value)
|
||||
if (!pickerDate.isValid()) {
|
||||
return
|
||||
} else if (timezone !== null) {
|
||||
pickerDate.tz(timezone, true)
|
||||
}
|
||||
|
||||
this.setCopy(pickerDate.format('YYYY-MM-DD'), 'dateObject')
|
||||
this.delayedUpdate(this.copy, true)
|
||||
},
|
||||
setCopy(value, sender) {
|
||||
const newDate = moment.utc(value)
|
||||
const [timezone, filterValue] = this.splitCombinedValue(value)
|
||||
this.timezoneValue = timezone
|
||||
const newDate = moment.utc(
|
||||
filterValue,
|
||||
['YYYY-MM-DD', getDateMomentFormat(this.field.date_format)],
|
||||
true
|
||||
)
|
||||
if (timezone !== null) {
|
||||
newDate.tz(timezone, true)
|
||||
}
|
||||
|
||||
if (newDate.isValid()) {
|
||||
this.copy = newDate.format('YYYY-MM-DD')
|
||||
|
||||
if (sender !== 'dateObject') {
|
||||
this.dateObject = newDate.toDate()
|
||||
this.dateObject = newDate.format('YYYY-MM-DD')
|
||||
}
|
||||
|
||||
if (sender !== 'dateString') {
|
||||
|
@ -93,10 +109,15 @@ export default {
|
|||
}
|
||||
|
||||
const dateFormat = getDateMomentFormat(this.field.date_format)
|
||||
const newDate = moment.utc(value, dateFormat)
|
||||
const timezone = this.getTimezone()
|
||||
const newDate = moment.utc(value, dateFormat, true)
|
||||
if (timezone !== null) {
|
||||
newDate.tz(timezone)
|
||||
}
|
||||
|
||||
if (newDate.isValid()) {
|
||||
this.setCopy(newDate, sender)
|
||||
this.setCopy(newDate.format('YYYY-MM-DD'), sender)
|
||||
this.delayedUpdate(this.copy, true)
|
||||
} else {
|
||||
this.copy = value
|
||||
}
|
||||
|
@ -111,9 +132,11 @@ export default {
|
|||
},
|
||||
},
|
||||
validations: {
|
||||
copy: {
|
||||
date(value) {
|
||||
return value === '' || moment(value).isValid()
|
||||
copy: {},
|
||||
dateString: {
|
||||
isValidDate(value) {
|
||||
const dateFormat = getDateMomentFormat(this.field.date_format)
|
||||
return value === '' || moment.utc(value, dateFormat).isValid()
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
<div class="filters__multi-value">
|
||||
<input
|
||||
ref="input"
|
||||
v-model="copy"
|
||||
v-model="xAgo"
|
||||
type="text"
|
||||
class="
|
||||
input
|
||||
|
@ -10,65 +10,49 @@
|
|||
filters__value-input
|
||||
filters__value-input--small
|
||||
"
|
||||
:class="{ 'input--error': $v.copy.$error }"
|
||||
:class="{ 'input--error': $v.xAgo.$error }"
|
||||
:disabled="disabled"
|
||||
@input="combinedDelayedUpdate($event.target.value)"
|
||||
@keydown.enter="combinedDelayedUpdate($event.target.value, true)"
|
||||
@input="
|
||||
;[
|
||||
setCopy($event.target.value),
|
||||
delayedUpdate($event.target.value, true),
|
||||
]
|
||||
"
|
||||
@keydown.enter="
|
||||
;[
|
||||
setCopy($event.target.value),
|
||||
delayedUpdate($event.target.value, true),
|
||||
]
|
||||
"
|
||||
/>
|
||||
<span class="filters__value-timezone">{{ timezoneValue }}</span>
|
||||
<span class="filters__value-timezone">{{ getTimezoneAbbr() }}</span>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import viewFilter from '@baserow/modules/database/mixins/viewFilter'
|
||||
import { integer } from 'vuelidate/lib/validators'
|
||||
|
||||
import filterTypeInput from '@baserow/modules/database/mixins/filterTypeInput'
|
||||
import { integer, required } from 'vuelidate/lib/validators'
|
||||
import filterTypeDateInput from '@baserow/modules/database/mixins/filterTypeDateInput'
|
||||
|
||||
export default {
|
||||
name: 'ViewFilterTypeNumberWithTimeZone',
|
||||
mixins: [filterTypeInput, viewFilter],
|
||||
computed: {
|
||||
timezoneValue() {
|
||||
const [timezone] = this.splitCombinedValue(this.filter.value)
|
||||
return timezone
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
'filter.value'(value) {
|
||||
this.copy = this.getDaysAgo(value)
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.copy = this.getDaysAgo(this.filter.value)
|
||||
mixins: [filterTypeDateInput],
|
||||
data() {
|
||||
return {
|
||||
xAgo: '',
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getSeparator() {
|
||||
return '?'
|
||||
isInputValid() {
|
||||
return !this.$v.xAgo.$error
|
||||
},
|
||||
splitCombinedValue(value) {
|
||||
const separator = this.getSeparator()
|
||||
const [timezone, daysAgo] = value.split(separator)
|
||||
return [timezone, daysAgo]
|
||||
},
|
||||
getDaysAgo(value) {
|
||||
const [, daysAgo] = this.splitCombinedValue(value)
|
||||
return daysAgo
|
||||
},
|
||||
prepareValue(timezoneValue, daysAgo) {
|
||||
const separator = this.getSeparator()
|
||||
return `${timezoneValue}${separator}${daysAgo}`
|
||||
},
|
||||
combinedDelayedUpdate(value, immediately = false) {
|
||||
const preparedValue = this.prepareValue(this.timezoneValue, value)
|
||||
return this.delayedUpdate(preparedValue, immediately)
|
||||
},
|
||||
focus() {
|
||||
this.$refs.input.focus()
|
||||
setCopy(value, sender) {
|
||||
const [, xAgo] = this.splitCombinedValue(value)
|
||||
this.xAgo = xAgo
|
||||
},
|
||||
},
|
||||
validations: {
|
||||
copy: { integer },
|
||||
copy: { required },
|
||||
xAgo: { integer },
|
||||
},
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
<template>
|
||||
<div class="filters__value-timezone">{{ filter.value }}</div>
|
||||
<div class="filters__value-timezone">{{ getTimezoneAbbr() }}</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import viewFilter from '@baserow/modules/database/mixins/viewFilter'
|
||||
import filterTypeDateInput from '@baserow/modules/database/mixins/filterTypeDateInput'
|
||||
|
||||
export default {
|
||||
name: 'ViewFilterTypeTimeZone',
|
||||
mixins: [viewFilter],
|
||||
mixins: [filterTypeDateInput],
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -1,20 +1,27 @@
|
|||
<template functional>
|
||||
<div ref="cell" class="grid-view__cell" :class="data.staticClass || ''">
|
||||
<div
|
||||
class="grid-field-date"
|
||||
:class="{ 'grid-field-date--has-time': props.field.date_include_time }"
|
||||
>
|
||||
<div ref="dateDisplay" class="grid-field-date__date">
|
||||
{{ $options.methods.getDate(props.field, props.value) }}
|
||||
<client-only>
|
||||
<div class="grid-field-date">
|
||||
<div ref="dateDisplay" class="grid-field-date__date">
|
||||
{{
|
||||
$options.methods.getDate(props.field, props.value) || props.value
|
||||
}}
|
||||
</div>
|
||||
<div
|
||||
v-if="props.field.date_include_time"
|
||||
ref="timeDisplay"
|
||||
class="grid-field-date__time"
|
||||
>
|
||||
{{ $options.methods.getTime(props.field, props.value) }}
|
||||
</div>
|
||||
<div
|
||||
v-if="props.field.date_show_tzinfo"
|
||||
class="grid-field-date__tzinfo"
|
||||
>
|
||||
{{ $options.methods.getCellTimezoneAbbr(props.field, props.value) }}
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
v-if="props.field.date_include_time"
|
||||
ref="timeDisplay"
|
||||
class="grid-field-date__time"
|
||||
>
|
||||
{{ $options.methods.getTime(props.field, props.value) }}
|
||||
</div>
|
||||
</div>
|
||||
</client-only>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
|
|
@ -5,10 +5,7 @@
|
|||
:class="{ editing: editing }"
|
||||
@contextmenu="stopContextIfEditing($event)"
|
||||
>
|
||||
<div
|
||||
class="grid-field-date"
|
||||
:class="{ 'grid-field-date--has-time': field.date_include_time }"
|
||||
>
|
||||
<div class="grid-field-date">
|
||||
<div v-show="!editing" ref="dateDisplay" class="grid-field-date__date">
|
||||
{{ date }}
|
||||
</div>
|
||||
|
@ -41,7 +38,7 @@
|
|||
:inline="true"
|
||||
:monday-first="true"
|
||||
:use-utc="true"
|
||||
:value="copy"
|
||||
:value="pickerDate"
|
||||
:language="datePickerLang[$i18n.locale]"
|
||||
class="datepicker"
|
||||
@input="chooseDate(field, $event)"
|
||||
|
@ -70,6 +67,9 @@
|
|||
></TimeSelectContext>
|
||||
</template>
|
||||
</template>
|
||||
<div v-if="field.date_show_tzinfo" class="grid-field-date__tzinfo">
|
||||
{{ getCellTimezoneAbbr(field, value, { force: editing }) }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
<template>
|
||||
<div ref="cell" class="grid-view__cell active">
|
||||
<div
|
||||
class="grid-field-date"
|
||||
:class="{ 'grid-field-date--has-time': field.date_include_time }"
|
||||
>
|
||||
<div class="grid-field-date">
|
||||
<div ref="dateDisplay" class="grid-field-date__date">
|
||||
{{ getDate(field, value) }}
|
||||
</div>
|
||||
|
@ -14,6 +11,9 @@
|
|||
>
|
||||
{{ getTime(field, value) }}
|
||||
</div>
|
||||
<div v-if="field.date_show_tzinfo" class="grid-field-date__tzinfo">
|
||||
{{ getCellTimezoneAbbr(field, value) }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
@ -14,7 +14,6 @@ import FieldNumberSubForm from '@baserow/modules/database/components/field/Field
|
|||
import FieldRatingSubForm from '@baserow/modules/database/components/field/FieldRatingSubForm'
|
||||
import FieldTextSubForm from '@baserow/modules/database/components/field/FieldTextSubForm'
|
||||
import FieldDateSubForm from '@baserow/modules/database/components/field/FieldDateSubForm'
|
||||
import FieldCreatedOnLastModifiedSubForm from '@baserow/modules/database/components/field/FieldCreatedOnLastModifiedSubForm'
|
||||
import FieldLinkRowSubForm from '@baserow/modules/database/components/field/FieldLinkRowSubForm'
|
||||
import FieldSelectOptionsSubForm from '@baserow/modules/database/components/field/FieldSelectOptionsSubForm'
|
||||
|
||||
|
@ -84,6 +83,7 @@ import FormViewFieldLinkRow from '@baserow/modules/database/components/view/form
|
|||
import { trueString } from '@baserow/modules/database/utils/constants'
|
||||
import {
|
||||
getDateMomentFormat,
|
||||
getFieldTimezone,
|
||||
getTimeMomentFormat,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
import {
|
||||
|
@ -1409,7 +1409,11 @@ class BaseDateFieldType extends FieldType {
|
|||
}
|
||||
|
||||
toHumanReadableString(field, value) {
|
||||
const date = moment.tz(value, field.timezone)
|
||||
const timezone = getFieldTimezone(field)
|
||||
const date = moment.utc(value)
|
||||
if (timezone !== null) {
|
||||
date.tz(timezone)
|
||||
}
|
||||
|
||||
if (date.isValid()) {
|
||||
const dateFormat = getDateMomentFormat(field.date_format)
|
||||
|
@ -1419,7 +1423,6 @@ class BaseDateFieldType extends FieldType {
|
|||
const timeFormat = getTimeMomentFormat(field.date_time_format)
|
||||
dateString = `${dateString} ${date.format(timeFormat)}`
|
||||
}
|
||||
|
||||
return dateString
|
||||
} else {
|
||||
return ''
|
||||
|
@ -1438,27 +1441,25 @@ class BaseDateFieldType extends FieldType {
|
|||
* Tries to parse the clipboard text value with moment and returns the date in the
|
||||
* correct format for the field. If it can't be parsed, null is returned.
|
||||
*/
|
||||
prepareValueForPaste(field, clipboardData) {
|
||||
if (!clipboardData) {
|
||||
clipboardData = ''
|
||||
}
|
||||
return DateFieldType.formatDate(field, clipboardData)
|
||||
prepareValueForPaste(field, clipboardData, richClipboardData) {
|
||||
const dateValue = DateFieldType.parseDate(field, clipboardData || '')
|
||||
return DateFieldType.formatDate(field, dateValue)
|
||||
}
|
||||
|
||||
static formatDate(field, dateString) {
|
||||
static parseDate(field, dateString) {
|
||||
const value = dateString.toUpperCase()
|
||||
|
||||
// Formats for ISO dates
|
||||
let formats = [
|
||||
moment.ISO_8601,
|
||||
'YYYY-MM-DD',
|
||||
'YYYY-MM-DD hh:mm A',
|
||||
'YYYY-MM-DD HH:mm',
|
||||
'YYYY-MM-DD',
|
||||
]
|
||||
// Formats for EU dates
|
||||
const EUFormat = ['DD/MM/YYYY', 'DD/MM/YYYY hh:mm A', 'DD/MM/YYYY HH:mm']
|
||||
const EUFormat = ['DD/MM/YYYY hh:mm A', 'DD/MM/YYYY HH:mm', 'DD/MM/YYYY']
|
||||
// Formats for US dates
|
||||
const USFormat = ['MM/DD/YYYY', 'MM/DD/YYYY hh:mm A', 'MM/DD/YYYY HH:mm']
|
||||
const USFormat = ['MM/DD/YYYY hh:mm A', 'MM/DD/YYYY HH:mm', 'MM/DD/YYYY']
|
||||
|
||||
// Interpret the pasted date based on the field's current date format
|
||||
if (field.date_format === 'EU') {
|
||||
|
@ -1467,10 +1468,23 @@ class BaseDateFieldType extends FieldType {
|
|||
formats = formats.concat(USFormat).concat(EUFormat)
|
||||
}
|
||||
|
||||
const date = moment.utc(value, formats)
|
||||
const date = moment.utc(value, formats, true)
|
||||
if (!date.isValid()) {
|
||||
return null
|
||||
}
|
||||
const timezone = getFieldTimezone(field)
|
||||
if (timezone) {
|
||||
date.tz(timezone, true)
|
||||
}
|
||||
return date
|
||||
}
|
||||
|
||||
if (date.isValid()) {
|
||||
return field.date_include_time ? date.format() : date.format('YYYY-MM-DD')
|
||||
static formatDate(field, date) {
|
||||
const momentDate = moment.utc(date)
|
||||
if (momentDate.isValid()) {
|
||||
return field.date_include_time
|
||||
? momentDate.format()
|
||||
: momentDate.format('YYYY-MM-DD')
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
|
@ -1540,7 +1554,7 @@ export class CreatedOnLastModifiedBaseFieldType extends BaseDateFieldType {
|
|||
}
|
||||
|
||||
getFormComponent() {
|
||||
return FieldCreatedOnLastModifiedSubForm
|
||||
return FieldDateSubForm
|
||||
}
|
||||
|
||||
getFormViewFieldComponent() {
|
||||
|
@ -1564,7 +1578,7 @@ export class CreatedOnLastModifiedBaseFieldType extends BaseDateFieldType {
|
|||
* is simply the current time.
|
||||
*/
|
||||
getNewRowValue() {
|
||||
return moment().utc().format()
|
||||
return moment().local().format()
|
||||
}
|
||||
|
||||
shouldFetchDataWhenAdded() {
|
||||
|
|
|
@ -235,6 +235,11 @@
|
|||
"dateFormatUS": "US",
|
||||
"dateFormatISO": "ISO",
|
||||
"includeTimeLabel": "Include time",
|
||||
"showTimezoneLabel": "Show timezone",
|
||||
"forceTimezoneLabel": "Set timezone for all collaborators",
|
||||
"forceTimezoneValue": "Timezone",
|
||||
"addTimezoneOffsetLabel": "Convert values (add {utcOffsetDiff} minutes)",
|
||||
"subTimezoneOffsetLabel": "Convert values (subtract {utcOffsetDiff} minutes)",
|
||||
"timeFormatLabel": "Time format",
|
||||
"24Hour": "24 hour",
|
||||
"12Hour": "12 hour"
|
||||
|
@ -753,4 +758,4 @@
|
|||
"errorEmptyFileNameTitle": "Invalid file name",
|
||||
"errorEmptyFileNameMessage": "You can't set an empty name for a file."
|
||||
}
|
||||
}
|
||||
}
|
|
@ -4,8 +4,12 @@ import {
|
|||
getTimeMomentFormat,
|
||||
getDateHumanReadableFormat,
|
||||
getTimeHumanReadableFormat,
|
||||
getFieldTimezone,
|
||||
getCellTimezoneAbbr,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
|
||||
const DATE_PICKER_FORMAT = 'YYYY-MM-DD'
|
||||
|
||||
/**
|
||||
* Mixin that introduces methods for the date field. This can be used for both a row
|
||||
* and a grid view field.
|
||||
|
@ -14,7 +18,9 @@ export default {
|
|||
data() {
|
||||
return {
|
||||
date: '',
|
||||
pickerDate: '',
|
||||
time: '',
|
||||
momentDate: null,
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
|
@ -23,7 +29,10 @@ export default {
|
|||
* with a computed property.
|
||||
*/
|
||||
valueAndFormats() {
|
||||
return `${this.value}|${this.field.date_format}|${this.field.date_time_format}`
|
||||
return `${this.value}|${this.field.date_format}|${this.field.date_time_format}|${this.field.date_force_timezone}`
|
||||
},
|
||||
fieldDateFormat() {
|
||||
return getDateMomentFormat(this.field.date_format)
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
|
@ -42,23 +51,37 @@ export default {
|
|||
this.setDateAndTime(this.field, this.value)
|
||||
},
|
||||
methods: {
|
||||
updateDateValue() {
|
||||
this.pickerDate = this.momentDate.format(DATE_PICKER_FORMAT)
|
||||
this.date = this.momentDate.format(this.fieldDateFormat)
|
||||
},
|
||||
updateTimeValue() {
|
||||
const timeFormat = getTimeMomentFormat(this.field.date_time_format)
|
||||
this.time = this.momentDate.format(timeFormat)
|
||||
},
|
||||
/**
|
||||
* When the date part is updated we also need to update the copy data which
|
||||
* contains the whole date(time) in the correct format. The copy contains the
|
||||
* value that is actually going to be saved.
|
||||
*/
|
||||
updateDate(field, value) {
|
||||
const dateFormat = getDateMomentFormat(field.date_format)
|
||||
const newDate = moment.utc(value, dateFormat)
|
||||
this.updateCopy(
|
||||
field,
|
||||
{
|
||||
const dateFormats = [DATE_PICKER_FORMAT, this.fieldDateFormat]
|
||||
const timezone = getFieldTimezone(field)
|
||||
let newDate = moment.utc(value, dateFormats, true)
|
||||
if (timezone !== null) {
|
||||
newDate = newDate.clone().tz(timezone, true)
|
||||
}
|
||||
|
||||
if (newDate.isValid()) {
|
||||
this.updateCopy(field, {
|
||||
year: newDate.year(),
|
||||
month: newDate.month(),
|
||||
date: newDate.date(),
|
||||
},
|
||||
newDate
|
||||
)
|
||||
})
|
||||
this.updateDateValue()
|
||||
} else {
|
||||
this.date = value
|
||||
}
|
||||
},
|
||||
/**
|
||||
* When the time part is updated we also need to update the copy data which
|
||||
|
@ -66,26 +89,35 @@ export default {
|
|||
* value that is actually going to be saved.
|
||||
*/
|
||||
updateTime(field, value) {
|
||||
const newTime = moment.utc(value, ['h:m a', 'H:m'])
|
||||
this.updateCopy(
|
||||
field,
|
||||
{
|
||||
const timeFormats = ['hh:mm a', 'HH:mm']
|
||||
const timezone = getFieldTimezone(field)
|
||||
let newTime = moment.utc(value, timeFormats, true)
|
||||
if (timezone !== null) {
|
||||
newTime = newTime.clone().tz(timezone, true)
|
||||
}
|
||||
|
||||
if (newTime.isValid()) {
|
||||
this.updateCopy(field, {
|
||||
hour: newTime.hour(),
|
||||
minute: newTime.minute(),
|
||||
second: 0,
|
||||
},
|
||||
newTime
|
||||
)
|
||||
})
|
||||
this.updateTimeValue()
|
||||
} else {
|
||||
this.time = value
|
||||
}
|
||||
},
|
||||
/**
|
||||
* When the user uses the datapicker to choose a date, we also need to update
|
||||
* When the user uses the datepicker to choose a date, we also need to update
|
||||
* date data and the copy so that the correct date is visible for the user.
|
||||
*/
|
||||
chooseDate(field, value) {
|
||||
const dateFormat = getDateMomentFormat(field.date_format)
|
||||
value = moment.utc(value).format(dateFormat)
|
||||
this.date = value
|
||||
this.updateDate(field, value)
|
||||
const timezone = getFieldTimezone(field)
|
||||
let pickerDate = moment.utc(value)
|
||||
if (timezone !== null) {
|
||||
pickerDate = pickerDate.clone().tz(timezone, true)
|
||||
}
|
||||
this.updateDate(field, pickerDate.format(DATE_PICKER_FORMAT))
|
||||
},
|
||||
/**
|
||||
* When the user uses the time context to choose a time, we also need to update
|
||||
|
@ -99,36 +131,40 @@ export default {
|
|||
* A helper method that allows updating the copy data by only changing certain
|
||||
* properties of a datetime. For example only the month could be updated.
|
||||
*/
|
||||
updateCopy(field, values, newDate) {
|
||||
if (!newDate.isValid()) {
|
||||
return
|
||||
}
|
||||
|
||||
const existing = moment.utc(this.copy || undefined).seconds(0)
|
||||
existing.set(values)
|
||||
let newValue = existing.format()
|
||||
if (!field.date_include_time) {
|
||||
newValue = existing.format('YYYY-MM-DD')
|
||||
}
|
||||
this.copy = newValue
|
||||
updateCopy(field, values) {
|
||||
const existing = this.momentDate.set(values)
|
||||
this.copy = field.date_include_time
|
||||
? existing.format()
|
||||
: existing.format('YYYY-MM-DD')
|
||||
},
|
||||
/**
|
||||
* Updates the date and time data by converting the value to the correct formats.
|
||||
*/
|
||||
setDateAndTime(field, value) {
|
||||
const timezone = getFieldTimezone(field)
|
||||
if (value === null) {
|
||||
this.date = ''
|
||||
this.time = ''
|
||||
this.date = this.time = ''
|
||||
this.momentDate = moment.utc()
|
||||
if (timezone) {
|
||||
this.momentDate = this.momentDate
|
||||
.clone()
|
||||
.utcOffset(moment.tz(timezone).utcOffset())
|
||||
}
|
||||
this.pickerDate = ''
|
||||
return
|
||||
}
|
||||
|
||||
const existing = moment.utc(value || undefined).seconds(0)
|
||||
let existing = moment.utc(value, moment.ISO_8601, true)
|
||||
if (timezone) {
|
||||
existing = existing.clone().utcOffset(moment.tz(timezone).utcOffset())
|
||||
}
|
||||
|
||||
const dateFormat = getDateMomentFormat(this.field.date_format)
|
||||
const timeFormat = getTimeMomentFormat(this.field.date_time_format)
|
||||
|
||||
this.date = existing.format(dateFormat)
|
||||
this.time = existing.format(timeFormat)
|
||||
this.momentDate = existing
|
||||
this.updateDateValue()
|
||||
this.updateTimeValue()
|
||||
},
|
||||
getCellTimezoneAbbr(field, value, force) {
|
||||
return getCellTimezoneAbbr(field, value, { force })
|
||||
},
|
||||
/**
|
||||
* Returns a human readable date placeholder of the format for the input.
|
||||
|
|
63
web-frontend/modules/database/mixins/filterTypeDateInput.js
Normal file
|
@ -0,0 +1,63 @@
|
|||
import moment from '@baserow/modules/core/moment'
|
||||
import {
|
||||
splitTimezoneAndFilterValue,
|
||||
DATE_FILTER_TIMEZONE_VALUE_SEPARATOR,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
import filterTypeInput from '@baserow/modules/database/mixins/filterTypeInput'
|
||||
|
||||
export default {
|
||||
mixins: [filterTypeInput],
|
||||
data() {
|
||||
return {
|
||||
copy: '',
|
||||
timezoneValue: null,
|
||||
}
|
||||
},
|
||||
watch: {
|
||||
'filter.value'(value) {
|
||||
this.setCopy(value)
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.setCopy(this.filter.value)
|
||||
},
|
||||
methods: {
|
||||
getSeparator() {
|
||||
return DATE_FILTER_TIMEZONE_VALUE_SEPARATOR
|
||||
},
|
||||
getDefaultTimezone() {
|
||||
return this.field.date_force_timezone || moment.tz.guess()
|
||||
},
|
||||
getTimezone() {
|
||||
if (this.timezoneValue === null || this.timezoneValue === undefined) {
|
||||
this.timezoneValue = this.getDefaultTimezone()
|
||||
}
|
||||
return this.timezoneValue
|
||||
},
|
||||
getTimezoneAbbr() {
|
||||
const timezone = this.getTimezone()
|
||||
return timezone !== undefined ? moment.utc().tz(timezone).format('z') : ''
|
||||
},
|
||||
splitCombinedValue(value) {
|
||||
const [timezone, filterValue] = splitTimezoneAndFilterValue(value)
|
||||
return [timezone, filterValue]
|
||||
},
|
||||
setCopy(value) {
|
||||
const [timezone, filterValue] = this.splitCombinedValue(value)
|
||||
this.copy = filterValue
|
||||
this.timezoneValue = timezone
|
||||
},
|
||||
prepareValue(value, field) {
|
||||
const sep = this.getSeparator()
|
||||
const timezone = this.getTimezone()
|
||||
return timezone ? `${timezone}${sep}${value}` : value
|
||||
},
|
||||
delayedUpdate(value, immediately = false) {
|
||||
const combinedValue = this.prepareValue(value, this.field)
|
||||
return this.$super(filterTypeInput).delayedUpdate(
|
||||
combinedValue,
|
||||
immediately
|
||||
)
|
||||
},
|
||||
},
|
||||
}
|
|
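A small sketch of the timezone fallback the mixin above implements: when no timezone has been stored in the filter value yet, it falls back to the field's forced timezone and finally to the guessed browser timezone. This assumes moment-timezone data is loaded; the abbreviations vary with DST.

import moment from '@baserow/modules/core/moment'

// Mirrors getDefaultTimezone() and getTimezoneAbbr() from the mixin.
function defaultTimezone(field) {
  return field.date_force_timezone || moment.tz.guess()
}

function timezoneAbbr(field, storedTimezone = null) {
  const timezone = storedTimezone || defaultTimezone(field)
  return moment.utc().tz(timezone).format('z')
}

// With a forced timezone every collaborator sees the same abbreviation:
timezoneAbbr({ date_force_timezone: 'Europe/Amsterdam' }) // 'CET' or 'CEST'
// Without one it follows the browser's guessed timezone:
timezoneAbbr({ date_force_timezone: null }) // e.g. 'PST' for 'America/Los_Angeles'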
@ -28,6 +28,9 @@ export default {
|
|||
}
|
||||
},
|
||||
methods: {
|
||||
isInputValid() {
|
||||
return !this.$v.copy.$error
|
||||
},
|
||||
delayedUpdate(value, immediately = false) {
|
||||
if (this.disabled) {
|
||||
return
|
||||
|
@ -36,7 +39,7 @@ export default {
|
|||
clearTimeout(delayTimeout)
|
||||
this.$v.$touch()
|
||||
|
||||
if (this.$v.copy.$error) {
|
||||
if (!this.isInputValid()) {
|
||||
return
|
||||
}
|
||||
|
||||
|
|
|
@ -1,19 +1,24 @@
|
|||
import moment from '@baserow/modules/core/moment'
|
||||
import {
|
||||
getDateMomentFormat,
|
||||
getTimeMomentFormat,
|
||||
getCellTimezoneAbbr,
|
||||
getFieldTimezone,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
import moment from '@baserow/modules/core/moment'
|
||||
|
||||
export default {
|
||||
methods: {
|
||||
getTimezone(field) {
|
||||
return field.timezone || 'UTC'
|
||||
},
|
||||
getDate(field, value) {
|
||||
if (value === null || value === undefined) {
|
||||
return ''
|
||||
}
|
||||
const existing = moment.tz(value || undefined, this.getTimezone(field))
|
||||
|
||||
const timezone = getFieldTimezone(field)
|
||||
let existing = moment.utc(value, moment.ISO_8601, true)
|
||||
if (timezone) {
|
||||
existing = existing.utcOffset(moment.tz(timezone).utcOffset())
|
||||
}
|
||||
|
||||
const dateFormat = getDateMomentFormat(field.date_format)
|
||||
return existing.format(dateFormat)
|
||||
},
|
||||
|
@ -22,9 +27,17 @@ export default {
|
|||
return ''
|
||||
}
|
||||
|
||||
const existing = moment.tz(value || undefined, this.getTimezone(field))
|
||||
const timezone = getFieldTimezone(field)
|
||||
let existing = moment.utc(value, moment.ISO_8601, true)
|
||||
if (timezone) {
|
||||
existing = existing.utcOffset(moment.tz(timezone).utcOffset())
|
||||
}
|
||||
|
||||
const timeFormat = getTimeMomentFormat(field.date_time_format)
|
||||
return existing.format(timeFormat)
|
||||
},
|
||||
getCellTimezoneAbbr(field, value) {
|
||||
return getCellTimezoneAbbr(field, value)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
export default (client) => {
|
||||
return {
|
||||
create(groupId, shareURL, timezone) {
|
||||
create(groupId, shareURL) {
|
||||
return client.post(`/jobs/`, {
|
||||
type: 'airtable',
|
||||
group_id: groupId,
|
||||
airtable_share_url: shareURL,
|
||||
timezone,
|
||||
})
|
||||
},
|
||||
}
|
||||
|
|
|
@ -480,7 +480,7 @@ export const actions = {
|
|||
// If the value is not provided, then we use the default value related to the type.
|
||||
if (!Object.prototype.hasOwnProperty.call(values, 'value')) {
|
||||
const viewFilterType = this.$registry.get('viewFilter', values.type)
|
||||
values.value = viewFilterType.getDefaultValue()
|
||||
values.value = viewFilterType.getDefaultValue(field)
|
||||
}
|
||||
|
||||
// Some filter input components expect the preload values to exist, that's why we
|
||||
|
|
|
@ -1819,7 +1819,6 @@ export const actions = {
|
|||
const textValue = textData[rowIndex][fieldIndex]
|
||||
const jsonValue =
|
||||
jsonData != null ? jsonData[rowIndex][fieldIndex] : undefined
|
||||
|
||||
const fieldType = this.$registry.get('field', field.type)
|
||||
const preparedValue = fieldType.prepareValueForPaste(
|
||||
field,
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import moment from '@baserow/modules/core/moment'
|
||||
|
||||
const dateMapping = {
|
||||
EU: {
|
||||
momentFormat: 'DD/MM/YYYY',
|
||||
|
@ -51,3 +53,93 @@ export const getTimeHumanReadableFormat = (type) => {
|
|||
}
|
||||
return timeMapping[type].humanFormat
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the timezone for a given field. If the field includes a time but has no
|
||||
* forced timezone, the user's guessed timezone is returned; date-only fields return null.
|
||||
*
|
||||
* @param {Object} field The field object
|
||||
* @returns {String|null} The timezone for the field, or null for date-only fields
|
||||
* @example
|
||||
* getFieldTimezone({ date_include_time: true, date_force_timezone: 'Europe/Amsterdam' }) // => 'Europe/Amsterdam'
|
||||
* getFieldTimezone({ date_include_time: false }) // => null
|
||||
*/
|
||||
export const getFieldTimezone = (field) => {
|
||||
return field.date_include_time
|
||||
? field.date_force_timezone || moment.tz.guess()
|
||||
: null
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the timezone abbreviation for a given field and value.
|
||||
* If the value is null or undefined and force=false, an empty string is returned.
|
||||
*
|
||||
* @param {Object} field The field object
|
||||
* @param {String | moment} value The value to parse into a moment object
|
||||
* @param {Object} options
|
||||
* @param {String} options.format The moment format used to render the abbreviation (defaults to 'z')
|
||||
* @param {Boolean} options.force Whether to return the abbreviation even when the value is empty
|
||||
*/
|
||||
export const getCellTimezoneAbbr = (
|
||||
field,
|
||||
value,
|
||||
{ format = 'z', force = false } = {}
|
||||
) => {
|
||||
if (!force && (value === null || value === undefined)) {
|
||||
return ''
|
||||
}
|
||||
const timezone = getFieldTimezone(field)
|
||||
|
||||
return timezone
|
||||
? moment
|
||||
.utc(value || undefined)
|
||||
.tz(timezone)
|
||||
.format(format)
|
||||
: 'UTC'
|
||||
}
|
||||
|
||||
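For illustration, how getCellTimezoneAbbr behaves for a typical datetime field; 'America/New_York' is only an example value and the abbreviation depends on the date's DST status.

import { getCellTimezoneAbbr } from '@baserow/modules/database/utils/date'

const field = { date_include_time: true, date_force_timezone: 'America/New_York' }

getCellTimezoneAbbr(field, '2023-01-15T12:00:00Z') // 'EST'
getCellTimezoneAbbr(field, null) // '' for empty cells
getCellTimezoneAbbr(field, null, { force: true }) // 'EST' or 'EDT' (uses the current date), shown while editing
getCellTimezoneAbbr({ date_include_time: false }, '2023-01-15') // 'UTC' for date-only fields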
/**
|
||||
* Returns a moment object with the correct timezone set.
|
||||
*
|
||||
* @param {Object} field The field object
|
||||
* @param {String | moment} value The value to parse into a moment object
|
||||
* @param {Object} options
|
||||
* @param {String} options.format The moment format used to strictly parse the value
|
||||
* @returns {moment} The moment object
|
||||
*/
|
||||
export const localizeMoment = (field, value, { format = undefined } = {}) => {
|
||||
const timezone = getFieldTimezone(field)
|
||||
|
||||
const date = moment.utc(value, format, true /** strict */)
|
||||
return timezone !== null
|
||||
? date.utcOffset(moment.tz(timezone)).tz(timezone, true)
|
||||
: date
|
||||
}
|
||||
|
||||
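A minimal usage sketch for localizeMoment, assuming moment-timezone data is loaded; the exact offset in the output depends on DST for the chosen date.

import { localizeMoment } from '@baserow/modules/database/utils/date'

const field = { date_include_time: true, date_force_timezone: 'Europe/Amsterdam' }

// The value is parsed strictly with the given format and then re-interpreted in the
// field's timezone while keeping the wall-clock time.
const date = localizeMoment(field, '2023-06-01 10:30', { format: 'YYYY-MM-DD HH:mm' })
date.format() // roughly '2023-06-01T10:30:00+02:00'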
export const DATE_FILTER_TIMEZONE_VALUE_SEPARATOR = '?'
|
||||
|
||||
/**
|
||||
* Splits a combined filter value into its timezone and filter value parts.
|
||||
*
|
||||
* @param {*} value The filter value
|
||||
* @param {*} separator The separator between the timezone and the filter value
|
||||
* @returns {Array} An array with the timezone and the filter value
|
||||
*/
|
||||
export const splitTimezoneAndFilterValue = (
|
||||
value,
|
||||
separator = DATE_FILTER_TIMEZONE_VALUE_SEPARATOR
|
||||
) => {
|
||||
let timezone = null
|
||||
let filterValue
|
||||
|
||||
if (value.includes(separator)) {
|
||||
// if the filter value already contains a timezone, use it
|
||||
;[timezone, filterValue] = value.split(separator)
|
||||
} else {
|
||||
// fallback for values before timezone was added to the filter value
|
||||
filterValue = value
|
||||
}
|
||||
timezone = moment.tz.zone(timezone) ? timezone : null
|
||||
return [timezone, filterValue]
|
||||
}
|
||||
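A few illustrative calls to splitTimezoneAndFilterValue showing the '?' separator and the fallback for legacy values stored before the timezone was added.

import { splitTimezoneAndFilterValue } from '@baserow/modules/database/utils/date'

splitTimezoneAndFilterValue('Europe/Rome?2023-01-15') // ['Europe/Rome', '2023-01-15']
splitTimezoneAndFilterValue('2023-01-15') // [null, '2023-01-15'] (legacy value)
splitTimezoneAndFilterValue('Not/AZone?2023-01-15') // [null, '2023-01-15'] (unknown timezone)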
|
|
|
@ -10,6 +10,10 @@ import ViewFilterTypeTimeZone from '@baserow/modules/database/components/view/Vi
|
|||
import ViewFilterTypeNumberWithTimeZone from '@baserow/modules/database/components/view/ViewFilterTypeNumberWithTimeZone'
|
||||
import ViewFilterTypeLinkRow from '@baserow/modules/database/components/view/ViewFilterTypeLinkRow'
|
||||
import { trueString } from '@baserow/modules/database/utils/constants'
|
||||
import {
|
||||
splitTimezoneAndFilterValue,
|
||||
DATE_FILTER_TIMEZONE_VALUE_SEPARATOR,
|
||||
} from '@baserow/modules/database/utils/date'
|
||||
import { isNumeric } from '@baserow/modules/core/utils/string'
|
||||
import ViewFilterTypeFileTypeDropdown from '@baserow/modules/database/components/view/ViewFilterTypeFileTypeDropdown'
|
||||
import ViewFilterTypeCollaborators from '@baserow/modules/database/components/view/ViewFilterTypeCollaborators'
|
||||
|
@ -70,7 +74,7 @@ export class ViewFilterType extends Registerable {
|
|||
* almost all cases this should be an empty string, but with timezone sensitive
|
||||
* filters we might want to use the current timezone.
|
||||
*/
|
||||
getDefaultValue() {
|
||||
getDefaultValue(field) {
|
||||
return ''
|
||||
}
|
||||
|
||||
|
@ -78,7 +82,7 @@ export class ViewFilterType extends Registerable {
|
|||
* Optionally, right before updating the string value can be prepared. This could for
|
||||
* example be used to convert the value to a number.
|
||||
*/
|
||||
prepareValue(value) {
|
||||
prepareValue(value, field) {
|
||||
return value
|
||||
}
|
||||
|
||||
|
@ -443,7 +447,31 @@ export class LengthIsLowerThanViewFilterType extends ViewFilterType {
|
|||
}
|
||||
}
|
||||
|
||||
export class DateEqualViewFilterType extends ViewFilterType {
|
||||
class LocalizedDateViewFilterType extends ViewFilterType {
|
||||
getSeparator() {
|
||||
return DATE_FILTER_TIMEZONE_VALUE_SEPARATOR
|
||||
}
|
||||
|
||||
getDateFormat() {
|
||||
return 'YYYY-MM-DD'
|
||||
}
|
||||
|
||||
getDefaultTimezone(field) {
|
||||
return field.date_force_timezone || moment.tz.guess()
|
||||
}
|
||||
|
||||
splitTimezoneAndValue(value) {
|
||||
return splitTimezoneAndFilterValue(value, this.getSeparator())
|
||||
}
|
||||
|
||||
prepareValue(value, field, filterChanged = false) {
|
||||
const [, filterValue] = this.splitTimezoneAndValue(value)
|
||||
const timezone = this.getDefaultTimezone(field)
|
||||
return value && !filterChanged ? value : `${timezone}?${filterValue}`
|
||||
}
|
||||
}
|
||||
|
||||
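A short standalone restatement of LocalizedDateViewFilterType.prepareValue, to make clear when the timezone prefix is (re)computed; the field values here are examples only.

import moment from '@baserow/modules/core/moment'
import { splitTimezoneAndFilterValue } from '@baserow/modules/database/utils/date'

function prepareLocalizedValue(value, field, filterChanged = false) {
  const [, filterValue] = splitTimezoneAndFilterValue(value)
  const timezone = field.date_force_timezone || moment.tz.guess()
  // An existing combined value is kept as-is unless the filter itself changed.
  return value && !filterChanged ? value : `${timezone}?${filterValue}`
}

const field = { date_force_timezone: 'Europe/Amsterdam' }
prepareLocalizedValue('Europe/Rome?2023-01-15', field) // 'Europe/Rome?2023-01-15'
prepareLocalizedValue('Europe/Rome?2023-01-15', field, true) // 'Europe/Amsterdam?2023-01-15'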
export class DateEqualViewFilterType extends LocalizedDateViewFilterType {
|
||||
static getType() {
|
||||
return 'date_equal'
|
||||
}
|
||||
|
@ -472,127 +500,22 @@ export class DateEqualViewFilterType extends ViewFilterType {
|
|||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
if (rowValue === null) {
|
||||
rowValue = ''
|
||||
}
|
||||
|
||||
if (field.timezone) {
|
||||
rowValue = moment.utc(rowValue).tz(field.timezone).format('YYYY-MM-DD')
|
||||
} else {
|
||||
rowValue = rowValue.toString().toLowerCase().trim()
|
||||
rowValue = rowValue.slice(0, 10)
|
||||
}
|
||||
|
||||
return filterValue === '' || rowValue === filterValue
|
||||
}
|
||||
}
|
||||
|
||||
export class DateBeforeViewFilterType extends ViewFilterType {
|
||||
static getType() {
|
||||
return 'date_before'
|
||||
}
|
||||
|
||||
getName() {
|
||||
const { i18n } = this.app
|
||||
return i18n.t('viewFilter.isBeforeDate')
|
||||
}
|
||||
|
||||
getExample() {
|
||||
return '2020-01-01'
|
||||
}
|
||||
|
||||
getInputComponent() {
|
||||
return ViewFilterTypeDate
|
||||
}
|
||||
|
||||
getCompatibleFieldTypes() {
|
||||
return [
|
||||
'date',
|
||||
'last_modified',
|
||||
'created_on',
|
||||
FormulaFieldType.compatibleWithFormulaTypes('date'),
|
||||
]
|
||||
}
|
||||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
// parse the provided string values as moment objects in order to make
|
||||
// date comparisons
|
||||
let rowDate = moment.utc(rowValue)
|
||||
const filterDate = moment.utc(filterValue)
|
||||
|
||||
if (field.timezone) {
|
||||
rowDate = rowDate.tz(field.timezone)
|
||||
}
|
||||
|
||||
// if the filter date is not a valid date we can immediately return
|
||||
// true because without a valid date the filter won't be applied
|
||||
if (!filterDate.isValid()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// if the row value is null or the rowDate is not valid we can immediately return
|
||||
// false since it does not match the filter and the row won't be in the resultset
|
||||
if (rowValue === null || !rowDate.isValid()) {
|
||||
return false
|
||||
}
|
||||
|
||||
return rowDate.isBefore(filterDate, 'day')
|
||||
const [timezone, dateValue] = this.splitTimezoneAndValue(filterValue)
|
||||
const filterDate = moment.utc(dateValue, this.getDateFormat(), true)
|
||||
const rowDate = moment.utc(rowValue)
|
||||
if (timezone !== null) {
|
||||
filterDate.tz(timezone, true)
|
||||
rowDate.tz(timezone)
|
||||
}
|
||||
|
||||
return dateValue === '' || rowDate.isSame(filterDate, 'date')
|
||||
}
|
||||
}
|
||||
|
||||
export class DateAfterViewFilterType extends ViewFilterType {
|
||||
static getType() {
|
||||
return 'date_after'
|
||||
}
|
||||
|
||||
getName() {
|
||||
const { i18n } = this.app
|
||||
return i18n.t('viewFilter.isAfterDate')
|
||||
}
|
||||
|
||||
getExample() {
|
||||
return '2020-01-01'
|
||||
}
|
||||
|
||||
getInputComponent() {
|
||||
return ViewFilterTypeDate
|
||||
}
|
||||
|
||||
getCompatibleFieldTypes() {
|
||||
return [
|
||||
'date',
|
||||
'last_modified',
|
||||
'created_on',
|
||||
FormulaFieldType.compatibleWithFormulaTypes('date'),
|
||||
]
|
||||
}
|
||||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
// parse the provided string values as moment objects in order to make
|
||||
// date comparisons
|
||||
let rowDate = moment.utc(rowValue)
|
||||
const filterDate = moment.utc(filterValue)
|
||||
|
||||
if (field.timezone) {
|
||||
rowDate = rowDate.tz(field.timezone)
|
||||
}
|
||||
|
||||
// if the filter date is not a valid date we can immediately return
|
||||
// true because without a valid date the filter won't be applied
|
||||
if (!filterDate.isValid()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// if the row value is null or the rowDate is not valid we can immediately return
|
||||
// false since it does not match the filter and the row won't be in the resultset
|
||||
if (rowValue === null || !rowDate.isValid()) {
|
||||
return false
|
||||
}
|
||||
|
||||
return rowDate.isAfter(filterDate, 'day')
|
||||
}
|
||||
}
|
||||
|
||||
export class DateNotEqualViewFilterType extends ViewFilterType {
|
||||
export class DateNotEqualViewFilterType extends LocalizedDateViewFilterType {
|
||||
static getType() {
|
||||
return 'date_not_equal'
|
||||
}
|
||||
|
@ -621,24 +544,126 @@ export class DateNotEqualViewFilterType extends ViewFilterType {
|
|||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
if (rowValue === null) {
|
||||
rowValue = ''
|
||||
return true
|
||||
}
|
||||
|
||||
if (field.timezone) {
|
||||
rowValue = moment.utc(rowValue).tz(field.timezone).format('YYYY-MM-DD')
|
||||
} else {
|
||||
rowValue = rowValue.toString().toLowerCase().trim()
|
||||
rowValue = rowValue.slice(0, 10)
|
||||
const [timezone, dateValue] = this.splitTimezoneAndValue(filterValue)
|
||||
const filterDate = moment.utc(dateValue, this.getDateFormat(), true)
|
||||
const rowDate = moment.utc(rowValue)
|
||||
|
||||
if (timezone !== null) {
|
||||
filterDate.tz(timezone, true)
|
||||
rowDate.tz(timezone)
|
||||
}
|
||||
|
||||
return filterValue === '' || rowValue !== filterValue
|
||||
return dateValue === '' || !rowDate.isSame(filterDate, 'date')
|
||||
}
|
||||
}
|
||||
|
||||
export class DateBeforeViewFilterType extends LocalizedDateViewFilterType {
|
||||
static getType() {
|
||||
return 'date_before'
|
||||
}
|
||||
|
||||
getName() {
|
||||
const { i18n } = this.app
|
||||
return i18n.t('viewFilter.isBeforeDate')
|
||||
}
|
||||
|
||||
getExample() {
|
||||
return '2020-01-01'
|
||||
}
|
||||
|
||||
getInputComponent() {
|
||||
return ViewFilterTypeDate
|
||||
}
|
||||
|
||||
getCompatibleFieldTypes() {
|
||||
return [
|
||||
'date',
|
||||
'last_modified',
|
||||
'created_on',
|
||||
FormulaFieldType.compatibleWithFormulaTypes('date'),
|
||||
]
|
||||
}
|
||||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
const [timezone, dateValue] = this.splitTimezoneAndValue(filterValue)
|
||||
const filterDate = moment.utc(dateValue, this.getDateFormat(), true)
|
||||
const rowDate = moment.utc(rowValue)
|
||||
|
||||
// without a valid date the filter won't be applied
|
||||
if (!filterDate.isValid()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// an invalid date will be filtered out
|
||||
if (rowValue === null || !rowDate.isValid()) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (timezone !== null) {
|
||||
filterDate.tz(timezone, true)
|
||||
rowDate.tz(timezone)
|
||||
}
|
||||
|
||||
return rowDate.isBefore(filterDate, 'day')
|
||||
}
|
||||
}
|
||||
|
||||
export class DateAfterViewFilterType extends LocalizedDateViewFilterType {
|
||||
static getType() {
|
||||
return 'date_after'
|
||||
}
|
||||
|
||||
getName() {
|
||||
const { i18n } = this.app
|
||||
return i18n.t('viewFilter.isAfterDate')
|
||||
}
|
||||
|
||||
getExample() {
|
||||
return '2020-01-01'
|
||||
}
|
||||
|
||||
getInputComponent() {
|
||||
return ViewFilterTypeDate
|
||||
}
|
||||
|
||||
getCompatibleFieldTypes() {
|
||||
return [
|
||||
'date',
|
||||
'last_modified',
|
||||
'created_on',
|
||||
FormulaFieldType.compatibleWithFormulaTypes('date'),
|
||||
]
|
||||
}
|
||||
|
||||
matches(rowValue, filterValue, field, fieldType) {
|
||||
const [timezone, dateValue] = this.splitTimezoneAndValue(filterValue)
|
||||
const filterDate = moment.utc(dateValue, this.getDateFormat(), true)
|
||||
const rowDate = moment.utc(rowValue)
|
||||
if (timezone !== null) {
|
||||
filterDate.tz(timezone, true)
|
||||
rowDate.tz(timezone)
|
||||
}
|
||||
|
||||
// without a valid date the filter won't be applied
|
||||
if (!filterDate.isValid()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// an invalid date will be filtered out
|
||||
if (rowValue === null || !rowDate.isValid()) {
|
||||
return false
|
||||
}
|
||||
return rowDate.isAfter(filterDate, 'day')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for filters that compare dates with today.
|
||||
*/
|
||||
export class DateCompareTodayViewFilterType extends ViewFilterType {
|
||||
export class DateCompareTodayViewFilterType extends LocalizedDateViewFilterType {
|
||||
static getType() {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
@ -647,7 +672,7 @@ export class DateCompareTodayViewFilterType extends ViewFilterType {
|
|||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
getCompareFunction() {
|
||||
isDateMatching(rowValue, today) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
|
@ -664,37 +689,32 @@ export class DateCompareTodayViewFilterType extends ViewFilterType {
|
|||
]
|
||||
}
|
||||
|
||||
getDefaultValue() {
|
||||
return new Intl.DateTimeFormat().resolvedOptions().timeZone
|
||||
getDefaultValue(field) {
|
||||
return this.getDefaultTimezone(field)
|
||||
}
|
||||
|
||||
prepareValue() {
|
||||
return this.getDefaultValue()
|
||||
prepareValue(value, field, filterChanged = false) {
|
||||
return value && !filterChanged ? value : `${this.getDefaultValue(field)}?`
|
||||
}
|
||||
|
||||
getExample() {
|
||||
return ''
|
||||
}
|
||||
|
||||
getSliceLength() {
|
||||
// 10: YYYY-MM-DD, 7: YYYY-MM, 4: YYYY
|
||||
return 10
|
||||
return 'UTC'
|
||||
}
|
||||
|
||||
matches(rowValue, filterValue, field) {
|
||||
if (rowValue === null) {
|
||||
if (rowValue === null || !moment.utc(rowValue).isValid()) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (field.timezone) {
|
||||
rowValue = moment.utc(rowValue).tz(field.timezone)
|
||||
} else {
|
||||
rowValue = rowValue.toString().toLowerCase().trim()
|
||||
rowValue = moment.utc(rowValue.slice(0, this.getSliceLength()))
|
||||
}
|
||||
const [timezone] = this.splitTimezoneAndValue(filterValue)
|
||||
|
||||
const today = moment().tz(filterValue)
|
||||
return this.getCompareFunction(rowValue, today)
|
||||
const rowDate = moment.utc(rowValue)
|
||||
const today = moment.utc()
|
||||
if (timezone !== null) {
|
||||
today.tz(timezone)
|
||||
rowDate.tz(timezone)
|
||||
}
|
||||
return this.isDateMatching(rowDate, today)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -708,10 +728,10 @@ export class DateEqualsTodayViewFilterType extends DateCompareTodayViewFilterTyp
|
|||
return i18n.t('viewFilter.isToday')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const minTime = today.clone().startOf('day')
|
||||
const maxtime = today.clone().endOf('day')
|
||||
return value.isBetween(minTime, maxtime, null, '[]')
|
||||
return rowValue.isBetween(minTime, maxtime, null, '[]')
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -725,9 +745,9 @@ export class DateBeforeTodayViewFilterType extends DateCompareTodayViewFilterTyp
|
|||
return i18n.t('viewFilter.beforeToday')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const minTime = today.clone().startOf('day')
|
||||
return value.isBefore(minTime)
|
||||
return rowValue.isBefore(minTime)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -741,9 +761,9 @@ export class DateAfterTodayViewFilterType extends DateCompareTodayViewFilterType
|
|||
return i18n.t('viewFilter.afterToday')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const maxtime = today.clone().endOf('day')
|
||||
return value.isAfter(maxtime)
|
||||
return rowValue.isAfter(maxtime)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -757,10 +777,10 @@ export class DateEqualsCurrentWeekViewFilterType extends DateCompareTodayViewFil
|
|||
return i18n.t('viewFilter.inThisWeek')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const firstDay = today.clone().startOf('isoWeek')
|
||||
const lastDay = today.clone().endOf('isoWeek')
|
||||
return value.isBetween(firstDay, lastDay, null, '[]')
|
||||
return rowValue.isBetween(firstDay, lastDay, null, '[]')
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -774,10 +794,10 @@ export class DateEqualsCurrentMonthViewFilterType extends DateCompareTodayViewFi
|
|||
return i18n.t('viewFilter.inThisMonth')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const firstDay = today.clone().startOf('month')
|
||||
const lastDay = today.clone().endOf('month')
|
||||
return value.isBetween(firstDay, lastDay, null, '[]')
|
||||
return rowValue.isBetween(firstDay, lastDay, null, '[]')
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -791,21 +811,17 @@ export class DateEqualsCurrentYearViewFilterType extends DateEqualsTodayViewFilt
|
|||
return i18n.t('viewFilter.inThisYear')
|
||||
}
|
||||
|
||||
getCompareFunction(value, today) {
|
||||
isDateMatching(rowValue, today) {
|
||||
const firstDay = today.clone().startOf('year')
|
||||
const lastDay = today.clone().endOf('year')
|
||||
return value.isBetween(firstDay, lastDay, null, '[]')
|
||||
return rowValue.isBetween(firstDay, lastDay, null, '[]')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for the days, months and years ago filters.
|
||||
*/
|
||||
export class DateEqualsXAgoViewFilterType extends ViewFilterType {
|
||||
getSeparator() {
|
||||
return '?'
|
||||
}
|
||||
|
||||
export class DateEqualsXAgoViewFilterType extends LocalizedDateViewFilterType {
|
||||
getInputComponent() {
|
||||
return ViewFilterTypeNumberWithTimeZone
|
||||
}
|
||||
|
@ -820,38 +836,41 @@ export class DateEqualsXAgoViewFilterType extends ViewFilterType {
|
|||
}
|
||||
|
||||
getExample() {
|
||||
const tzone = new Intl.DateTimeFormat().resolvedOptions().timeZone
|
||||
const tzone = moment.tz.guess()
|
||||
const xAgo = 1
|
||||
return `${tzone}${this.getSeparator()}${xAgo}`
|
||||
}
|
||||
|
||||
getValidNumberWithTimezone(rawValue = null) {
|
||||
let tzone, xAgo, rawXAgo
|
||||
// keep the original filter timezone if any, otherwise take the default from the browser
|
||||
if (rawValue) {
|
||||
;[tzone, rawXAgo] = rawValue.split(this.getSeparator())
|
||||
xAgo = parseInt(rawXAgo)
|
||||
} else {
|
||||
tzone = new Intl.DateTimeFormat().resolvedOptions().timeZone
|
||||
splitTimezoneAndXago(field, rawValue) {
|
||||
const [timezone, value] = this.splitTimezoneAndValue(rawValue)
|
||||
|
||||
let filterValue = value
|
||||
if (filterValue !== null) {
|
||||
filterValue = parseInt(filterValue)
|
||||
}
|
||||
xAgo = isNaN(xAgo) ? '' : xAgo
|
||||
return `${tzone}${this.getSeparator()}${xAgo}`
|
||||
|
||||
filterValue = isNaN(filterValue) ? '' : filterValue
|
||||
return [timezone, filterValue]
|
||||
}
|
||||
|
||||
getDefaultValue() {
|
||||
return this.getValidNumberWithTimezone()
|
||||
getValidNumberWithTimezone(rawValue, field) {
|
||||
const [timezone, filterValue] = this.splitTimezoneAndXago(field, rawValue)
|
||||
return `${timezone}${this.getSeparator()}${filterValue}`
|
||||
}
|
||||
|
||||
prepareValue(value) {
|
||||
return this.getValidNumberWithTimezone(value)
|
||||
getDefaultValue(field) {
|
||||
return this.getValidNumberWithTimezone(null, field)
|
||||
}
|
||||
|
||||
getSliceLength() {
|
||||
// 10: YYYY-MM-DD, 7: YYYY-MM, 4: YYYY
|
||||
prepareValue(value, field) {
|
||||
return this.getValidNumberWithTimezone(value, field)
|
||||
}
|
||||
|
||||
getDateToCompare(xAgo) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
getWhen(xAgo, timezone, format) {
|
||||
isDateMatching(rowValue, dateToCompare) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
|
@@ -860,32 +879,20 @@ export class DateEqualsXAgoViewFilterType extends ViewFilterType
      rowValue = ''
    }

    const separator = this.getSeparator()
    if (filterValue.includes(separator) === -1) {
      return true
    }

    const [rawTimezone, rawXAgo] = filterValue.split(separator)
    const timezone = moment.tz.zone(rawTimezone) ? rawTimezone : 'UTC'
    const xAgo = parseInt(rawXAgo)
    const [timezone, xAgo] = this.splitTimezoneAndXago(field, filterValue)

    // an invalid daysAgo will result in an empty filter
    if (isNaN(xAgo)) {
    if (xAgo === '') {
      return true
    }

    const sliceLength = this.getSliceLength()
    const format = 'YYYY-MM-DD'.slice(0, sliceLength)
    const when = this.getWhen(xAgo, timezone, format)

    if (field.timezone) {
      rowValue = moment.utc(rowValue).tz(field.timezone).format(format)
    } else {
      rowValue = rowValue.toString().toLowerCase().trim()
      rowValue = rowValue.slice(0, sliceLength)
    const dateToCompare = this.getDateToCompare(xAgo)
    const rowDate = moment.utc(rowValue)
    if (timezone) {
      dateToCompare.tz(timezone)
      rowDate.tz(timezone)
    }

    return rowValue === when
    return this.isDateMatching(rowDate, dateToCompare)
  }
}

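The rewritten matches() above resolves the timezone once, builds the reference date with getDateToCompare(), and leaves the actual comparison to isDateMatching(). A rough self-contained sketch of that flow for a "days ago" comparison, assuming moment-timezone (the function name and example values are illustrative only):

    const moment = require('moment-timezone')

    function matchesDaysAgo(rowValue, timezone, xAgo) {
      // Reference date: "xAgo days before now", kept in UTC until a timezone is known.
      const dateToCompare = moment.utc().subtract(xAgo, 'days')
      const rowDate = moment.utc(rowValue)
      if (timezone) {
        // Shift both sides into the filter's timezone before comparing calendar days.
        dateToCompare.tz(timezone)
        rowDate.tz(timezone)
      }
      return rowDate.isSame(dateToCompare, 'day')
    }

    // e.g. matchesDaysAgo('2023-01-10T23:30:00Z', 'Europe/Berlin', 0) compares both
    // sides as local Berlin calendar dates rather than raw UTC strings.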
@@ -899,12 +906,12 @@ export class DateEqualsDaysAgoViewFilterType extends DateEqualsXAgoViewFilterType
    return i18n.t('viewFilter.isDaysAgo')
  }

  getWhen(xAgo, timezone, format) {
    return moment().tz(timezone).subtract(parseInt(xAgo), 'days').format(format)
  getDateToCompare(xAgo) {
    return moment.utc().subtract(parseInt(xAgo), 'days')
  }

  getSliceLength() {
    return 10
  isDateMatching(rowValue, dateToCompare) {
    return rowValue.isSame(dateToCompare, 'day')
  }
}

@@ -918,15 +925,12 @@ export class DateEqualsMonthsAgoViewFilterType extends DateEqualsXAgoViewFilterType
    return i18n.t('viewFilter.isMonthsAgo')
  }

  getWhen(xAgo, timezone, format) {
    return moment()
      .tz(timezone)
      .subtract(parseInt(xAgo), 'months')
      .format(format)
  getDateToCompare(xAgo) {
    return moment.utc().subtract(parseInt(xAgo), 'months')
  }

  getSliceLength() {
    return 7
  isDateMatching(rowValue, dateToCompare) {
    return rowValue.isSame(dateToCompare, 'month')
  }
}

@@ -940,19 +944,16 @@ export class DateEqualsYearsAgoViewFilterType extends DateEqualsXAgoViewFilterType
    return i18n.t('viewFilter.isYearsAgo')
  }

  getWhen(xAgo, timezone, format) {
    return moment()
      .tz(timezone)
      .subtract(parseInt(xAgo), 'years')
      .format(format)
  getDateToCompare(xAgo) {
    return moment.utc().subtract(parseInt(xAgo), 'years')
  }

  getSliceLength() {
    return 4
  isDateMatching(rowValue, dateToCompare) {
    return rowValue.isSame(dateToCompare, 'year')
  }
}

export class DateEqualsDayOfMonthViewFilterType extends ViewFilterType {
export class DateEqualsDayOfMonthViewFilterType extends LocalizedDateViewFilterType {
  static getType() {
    return 'date_equals_day_of_month'
  }

@@ -967,35 +968,38 @@ export class DateEqualsDayOfMonthViewFilterType extends ViewFilterType
  }

  getInputComponent() {
    return ViewFilterTypeNumber
    return ViewFilterTypeNumberWithTimeZone
  }

  isDateMatching(rowValue, dayOfMonth) {
    return rowValue.date() === dayOfMonth
  }

  getCompatibleFieldTypes() {
    return ['date', 'last_modified', 'created_on']
  }

  matches(rowValue, filterValue, field) {
    // Check if the filter value is empty and immediately return true
    if (filterValue === '') {
  matches(rowValue, filterValue, field, fieldType) {
    if (rowValue === null) {
      rowValue = ''
    }

    const [timezone, rawDayOfMonth] = this.splitTimezoneAndValue(filterValue)
    if (rawDayOfMonth === '') {
      return true
    }

    // an invalid daysAgo will result in an empty filter
    const dayOfMonth = parseInt(rawDayOfMonth)
    if (isNaN(dayOfMonth) || dayOfMonth < 1 || dayOfMonth > 31) {
      return false
    }

    let rowDate = moment.utc(rowValue)

    if (field.timezone) {
      rowDate = rowDate.tz(field.timezone)
    if (timezone !== null) {
      rowDate = rowDate.tz(timezone)
    }

    // Check if the row's date matches the filter value
    // in either the D (1) or DD (01) format for the day of month
    if (
      rowDate.format('D') === filterValue ||
      rowDate.format('DD') === filterValue
    ) {
      return true
    }

    return false
    return this.isDateMatching(rowDate, dayOfMonth)
  }
}

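The day-of-month filter now follows the same pattern as the other filters: split the timezone off the raw value, shift the row's UTC timestamp into that timezone, then compare calendar fields. A minimal sketch under those assumptions (not the exact merged code):

    const moment = require('moment-timezone')

    function matchesDayOfMonth(rowValue, timezone, dayOfMonth) {
      // Out-of-range or non-numeric values never match.
      if (isNaN(dayOfMonth) || dayOfMonth < 1 || dayOfMonth > 31) {
        return false
      }
      let rowDate = moment.utc(rowValue)
      if (timezone !== null) {
        rowDate = rowDate.tz(timezone)
      }
      return rowDate.date() === dayOfMonth
    }

    // e.g. matchesDayOfMonth('2021-08-31T23:30:00Z', 'Europe/Berlin', 1) is true,
    // because 23:30 UTC on the 31st is already September 1st in Berlin (UTC+2).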
@@ -22,271 +22,239 @@ import {
  DateEqualsCurrentYearViewFilterType,
} from '@baserow/modules/database/viewFilters'

const dateBeforeCasesWithTimezone = [
const dateBeforeCases = [
  {
    rowValue: '2021-08-10T21:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-10T22:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10T22:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-10T23:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: false,
  },
]

const dateBeforeCasesWithoutTimezone = [
  {
    rowValue: '2021-08-10T23:59:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T00:01:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
]

const dateAfterCasesWithTimezone = [
const dateAfterCases = [
  {
    rowValue: '2021-08-11T22:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-12',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11T23:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T21:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11T22:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    expected: false,
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
]

const dateAfterCasesWithoutTimezone = [
  {
    rowValue: '2021-08-12T00:01:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-12',
    filterValue: '2021-08-11',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T23:59:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
]

const dateEqualCasesWithTimezone = [
const dateEqualCases = [
  {
    rowValue: '2021-08-11T21:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T22:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10T22:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10T23:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10T21:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-10T22:59:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: false,
  },
]

const dateEqualWithoutTimezone = [
  {
    rowValue: '2021-08-11T23:59:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    filterValue: 'CET?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T00:01:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-12T00:01:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-12',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
]

const dateNotEqualCasesWithTimezone = [
const dateNotEqualCases = [
  {
    rowValue: '2021-08-11T22:30:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T21:30:37.940086Z',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-12',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11T23:30:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-10T22:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-10T23:01:37.940086Z',
    filterValue: '2021-08-11',
    timezone: 'Europe/London',
    filterValue: 'Europe/London?2021-08-11',
    expected: false,
  },
]

const dateNotEqualCasesWithoutTimezone = [
  {
    rowValue: '2021-08-11T23:59:37.940086Z',
    filterValue: '2021-08-12',
    filterValue: 'UTC?2021-08-12',
    expected: true,
  },
  {
    rowValue: '2021-08-13T00:01:37.940086Z',
    filterValue: '2021-08-12',
    filterValue: 'UTC?2021-08-12',
    expected: true,
  },
  {
    rowValue: '2021-08-10',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-12',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: true,
  },
  {
    rowValue: '2021-08-11T22:59:37.940086Z',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
  {
    rowValue: '2021-08-11',
    filterValue: '2021-08-11',
    filterValue: 'UTC?2021-08-11',
    expected: false,
  },
]

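In all of the test data above the timezone now travels inside filterValue, separated from the date by '?'. As a quick sanity check of the first dateBeforeCases entry (a sketch, assuming moment-timezone is available):

    // '2021-08-10T21:59:37Z' is 23:59 on 2021-08-10 in Europe/Berlin (UTC+2),
    // so it falls before the filter date 2021-08-11 and the case expects true.
    const moment = require('moment-timezone')
    moment.utc('2021-08-10T21:59:37.940086Z').tz('Europe/Berlin').format()
    // -> '2021-08-10T23:59:37+02:00'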
@@ -376,12 +344,32 @@ const dateInThisWeek = [
const dateInThisMonth = [
  {
    rowValue: '2022-05-01T12:00:00.000000Z',
    filterValue: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?',
    expected: false,
  },
  {
    rowValue: '2022-05-31T21:59:00.000000Z',
    filterValue: 'Europe/Berlin?',
    expected: false,
  },
  {
    rowValue: '2022-06-01T12:00:00.000000Z',
    filterValue: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?',
    expected: true,
  },
  {
    rowValue: '2022-06-30T21:59:00.000000Z',
    filterValue: 'Europe/Berlin?',
    expected: true,
  },
  {
    rowValue: '2022-07-01T00:01:00.000000Z',
    filterValue: 'Europe/Berlin?',
    expected: false,
  },
  {
    rowValue: '2022-05-31T23:59:00.000000Z',
    filterValue: 'Europe/London?',
    expected: true,
  },
]

@@ -389,12 +377,12 @@ const dateInThisMonth = [
const dateInThisYear = [
  {
    rowValue: '2021-06-01T12:00:00.000000Z',
    filterValue: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?',
    expected: false,
  },
  {
    rowValue: '2022-06-01T12:00:00.000000Z',
    filterValue: 'Europe/Berlin',
    filterValue: 'Europe/Berlin?',
    expected: true,
  },
]

@@ -616,8 +604,8 @@ describe('Date in this week, month and year tests', () => {

  beforeAll(() => {
    testApp = new TestApp()
    // Wed Jun 01 2022 00:00:00
    dateNowSpy = jest.spyOn(Date, 'now').mockImplementation(() => 1654038000000)
    // Wed Jun 01 2022 00:00:00 UTC
    dateNowSpy = jest.spyOn(Date, 'now').mockImplementation(() => 1654041600000)
  })

  afterAll(() => {

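For reference, the new mocked timestamp really is midnight UTC rather than midnight Berlin time. A quick check with moment (assuming it is available in the test environment):

    const moment = require('moment')

    moment.utc(1654041600000).toISOString() // -> '2022-06-01T00:00:00.000Z' (new mock)
    moment.utc(1654038000000).toISOString() // -> '2022-05-31T23:00:00.000Z' (old mock, midnight CEST)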
@@ -632,7 +620,7 @@ describe('Date in this week, month and year tests', () => {
    const result = new DateEqualsCurrentWeekViewFilterType({
      app: testApp,
    }).matches(values.rowValue, values.filterValue, {
      timezone: values.timezone,
      date_include_time: true,
    })
    expect(result).toBe(values.expected)
  })

@@ -736,97 +724,41 @@ describe('All Tests', () => {
    testApp.afterEach()
  })

  test.each(dateBeforeCasesWithTimezone)(
    'BeforeViewFilter with Timezone',
    (values) => {
      const result = new DateBeforeViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        { timezone: values.timezone }
      )
      expect(result).toBe(values.expected)
    }
  )

  test.each(dateBeforeCasesWithoutTimezone)(
    'BeforeViewFilter without Timezone',
    (values) => {
      const result = new DateBeforeViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        {}
      )
      expect(result).toBe(values.expected)
    }
  )

  test.each(dateAfterCasesWithTimezone)(
    'AfterViewFilter with Timezone',
    (values) => {
      const result = new DateAfterViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        { timezone: values.timezone }
      )
      expect(result).toBe(values.expected)
    }
  )

  test.each(dateAfterCasesWithoutTimezone)(
    'AfterViewFilter without Timezone',
    (values) => {
      const result = new DateAfterViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        {}
      )
      expect(result).toBe(values.expected)
    }
  )

  test.each(dateEqualCasesWithTimezone)('DateEqual with Timezone', (values) => {
    const result = new DateEqualViewFilterType({ app: testApp }).matches(
  test.each(dateBeforeCases)('BeforeViewFilter', (values) => {
    const result = new DateBeforeViewFilterType({ app: testApp }).matches(
      values.rowValue,
      values.filterValue,
      { timezone: values.timezone }
      { date_include_time: true }
    )
    expect(result).toBe(values.expected)
  })

  test.each(dateEqualWithoutTimezone)(
    'DateEqual without Timezone',
    (values) => {
      const result = new DateEqualViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        { timezone: values.timezone }
      )
      expect(result).toBe(values.expected)
    }
  )
  test.each(dateNotEqualCasesWithTimezone)(
    'DateNotEqual with Timezone',
    (values) => {
      const result = new DateNotEqualViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        { timezone: values.timezone }
      )
      expect(result).toBe(values.expected)
    }
  )
  test.each(dateNotEqualCasesWithoutTimezone)(
    'DateNotEqual without Timezone',
    (values) => {
      const result = new DateNotEqualViewFilterType({ app: testApp }).matches(
        values.rowValue,
        values.filterValue,
        {}
      )
      expect(result).toBe(values.expected)
    }
  )
  test.each(dateAfterCases)('AfterViewFilter with Timezone', (values) => {
    const result = new DateAfterViewFilterType({ app: testApp }).matches(
      values.rowValue,
      values.filterValue,
      { date_include_time: true }
    )
    expect(result).toBe(values.expected)
  })

  test.each(dateEqualCases)('DateEqual', (values) => {
    const result = new DateEqualViewFilterType({ app: testApp }).matches(
      values.rowValue,
      values.filterValue,
      { date_include_time: true }
    )
    expect(result).toBe(values.expected)
  })
  test.each(dateNotEqualCases)('DateNotEqual', (values) => {
    const result = new DateNotEqualViewFilterType({ app: testApp }).matches(
      values.rowValue,
      values.filterValue,
      { date_include_time: true }
    )

    expect(result).toBe(values.expected)
  })
  test.each(dateToday)('DateToday', (values) => {
    const result = new DateEqualsTodayViewFilterType({ app: testApp }).matches(
      values.rowValue,