1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-14 17:18:33 +00:00

Resolve "Converting text to date doesn't respect date format"

This commit is contained in:
Bram Wiepjes 2021-02-18 16:30:37 +00:00
parent 62652bf32b
commit a8ef123ca9
10 changed files with 498 additions and 191 deletions

View file

@ -23,21 +23,21 @@ class PostgresqlLenientDatabaseSchemaEditor:
$$
begin
begin
%(alter_column_prepare_value)s
return %(alert_column_type_function)s::%(type)s;
exception
when others then
return p_default;
%(alter_column_prepare_old_value)s
%(alter_column_prepare_new_value)s
return p_in::%(type)s;
exception when others then
return p_default;
end;
end;
$$
language plpgsql;
"""
def __init__(self, *args, alter_column_prepare_value='',
alert_column_type_function='p_in'):
self.alter_column_prepare_value = alter_column_prepare_value
self.alert_column_type_function = alert_column_type_function
def __init__(self, *args, alter_column_prepare_old_value='',
alter_column_prepare_new_value=''):
self.alter_column_prepare_old_value = alter_column_prepare_old_value
self.alter_column_prepare_new_value = alter_column_prepare_new_value
super().__init__(*args)
def _alter_field(self, model, old_field, new_field, old_type, new_type,
@ -45,24 +45,24 @@ class PostgresqlLenientDatabaseSchemaEditor:
if old_type != new_type:
variables = {}
if isinstance(self.alter_column_prepare_value, tuple):
alter_column_prepare_value, v = self.alter_column_prepare_value
if isinstance(self.alter_column_prepare_old_value, tuple):
alter_column_prepare_old_value, v = self.alter_column_prepare_old_value
variables = {**variables, **v}
else:
alter_column_prepare_value = self.alter_column_prepare_value
alter_column_prepare_old_value = self.alter_column_prepare_old_value
if isinstance(self.alert_column_type_function, tuple):
alert_column_type_function, v = self.alert_column_type_function
if isinstance(self.alter_column_prepare_new_value, tuple):
alter_column_prepare_new_value, v = self.alter_column_prepare_new_value
variables = {**variables, **v}
else:
alert_column_type_function = self.alert_column_type_function
alter_column_prepare_new_value = self.alter_column_prepare_new_value
self.execute(self.sql_drop_try_cast)
self.execute(self.sql_create_try_cast % {
"column": self.quote_name(new_field.column),
"type": new_type,
"alter_column_prepare_value": alter_column_prepare_value,
"alert_column_type_function": alert_column_type_function
"alter_column_prepare_old_value": alter_column_prepare_old_value,
"alter_column_prepare_new_value": alter_column_prepare_new_value
}, variables)
return super()._alter_field(model, old_field, new_field, old_type, new_type,
@ -70,8 +70,8 @@ class PostgresqlLenientDatabaseSchemaEditor:
@contextlib.contextmanager
def lenient_schema_editor(connection, alter_column_prepare_value=None,
alert_column_type_function=None):
def lenient_schema_editor(connection, alter_column_prepare_old_value=None,
alter_column_prepare_new_value=None):
"""
A contextual function that yields a modified version of the connection's schema
editor. This temporary version is more lenient than the regular editor. Normally
@ -83,13 +83,12 @@ def lenient_schema_editor(connection, alter_column_prepare_value=None,
:param connection: The current connection for which to generate the schema editor
for.
:type connection: DatabaseWrapper
:param alter_column_prepare_value: Optionally a query statement converting the
:param alter_column_prepare_old_value: Optionally a query statement converting the
`p_in` value to a string format.
:type alter_column_prepare_value: None or str
:param alert_column_type_function: Optionally the string of a SQL function to
convert the data value to the the new type. The function will have the variable
`p_in` as old value.
:type alert_column_type_function: None or str
:type alter_column_prepare_old_value: None or str
:param alter_column_prepare_new_value: Optionally a query statement converting the
`p_in` text value to the new type.
:type alter_column_prepare_new_value: None or str
:raises ValueError: When the provided connection is not supported. For now only
`postgresql` is supported.
"""
@ -112,11 +111,11 @@ def lenient_schema_editor(connection, alter_column_prepare_value=None,
kwargs = {}
if alter_column_prepare_value:
kwargs['alter_column_prepare_value'] = alter_column_prepare_value
if alter_column_prepare_old_value:
kwargs['alter_column_prepare_old_value'] = alter_column_prepare_old_value
if alert_column_type_function:
kwargs['alert_column_type_function'] = alert_column_type_function
if alter_column_prepare_new_value:
kwargs['alter_column_prepare_new_value'] = alter_column_prepare_new_value
try:
with connection.schema_editor(**kwargs) as schema_editor:

View file

@ -28,8 +28,9 @@ from baserow.contrib.database.api.fields.errors import (
from .handler import FieldHandler
from .registries import FieldType, field_type_registry
from .models import (
NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, TextField, LongTextField, URLField,
NumberField, BooleanField, DateField, LinkRowField, EmailField, FileField,
NUMBER_TYPE_INTEGER, NUMBER_TYPE_DECIMAL, DATE_FORMAT, DATE_TIME_FORMAT,
TextField, LongTextField, URLField, NumberField, BooleanField, DateField,
LinkRowField, EmailField, FileField,
SingleSelectField, SelectOption
)
from .exceptions import (
@ -94,17 +95,18 @@ class URLFieldType(FieldType):
def random_value(self, instance, fake, cache):
return fake.url()
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
return r"""(
return r"""p_in = (
case
when p_in::text ~* '(https?|ftps?)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?'
then p_in::text
else ''
end
)"""
);"""
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
class NumberFieldType(FieldType):
@ -169,7 +171,7 @@ class NumberFieldType(FieldType):
positive=not instance.number_negative
)
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
decimal_places = 0
if to_field.number_type == NUMBER_TYPE_DECIMAL:
@ -180,9 +182,10 @@ class NumberFieldType(FieldType):
if not to_field.number_negative:
function = f"greatest({function}, 0)"
return function
return f'p_in = {function};'
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
def after_update(self, from_field, to_field, from_model, to_model, user, connection,
altered_column, before):
@ -288,6 +291,52 @@ class DateFieldType(FieldType):
else:
return fake.date_object()
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
    """
    When converting away from a date field we render the stored date or
    timestamp as human readable text that follows the field's configured
    date (and optionally time) format, so the text value round-trips.

    :param connection: The database connection, used to check the vendor.
    :param from_field: The date field instance being converted away from.
    :param to_field: The new field instance being converted to.
    :return: An SQL statement assigning the formatted text to `p_in`, or the
        parent implementation's result when no conversion is needed.
    """
    new_field_type = field_type_registry.get_by_model(to_field)
    converting_to_other_type = new_field_type.type != self.type

    if converting_to_other_type and connection.vendor == 'postgresql':
        if from_field.date_include_time:
            # Include the time portion in the rendered text.
            cast_type = 'timestamp'
            pattern = (
                DATE_FORMAT[from_field.date_format]['sql']
                + ' '
                + DATE_TIME_FORMAT[from_field.date_time_format]['sql']
            )
        else:
            cast_type = 'date'
            pattern = DATE_FORMAT[from_field.date_format]['sql']
        return f"""p_in = TO_CHAR(p_in::{cast_type}, '{pattern}');"""

    return super().get_alter_column_prepare_old_value(connection, from_field,
                                                      to_field)
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
    """
    If the field type has changed into a date field then we want to parse the
    old text value following the format of the new field and convert it to a
    date or timestamp. If that fails we want to fallback on the default ::date
    or ::timestamp conversion that has already been added.

    :param connection: The database connection, used to check the vendor.
    :param from_field: The old field instance being converted from.
    :param to_field: The new date field instance being converted to.
    :return: An SQL statement parsing `p_in` into the new type, or the parent
        implementation's result when no conversion is needed.
    """
    from_field_type = field_type_registry.get_by_model(from_field)

    if from_field_type.type != self.type and connection.vendor == 'postgresql':
        sql_function = 'TO_DATE'
        sql_format = DATE_FORMAT[to_field.date_format]['sql']

        if to_field.date_include_time:
            sql_function = 'TO_TIMESTAMP'
            sql_format += ' ' + DATE_TIME_FORMAT[to_field.date_time_format]['sql']

        # FM suppresses blank padding so single digit days/months also parse.
        # The exception block swallows parse failures so the editor's default
        # ::date / ::timestamp cast can still run as a fallback.
        return f"""
            begin
                p_in = {sql_function}(p_in::text, 'FM{sql_format}');
            exception when others then end;
        """

    # Bug fix: this previously delegated to
    # `super().get_alter_column_prepare_old_value`, skipping the base class'
    # *new value* preparation entirely.
    return super().get_alter_column_prepare_new_value(connection, from_field,
                                                      to_field)
class LinkRowFieldType(FieldType):
"""
@ -622,17 +671,18 @@ class EmailFieldType(FieldType):
def random_value(self, instance, fake, cache):
return fake.email()
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
if connection.vendor == 'postgresql':
return r"""(
return r"""p_in = (
case
when p_in::text ~* '[A-Z0-9._+-]+@[A-Z0-9.-]+\.[A-Z]{2,}'
then p_in::text
else ''
end
)"""
);"""
return super().get_alter_column_type_function(connection, from_field, to_field)
return super().get_alter_column_prepare_new_value(connection, from_field,
to_field)
class FileFieldType(FieldType):
@ -811,7 +861,7 @@ class SingleSelectFieldType(FieldType):
)
to_field_values.pop('select_options')
def get_alter_column_prepare_value(self, connection, from_field, to_field):
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
"""
If the new field type isn't a single select field we can convert the plain
text value of the option and maybe that can be used by the new field.
@ -839,9 +889,10 @@ class SingleSelectFieldType(FieldType):
"""
return sql, variables
return super().get_alter_column_prepare_value(connection, from_field, to_field)
return super().get_alter_column_prepare_old_value(connection, from_field,
to_field)
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
"""
If the old field wasn't a single select field we can try to match the old text
values to the new options.
@ -863,15 +914,16 @@ class SingleSelectFieldType(FieldType):
if len(values_mapping) == 0:
return None
return f"""(
return f"""p_in = (
SELECT value FROM (
VALUES {','.join(values_mapping)}
) AS values (key, value)
WHERE key = lower(p_in)
)
);
""", variables
return super().get_alter_column_prepare_value(connection, from_field, to_field)
return super().get_alter_column_prepare_old_value(connection, from_field,
to_field)
def get_order(self, field, field_name, view_sort):
"""

View file

@ -221,9 +221,11 @@ class FieldHandler:
# the lenient schema editor.
with lenient_schema_editor(
connection,
old_field_type.get_alter_column_prepare_value(
old_field_type.get_alter_column_prepare_old_value(
connection, old_field, field),
field_type.get_alter_column_type_function(connection, old_field, field)
field_type.get_alter_column_prepare_new_value(
connection, old_field, field
)
) as schema_editor:
try:
schema_editor.alter_field(from_model, from_model_field,

View file

@ -25,15 +25,18 @@ NUMBER_DECIMAL_PLACES_CHOICES = (
DATE_FORMAT = {
'EU': {
'name': 'European (D/M/Y)',
'format': '%d/%m/%Y'
'format': '%d/%m/%Y',
'sql': 'DD/MM/YYYY'
},
'US': {
'name': 'US (M/D/Y)',
'format': '%m/%d/%Y'
'format': '%m/%d/%Y',
'sql': 'MM/DD/YYYY'
},
'ISO': {
'name': 'ISO (Y-M-D)',
'format': '%Y-%m-%d'
'format': '%Y-%m-%d',
'sql': 'YYYY-MM-DD'
},
}
DATE_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_FORMAT.items()]
@ -41,11 +44,13 @@ DATE_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_FORMAT.items()]
DATE_TIME_FORMAT = {
'24': {
'name': '24 hour',
'format': '%H:%M'
'format': '%H:%M',
'sql': 'HH24:MI'
},
'12': {
'name': '12 hour',
'format': '%I:%M %p'
'format': '%I:%M %p',
'sql': 'HH12:MIAM'
}
}
DATE_TIME_FORMAT_CHOICES = [(k, v['name']) for k, v in DATE_TIME_FORMAT.items()]

View file

@ -189,10 +189,10 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
return None
def get_alter_column_prepare_value(self, connection, from_field, to_field):
def get_alter_column_prepare_old_value(self, connection, from_field, to_field):
"""
Can return a small SQL statement to convert the `p_in` variable to a readable
text format for the new field.
Can return an SQL statement to convert the `p_in` variable to a readable text
format for the new field.
Example: return "p_in = lower(p_in);"
@ -210,15 +210,13 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
return None
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
"""
Can optionally return a SQL function as string to convert the old field's value
when changing the field type. If None is returned no function will be
applied. The connection can be used to see which engine is used, postgresql,
mysql or sqlite.
Can return a SQL statement to convert the `p_in` variable from text to a
desired format for the new field.
Example when a string is converted to a number, the function could be:
REGEXP_REPLACE(p_in, '[^0-9]', '', 'g') which would remove all non numeric
Example when a string is converted to a number, the statement could be:
`REGEXP_REPLACE(p_in, '[^0-9]', '', 'g')` which would remove all non numeric
characters. The p_in variable is the old value as a string.
:param connection: The used connection. This can for example be used to check
@ -228,7 +226,8 @@ class FieldType(MapAPIExceptionsInstanceMixin, APIUrlsInstanceMixin,
:type to_field: Field
:param to_field: The new field instance.
:type to_field: Field
:return: The SQL function to convert the value.
:return: The SQL statement converting the old text value into the correct
format.
:rtype: None or str
"""

View file

@ -24,18 +24,20 @@ def test_lenient_schema_editor():
with lenient_schema_editor(connection) as schema_editor:
assert isinstance(schema_editor, PostgresqlLenientDatabaseSchemaEditor)
assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
assert schema_editor.alter_column_prepare_value == ''
assert schema_editor.alert_column_type_function == 'p_in'
assert schema_editor.alter_column_prepare_old_value == ''
assert schema_editor.alter_column_prepare_new_value == ''
assert connection.SchemaEditorClass != PostgresqlDatabaseSchemaEditor
assert connection.SchemaEditorClass == PostgresqlDatabaseSchemaEditor
with lenient_schema_editor(
connection,
'p_in = p_in;',
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
"p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');",
"p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
) as schema_editor:
assert schema_editor.alter_column_prepare_value == "p_in = p_in;"
assert schema_editor.alert_column_type_function == (
"REGEXP_REPLACE(p_in, 'test', '', 'g')"
assert schema_editor.alter_column_prepare_old_value == (
"p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');"
)
assert schema_editor.alter_column_prepare_new_value == (
"p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
)

View file

@ -0,0 +1,363 @@
import pytest
from pytz import timezone
from datetime import date
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import DateField
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
@pytest.mark.django_db
def test_date_field_type_prepare_value(data_fixture):
    # Verifies that DateFieldType.prepare_value_for_db normalizes all accepted
    # input representations (aware/naive datetimes, date objects and strings)
    # to a UTC datetime when the field includes time, or to a plain date when
    # it does not.
    d = DateFieldType()
    f = data_fixture.create_date_field(date_include_time=True)

    amsterdam = timezone('Europe/Amsterdam')
    utc = timezone('UTC')
    expected_date = make_aware(datetime(2020, 4, 10, 0, 0, 0), utc)
    expected_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)

    # Unparseable strings must be rejected with a ValidationError.
    with pytest.raises(ValidationError):
        assert d.prepare_value_for_db(f, 'TEST')

    # None passes through untouched (empty cell).
    assert d.prepare_value_for_db(f, None) is None

    # Aware datetimes in another timezone are converted to UTC.
    unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
    assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime

    unprepared_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
    assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime

    # Naive datetimes are assumed to already be UTC.
    unprepared_datetime = datetime(2020, 4, 10, 12, 30, 30)
    assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime

    # A plain date becomes midnight UTC.
    unprepared_date = date(2020, 4, 10)
    assert d.prepare_value_for_db(f, unprepared_date) == expected_date

    # String inputs in various date/datetime notations.
    assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
    assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
    assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_datetime
    assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_datetime

    # Without time the same inputs collapse to a plain date.
    f = data_fixture.create_date_field(date_include_time=False)
    expected_date = date(2020, 4, 10)

    unprepared_datetime = datetime(2020, 4, 10, 14, 30, 30)
    assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date

    unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
    assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date

    assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
    assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
    assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_date
    assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_date
@pytest.mark.django_db
def test_date_field_type(data_fixture):
    # End-to-end test of creating date/datetime fields, inserting rows through
    # the row handler and toggling `date_include_time` on existing fields.
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field_handler = FieldHandler()
    row_handler = RowHandler()

    amsterdam = timezone('Europe/Amsterdam')
    utc = timezone('utc')

    date_field_1 = field_handler.create_field(user=user, table=table, type_name='date',
                                              name='Date')
    date_field_2 = field_handler.create_field(user=user, table=table, type_name='date',
                                              name='Datetime', date_include_time=True)

    assert date_field_1.date_include_time is False
    assert date_field_2.date_include_time is True
    assert len(DateField.objects.all()) == 2

    model = table.get_model(attribute_names=True)

    # An empty row leaves both fields at None.
    row = row_handler.create_row(user=user, table=table, values={}, model=model)
    assert row.date is None
    assert row.datetime is None

    # String values are parsed into date/datetime objects (datetime as UTC).
    row = row_handler.create_row(user=user, table=table, values={
        'date': '2020-4-1',
        'datetime': '2020-4-1 12:30:30'
    }, model=model)
    row.refresh_from_db()
    assert row.date == date(2020, 4, 1)
    assert row.datetime == datetime(2020, 4, 1, 12, 30, 30, tzinfo=utc)

    # Aware datetimes are converted to UTC before storage.
    row = row_handler.create_row(user=user, table=table, values={
        'datetime': make_aware(datetime(2020, 4, 1, 12, 30, 30), amsterdam)
    }, model=model)
    row.refresh_from_db()
    assert row.date is None
    assert row.datetime == datetime(2020, 4, 1, 10, 30, 30, tzinfo=timezone('UTC'))

    # Toggle date_include_time on both fields; existing values must convert.
    date_field_1 = field_handler.update_field(user=user, field=date_field_1,
                                              date_include_time=True)
    date_field_2 = field_handler.update_field(user=user, field=date_field_2,
                                              date_include_time=False)

    assert date_field_1.date_include_time is True
    assert date_field_2.date_include_time is False

    model = table.get_model(attribute_names=True)
    rows = model.objects.all()

    assert rows[0].date is None
    assert rows[0].datetime is None
    assert rows[1].date == datetime(2020, 4, 1, tzinfo=timezone('UTC'))
    assert rows[1].datetime == date(2020, 4, 1)
    assert rows[2].date is None
    assert rows[2].datetime == date(2020, 4, 1)

    field_handler.delete_field(user=user, field=date_field_1)
    field_handler.delete_field(user=user, field=date_field_2)

    assert len(DateField.objects.all()) == 0
@pytest.mark.django_db
def test_converting_date_field_value(data_fixture):
    """
    Fills text fields with dates written in many notations, converts them to
    date fields with every supported date format (EU/US/ISO) and time format
    (12h/24h) combination, and asserts the values are parsed respecting the
    configured format. Afterwards the fields are converted back to text and
    the serialized representation is checked against the configured format.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field_handler = FieldHandler()
    row_handler = RowHandler()

    utc = timezone('utc')

    # One text field per (date format, time format) combination under test.
    date_field_eu = data_fixture.create_text_field(table=table)
    date_field_us = data_fixture.create_text_field(table=table)
    date_field_iso = data_fixture.create_text_field(table=table)
    date_field_eu_12 = data_fixture.create_text_field(table=table)
    date_field_us_12 = data_fixture.create_text_field(table=table)
    date_field_iso_12 = data_fixture.create_text_field(table=table)
    date_field_eu_24 = data_fixture.create_text_field(table=table)
    date_field_us_24 = data_fixture.create_text_field(table=table)
    date_field_iso_24 = data_fixture.create_text_field(table=table)

    model = table.get_model()

    # Row 0: values exactly matching the target formats.
    row_handler.create_row(user=user, table=table, model=model, values={
        f'field_{date_field_eu.id}': '22/07/2021',
        f'field_{date_field_us.id}': '07/22/2021',
        f'field_{date_field_iso.id}': '2021-07-22',
        f'field_{date_field_eu_12.id}': '22/07/2021 12:45 PM',
        f'field_{date_field_us_12.id}': '07/22/2021 12:45 PM',
        f'field_{date_field_iso_12.id}': '2021-07-22 12:45 PM',
        f'field_{date_field_eu_24.id}': '22/07/2021 12:45',
        f'field_{date_field_us_24.id}': '07/22/2021 12:45',
        f'field_{date_field_iso_24.id}': '2021-07-22 12:45',
    })
    # Row 1: same dates with alternative separators and single digit parts.
    row_handler.create_row(user=user, table=table, model=model, values={
        f'field_{date_field_eu.id}': '22-7-2021',
        f'field_{date_field_us.id}': '7-22-2021',
        f'field_{date_field_iso.id}': '2021/7/22',
        f'field_{date_field_eu_12.id}': '22-7-2021 12:45am',
        f'field_{date_field_us_12.id}': '7-22-2021 12:45am',
        f'field_{date_field_iso_12.id}': '2021/7/22 12:45am',
        f'field_{date_field_eu_24.id}': '22-7-2021 7:45',
        f'field_{date_field_us_24.id}': '7-22-2021 7:45',
        f'field_{date_field_iso_24.id}': '2021/7/22 7:45',
    })
    # Row 2: mixed valid and invalid values; invalid ones must become None.
    row_handler.create_row(user=user, table=table, model=model, values={
        f'field_{date_field_eu.id}': '22/07/2021 12:00',
        f'field_{date_field_us.id}': '07/22/2021 12:00am',
        f'field_{date_field_iso.id}': '2021-07-22 12:00 PM',
        f'field_{date_field_eu_12.id}': 'INVALID',
        f'field_{date_field_us_12.id}': '2222-2222-2222',
        f'field_{date_field_iso_12.id}': 'x-7--1',
        f'field_{date_field_eu_24.id}': '22-7-2021 7:45:12',
        f'field_{date_field_us_24.id}': '7-22-2021 7:45:23',
        f'field_{date_field_iso_24.id}': '2021/7/22 7:45:70'
    })
    # Row 3: exotic notations that fall back to the generic cast.
    row_handler.create_row(user=user, table=table, model=model, values={
        f'field_{date_field_eu.id}': '2018-08-20T13:20:10',
        f'field_{date_field_us.id}': '2017 Mar 03 05:12:41.211',
        f'field_{date_field_iso.id}': '19/Apr/2017:06:36:15',
        f'field_{date_field_eu_12.id}': 'Dec 2, 2017 2:39:58 AM',
        f'field_{date_field_us_12.id}': 'Jun 09 2018 15:28:14',
        f'field_{date_field_iso_12.id}': 'Apr 20 00:00:35 2010',
        f'field_{date_field_eu_24.id}': 'Apr 20 00:00:35 2010',
        f'field_{date_field_us_24.id}': '2018-02-27 15:35:20.311',
        f'field_{date_field_iso_24.id}': '10-04-19 12:00:17'
    })

    # Convert every text field into a date field with its target format.
    date_field_eu = field_handler.update_field(
        user=user, field=date_field_eu, new_type_name='date', date_format='EU'
    )
    date_field_us = field_handler.update_field(
        user=user, field=date_field_us, new_type_name='date', date_format='US'
    )
    date_field_iso = field_handler.update_field(
        user=user, field=date_field_iso, new_type_name='date', date_format='ISO'
    )
    date_field_eu_12 = field_handler.update_field(
        user=user, field=date_field_eu_12, new_type_name='date', date_format='EU',
        date_include_time=True, date_time_format='12'
    )
    date_field_us_12 = field_handler.update_field(
        user=user, field=date_field_us_12, new_type_name='date', date_format='US',
        date_include_time=True, date_time_format='12'
    )
    date_field_iso_12 = field_handler.update_field(
        user=user, field=date_field_iso_12, new_type_name='date', date_format='ISO',
        date_include_time=True, date_time_format='12'
    )
    date_field_eu_24 = field_handler.update_field(
        user=user, field=date_field_eu_24, new_type_name='date', date_format='EU',
        date_include_time=True, date_time_format='24'
    )
    date_field_us_24 = field_handler.update_field(
        user=user, field=date_field_us_24, new_type_name='date', date_format='US',
        date_include_time=True, date_time_format='24'
    )
    date_field_iso_24 = field_handler.update_field(
        user=user, field=date_field_iso_24, new_type_name='date', date_format='ISO',
        date_include_time=True, date_time_format='24'
    )

    model = table.get_model()
    rows = model.objects.all()

    assert getattr(rows[0], f'field_{date_field_eu.id}') == date(2021, 7, 22)
    assert getattr(rows[0], f'field_{date_field_us.id}') == date(2021, 7, 22)
    assert getattr(rows[0], f'field_{date_field_iso.id}') == date(2021, 7, 22)
    assert getattr(rows[0], f'field_{date_field_eu_12.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[0], f'field_{date_field_us_12.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[0], f'field_{date_field_iso_12.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[0], f'field_{date_field_eu_24.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[0], f'field_{date_field_us_24.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[0], f'field_{date_field_iso_24.id}') == (
        datetime(2021, 7, 22, 12, 45, 0, tzinfo=utc)
    )

    assert getattr(rows[1], f'field_{date_field_eu.id}') == date(2021, 7, 22)
    assert getattr(rows[1], f'field_{date_field_us.id}') == date(2021, 7, 22)
    assert getattr(rows[1], f'field_{date_field_iso.id}') == date(2021, 7, 22)
    assert getattr(rows[1], f'field_{date_field_eu_12.id}') == (
        datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[1], f'field_{date_field_us_12.id}') == (
        datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[1], f'field_{date_field_iso_12.id}') == (
        datetime(2021, 7, 22, 0, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[1], f'field_{date_field_eu_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[1], f'field_{date_field_us_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[1], f'field_{date_field_iso_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )

    assert getattr(rows[2], f'field_{date_field_eu.id}') == date(2021, 7, 22)
    assert getattr(rows[2], f'field_{date_field_us.id}') == date(2021, 7, 22)
    assert getattr(rows[2], f'field_{date_field_iso.id}') == date(2021, 7, 22)
    assert getattr(rows[2], f'field_{date_field_eu_12.id}') is None
    assert getattr(rows[2], f'field_{date_field_us_12.id}') is None
    assert getattr(rows[2], f'field_{date_field_iso_12.id}') is None
    assert getattr(rows[2], f'field_{date_field_eu_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[2], f'field_{date_field_us_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )
    assert getattr(rows[2], f'field_{date_field_iso_24.id}') == (
        datetime(2021, 7, 22, 7, 45, 0, tzinfo=utc)
    )

    # Removed: a stray triple-quoted string duplicating the row 3 fixture
    # values used to sit here as dead commented-out code.
    assert getattr(rows[3], f'field_{date_field_eu.id}') == date(2018, 8, 20)
    assert getattr(rows[3], f'field_{date_field_us.id}') == date(2017, 3, 3)
    assert getattr(rows[3], f'field_{date_field_iso.id}') == date(2017, 4, 19)
    assert getattr(rows[3], f'field_{date_field_eu_12.id}') == (
        datetime(2017, 12, 2, 2, 39, 58, tzinfo=utc)
    )
    assert getattr(rows[3], f'field_{date_field_us_12.id}') == (
        datetime(2018, 6, 9, 15, 28, 14, tzinfo=utc)
    )
    assert getattr(rows[3], f'field_{date_field_iso_12.id}') == (
        datetime(2010, 4, 20, 0, 0, 35, tzinfo=utc)
    )
    assert getattr(rows[3], f'field_{date_field_eu_24.id}') == (
        datetime(2010, 4, 20, 0, 0, 35, tzinfo=utc)
    )
    assert getattr(rows[3], f'field_{date_field_us_24.id}') == (
        datetime(2018, 2, 27, 15, 35, 20, 311000, tzinfo=utc)
    )
    assert getattr(rows[3], f'field_{date_field_iso_24.id}') == (
        datetime(10, 4, 19, 12, 0, tzinfo=utc)
    )

    # Convert everything back to text and check the serialized format.
    date_field_eu = field_handler.update_field(
        user=user, field=date_field_eu, new_type_name='text'
    )
    date_field_us = field_handler.update_field(
        user=user, field=date_field_us, new_type_name='text'
    )
    date_field_iso = field_handler.update_field(
        user=user, field=date_field_iso, new_type_name='text'
    )
    date_field_eu_12 = field_handler.update_field(
        user=user, field=date_field_eu_12, new_type_name='text'
    )
    date_field_us_12 = field_handler.update_field(
        user=user, field=date_field_us_12, new_type_name='text'
    )
    date_field_iso_12 = field_handler.update_field(
        user=user, field=date_field_iso_12, new_type_name='text'
    )
    date_field_eu_24 = field_handler.update_field(
        user=user, field=date_field_eu_24, new_type_name='text'
    )
    date_field_us_24 = field_handler.update_field(
        user=user, field=date_field_us_24, new_type_name='text'
    )
    date_field_iso_24 = field_handler.update_field(
        user=user, field=date_field_iso_24, new_type_name='text'
    )

    model = table.get_model()
    rows = model.objects.all()

    assert getattr(rows[0], f'field_{date_field_eu.id}') == '22/07/2021'
    assert getattr(rows[0], f'field_{date_field_us.id}') == '07/22/2021'
    assert getattr(rows[0], f'field_{date_field_iso.id}') == '2021-07-22'
    assert getattr(rows[0], f'field_{date_field_eu_12.id}') == '22/07/2021 12:45PM'
    assert getattr(rows[0], f'field_{date_field_us_12.id}') == '07/22/2021 12:45PM'
    assert getattr(rows[0], f'field_{date_field_iso_12.id}') == '2021-07-22 12:45PM'
    assert getattr(rows[0], f'field_{date_field_eu_24.id}') == '22/07/2021 12:45'
    assert getattr(rows[0], f'field_{date_field_us_24.id}') == '07/22/2021 12:45'
    assert getattr(rows[0], f'field_{date_field_iso_24.id}') == '2021-07-22 12:45'
    assert getattr(rows[2], f'field_{date_field_eu_12.id}') is None

View file

@ -248,7 +248,7 @@ def test_update_field_failing(data_fixture):
# This failing field type triggers the CannotChangeFieldType error if a field is
# changed into this type.
class FailingFieldType(TextFieldType):
def get_alter_column_type_function(self, connection, from_field, to_field):
def get_alter_column_prepare_new_value(self, connection, from_field, to_field):
return 'p_in::NOT_VALID_SQL_SO_IT_WILL_FAIL('
user = data_fixture.create_user()

View file

@ -1,19 +1,15 @@
import pytest
import json
from pytz import timezone
from datetime import date
from faker import Faker
from decimal import Decimal
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware, datetime
from baserow.core.user_files.exceptions import (
InvalidUserFileNameError, UserFileDoesNotExist
)
from baserow.contrib.database.fields.field_types import DateFieldType
from baserow.contrib.database.fields.models import (
LongTextField, URLField, DateField, EmailField, FileField
LongTextField, URLField, EmailField, FileField
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
@ -288,118 +284,6 @@ def test_url_field_type(data_fixture):
assert len(URLField.objects.all()) == 2
@pytest.mark.django_db
def test_date_field_type_prepare_value(data_fixture):
d = DateFieldType()
f = data_fixture.create_date_field(date_include_time=True)
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('UTC')
expected_date = make_aware(datetime(2020, 4, 10, 0, 0, 0), utc)
expected_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
with pytest.raises(ValidationError):
assert d.prepare_value_for_db(f, 'TEST')
assert d.prepare_value_for_db(f, None) is None
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = make_aware(datetime(2020, 4, 10, 12, 30, 30), utc)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_datetime = datetime(2020, 4, 10, 12, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_datetime
unprepared_date = date(2020, 4, 10)
assert d.prepare_value_for_db(f, unprepared_date) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_datetime
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_datetime
f = data_fixture.create_date_field(date_include_time=False)
expected_date = date(2020, 4, 10)
unprepared_datetime = datetime(2020, 4, 10, 14, 30, 30)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
unprepared_datetime = make_aware(datetime(2020, 4, 10, 14, 30, 30), amsterdam)
assert d.prepare_value_for_db(f, unprepared_datetime) == expected_date
assert d.prepare_value_for_db(f, '2020-04-10') == expected_date
assert d.prepare_value_for_db(f, '2020-04-11') != expected_date
assert d.prepare_value_for_db(f, '2020-04-10 12:30:30') == expected_date
assert d.prepare_value_for_db(f, '2020-04-10 00:30:30 PM') == expected_date
@pytest.mark.django_db
def test_date_field_type(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field_handler = FieldHandler()
row_handler = RowHandler()
amsterdam = timezone('Europe/Amsterdam')
utc = timezone('utc')
date_field_1 = field_handler.create_field(user=user, table=table, type_name='date',
name='Date')
date_field_2 = field_handler.create_field(user=user, table=table, type_name='date',
name='Datetime', date_include_time=True)
assert date_field_1.date_include_time is False
assert date_field_2.date_include_time is True
assert len(DateField.objects.all()) == 2
model = table.get_model(attribute_names=True)
row = row_handler.create_row(user=user, table=table, values={}, model=model)
assert row.date is None
assert row.datetime is None
row = row_handler.create_row(user=user, table=table, values={
'date': '2020-4-1',
'datetime': '2020-4-1 12:30:30'
}, model=model)
row.refresh_from_db()
assert row.date == date(2020, 4, 1)
assert row.datetime == datetime(2020, 4, 1, 12, 30, 30, tzinfo=utc)
row = row_handler.create_row(user=user, table=table, values={
'datetime': make_aware(datetime(2020, 4, 1, 12, 30, 30), amsterdam)
}, model=model)
row.refresh_from_db()
assert row.date is None
assert row.datetime == datetime(2020, 4, 1, 10, 30, 30, tzinfo=timezone('UTC'))
date_field_1 = field_handler.update_field(user=user, field=date_field_1,
date_include_time=True)
date_field_2 = field_handler.update_field(user=user, field=date_field_2,
date_include_time=False)
assert date_field_1.date_include_time is True
assert date_field_2.date_include_time is False
model = table.get_model(attribute_names=True)
rows = model.objects.all()
assert rows[0].date is None
assert rows[0].datetime is None
assert rows[1].date == datetime(2020, 4, 1, tzinfo=timezone('UTC'))
assert rows[1].datetime == date(2020, 4, 1)
assert rows[2].date is None
assert rows[2].datetime == date(2020, 4, 1)
field_handler.delete_field(user=user, field=date_field_1)
field_handler.delete_field(user=user, field=date_field_2)
assert len(DateField.objects.all()) == 0
@pytest.mark.django_db
def test_email_field_type(data_fixture):
user = data_fixture.create_user()

View file

@ -23,6 +23,7 @@
* Made the public REST API docs compatible with smaller screens.
* Made it possible for the admin to disable new signups.
* Reduced the amount of queries when using the link row field.
* Respect the date format when converting to a date field.
## Released (2021-02-04)