1
0
Fork 0
mirror of https://gitlab.com/bramw/baserow.git synced 2025-04-10 23:50:12 +00:00

Merge branch '496-csv-export-for-linked-row-field-pointing-at-table-with-decimal-primary-key-fails' into 'develop'

Resolve "CSV Export for linked row field pointing at table with decimal primary key fails"

Closes 

See merge request 
This commit is contained in:
Nigel Gott 2021-06-17 09:33:08 +00:00
commit 943d716b2c
10 changed files with 333 additions and 245 deletions
backend
src/baserow/contrib/database
tests
premium/backend/tests/baserow_premium/export

View file

@ -197,12 +197,12 @@ class QuerysetSerializer(abc.ABC):
"""
def serializer_func(row):
attr = getattr(row, field_object["name"])
value = getattr(row, field_object["name"])
if attr is None:
if value is None:
result = ""
else:
result = field_object["type"].get_export_value(row, field_object)
result = field_object["type"].get_export_value(value, field_object)
return (
field_object["name"],

View file

@ -109,4 +109,4 @@ class CsvQuerysetSerializer(QuerysetSerializer):
for key, inner_val in val.items()
]
)
return val
return str(val)

View file

@ -3,7 +3,9 @@ from typing import Dict, Any, List
from baserow.contrib.database.fields.registries import field_type_registry
def construct_all_possible_field_kwargs(link_table) -> Dict[str, List[Dict[str, Any]]]:
def construct_all_possible_field_kwargs(
link_table, decimal_link_table, file_link_table
) -> Dict[str, List[Dict[str, Any]]]:
"""
Some baserow field types have multiple different 'modes' which result in
different database columns and modes of operation being
@ -40,7 +42,11 @@ def construct_all_possible_field_kwargs(link_table) -> Dict[str, List[Dict[str,
{"name": "datetime_eu", "date_include_time": True, "date_format": "EU"},
{"name": "date_eu", "date_include_time": False, "date_format": "EU"},
],
"link_row": [{"name": "link_row", "link_row_table": link_table}],
"link_row": [
{"name": "link_row", "link_row_table": link_table},
{"name": "decimal_link_row", "link_row_table": decimal_link_table},
{"name": "file_link_row", "link_row_table": file_link_table},
],
"file": [{"name": "file"}],
"single_select": [
{

View file

@ -180,12 +180,11 @@ class NumberFieldType(FieldType):
**kwargs,
)
def get_export_value(self, row, field_object):
def get_export_value(self, value, field_object):
# If the number is an integer we want it to be a literal json number and so
don't convert it to a string. However, if it is a decimal then, to preserve
any precision, we keep it as a string.
instance = field_object["field"]
value = getattr(row, field_object["name"])
if instance.number_type == NUMBER_TYPE_INTEGER:
return int(value)
@ -320,8 +319,7 @@ class DateFieldType(FieldType):
"The value should be a date/time string, date object or " "datetime object."
)
def get_export_value(self, row, field_object):
value = getattr(row, field_object["name"])
def get_export_value(self, value, field_object):
if value is None:
return value
python_format = field_object["field"].get_python_format()
@ -498,7 +496,7 @@ class LinkRowFieldType(FieldType):
models.Prefetch(name, queryset=related_queryset)
)
def get_export_value(self, row, field_object):
def get_export_value(self, value, field_object):
instance = field_object["field"]
if hasattr(instance, "_related_model"):
@ -510,16 +508,37 @@ class LinkRowFieldType(FieldType):
)
if primary_field:
primary_field_name = primary_field["name"]
value = getattr(row, field_object["name"])
primary_field_type = primary_field["type"]
primary_field_values = []
for sub in value.all():
linked_row_primary_name = getattr(sub, primary_field_name)
if linked_row_primary_name is None:
linked_row_primary_name = f"unnamed row {sub.id}"
primary_field_values.append(linked_row_primary_name)
# Ensure we also convert the value from the other table to its
# export form as it could be an odd field type!
linked_value = getattr(sub, primary_field_name)
if self._is_unnamed_primary_field_value(linked_value):
export_linked_value = f"unnamed row {sub.id}"
else:
export_linked_value = primary_field_type.get_export_value(
getattr(sub, primary_field_name), primary_field
)
primary_field_values.append(export_linked_value)
return primary_field_values
return []
@staticmethod
def _is_unnamed_primary_field_value(primary_field_value):
    """
    Decides whether a linked row's primary field value counts as "empty",
    in which case the exporter falls back to an "unnamed row" placeholder.

    :param primary_field_value: The value of a primary field row in a linked
        table.
    :return: If this value is considered an empty primary field value.
    """
    # Container values (e.g. file field lists, select option dicts) are empty
    # when they contain no items; any other type is empty only when None.
    if isinstance(primary_field_value, (list, dict)):
        return not primary_field_value
    return primary_field_value is None
def get_serializer_field(self, instance, **kwargs):
"""
If the value is going to be updated we want to accept a list of integers
@ -968,9 +987,8 @@ class FileFieldType(FieldType):
**kwargs,
)
def get_export_value(self, row, field_object):
def get_export_value(self, value, field_object):
files = []
value = getattr(row, field_object["name"])
for file in value:
if "name" in file:
path = UserFileHandler().user_file_path(file["name"])
@ -1132,8 +1150,7 @@ class SingleSelectFieldType(FieldType):
"color is exposed."
)
def get_export_value(self, row, field_object):
value = getattr(row, field_object["name"])
def get_export_value(self, value, field_object):
return value.value
def get_model_field(self, instance, **kwargs):

View file

@ -12,7 +12,6 @@ from baserow.core.registry import (
APIUrlsInstanceMixin,
ImportExportMixin,
)
from .exceptions import FieldTypeAlreadyRegistered, FieldTypeDoesNotExist
from .models import SelectOption
@ -604,18 +603,18 @@ class FieldType(
setattr(row, field_name, value)
def get_export_value(self, row, field_object):
def get_export_value(self, value, field_object):
"""
Gets this fields value from the provided row in a form suitable for exporting
to a standalone file.
Should convert this field type's internal baserow value to a form suitable
for exporting to a standalone file.
:param row: The row instance where the value be set on.
:type row: Object
:param value: The internal value to convert to a suitable export format
:type value: Object
:param field_object: The field object for the field to extract
:type field_object: FieldObject
"""
return getattr(row, field_object["name"])
return value
class FieldTypeRegistry(

View file

@ -85,7 +85,7 @@ class Command(BaseCommand):
@staticmethod
def create_a_column_for_every_type(table):
field_handler = FieldHandler()
all_kwargs_per_type = construct_all_possible_field_kwargs(None)
all_kwargs_per_type = construct_all_possible_field_kwargs(None, None, None)
for field_type_name, all_possible_kwargs in all_kwargs_per_type.items():
if field_type_name == "link_row":
continue

View file

@ -1,4 +1,3 @@
from decimal import Decimal
from io import BytesIO
from typing import Type, List
from unittest.mock import patch
@ -35,14 +34,11 @@ from baserow.contrib.database.export.registries import (
table_exporter_registry,
TableExporter,
)
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.views.exceptions import ViewNotInTable
from baserow.contrib.database.views.models import GridView
from tests.test_utils import setup_interesting_test_table
def _parse_datetime(datetime):
@ -214,110 +210,32 @@ def test_columns_are_exported_by_order_then_id(storage_mock, data_fixture):
def test_can_export_every_interesting_different_field_to_csv(
storage_mock, data_fixture
):
datetime = _parse_datetime("2020-02-01 01:23")
date = _parse_date("2020-02-01")
upload_url_prefix = "http://localhost:8000/media/user_files/"
expected = {
"text": "text",
"long_text": "long_text",
"url": "http://www.google.com",
"email": "test@example.com",
"negative_int": (-1, "-1"),
"positive_int": (1, "1"),
"negative_decimal": (Decimal("-1.2"), "-1.2"),
"positive_decimal": (Decimal("1.2"), "1.2"),
"boolean": (True, "True"),
"datetime_us": (datetime, "02/01/2020 01:23"),
"date_us": (date, "02/01/2020"),
"datetime_eu": (datetime, "01/02/2020 01:23"),
"date_eu": (date, "01/02/2020"),
"link_row": (None, '"linked_row_1,linked_row_2,unnamed row 3"'),
"file": (
[
{"name": "hashed_name.txt", "visible_name": "a.txt"},
{"name": "other_name.txt", "visible_name": "b.txt"},
],
f'"visible_name=a.txt url={upload_url_prefix}hashed_name.txt,'
f'visible_name=b.txt url={upload_url_prefix}other_name.txt"',
),
"single_select": (lambda: SelectOption.objects.get(value="A"), "A"),
"phone_number": "+4412345678",
}
contents = wide_test(data_fixture, storage_mock, expected, {"exporter_type": "csv"})
expected_header = ",".join(expected.keys())
expected_values = ",".join(
[v[1] if isinstance(v, tuple) else v for v in expected.values()]
contents = run_export_job_over_interesting_table(
data_fixture, storage_mock, {"exporter_type": "csv"}
)
# noinspection HttpUrlsUsage
expected = (
"\ufeff"
f"id,{expected_header}\r\n"
f"1,,,,,,,,,False,,,,,,,,\r\n"
f"2,{expected_values}\r\n"
"\ufeffid,text,long_text,url,email,negative_int,positive_int,"
"negative_decimal,positive_decimal,boolean,datetime_us,date_us,datetime_eu,"
"date_eu,link_row,decimal_link_row,file_link_row,file,single_select,"
"phone_number\r\n"
"1,,,,,,,,,False,,,,,,,,,,\r\n"
"2,text,long_text,https://www.google.com,test@example.com,-1,1,-1.2,1.2,True,"
"02/01/2020 01:23,02/01/2020,01/02/2020 01:23,01/02/2020,"
'"linked_row_1,linked_row_2,unnamed row 3","1.234,-123.456,unnamed row 3",'
'"visible_name=name.txt url=http://localhost:8000/media/user_files/test_hash'
'.txt,unnamed row 2",'
'"visible_name=a.txt url=http://localhost:8000/media/user_files/hashed_name.txt'
',visible_name=b.txt url=http://localhost:8000/media/user_files/other_name.txt"'
",A,+4412345678\r\n"
)
assert expected == contents
def wide_test(data_fixture, storage_mock, expected, options):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(database=database, user=user)
link_table = data_fixture.create_database_table(database=database, user=user)
handler = FieldHandler()
row_handler = RowHandler()
all_possible_kwargs_per_type = construct_all_possible_field_kwargs(link_table)
name_to_field_id = {}
i = 0
for field_type_name, all_possible_kwargs in all_possible_kwargs_per_type.items():
for kwargs in all_possible_kwargs:
field = handler.create_field(
user=user,
table=table,
type_name=field_type_name,
order=i,
**kwargs,
)
i += 1
name_to_field_id[kwargs["name"]] = field.id
def run_export_job_over_interesting_table(data_fixture, storage_mock, options):
table, user = setup_interesting_test_table(data_fixture)
grid_view = data_fixture.create_grid_view(table=table)
row_handler = RowHandler()
other_table_primary_text_field = data_fixture.create_text_field(
table=link_table, name="text_field", primary=True
)
def add_linked_row(text):
return row_handler.create_row(
user=user,
table=link_table,
values={
other_table_primary_text_field.id: text,
},
)
model = table.get_model()
# A dictionary of field names to a tuple of (value to create the row model with,
# the expected value of this value after being exported to csv)
assert expected.keys() == name_to_field_id.keys(), (
"Please update the dictionary above with what your new field type should look "
"like when serialized to csv. "
)
row_values = {}
for field_type, val in expected.items():
if isinstance(val, tuple):
val = val[0]
if callable(val):
val = val()
if val is not None:
row_values[f"field_{name_to_field_id[field_type]}"] = val
# Make a blank row to test empty field conversion also.
model.objects.create(**{})
row = model.objects.create(**row_values)
linked_row_1 = add_linked_row("linked_row_1")
linked_row_2 = add_linked_row("linked_row_2")
linked_row_3 = add_linked_row(None)
getattr(row, f"field_{name_to_field_id['link_row']}").add(
linked_row_1.id, linked_row_2.id, linked_row_3.id
)
job, contents = run_export_job_with_mock_storage(
table, grid_view, storage_mock, user, options
)

View file

@ -50,8 +50,27 @@ def test_can_convert_between_all_fields(data_fixture):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(database=database, user=user)
# Link tables
link_table = data_fixture.create_database_table(database=database, user=user)
data_fixture.create_text_field(table=link_table, name="text_field", primary=True)
decimal_link_table = data_fixture.create_database_table(
database=database, user=user
)
data_fixture.create_number_field(
table=decimal_link_table,
name="text_field",
primary=True,
number_type="DECIMAL",
number_decimal_places=3,
number_negative=True,
)
file_link_table = data_fixture.create_database_table(database=database, user=user)
data_fixture.create_file_field(
table=file_link_table,
name="file_field",
primary=True,
)
handler = FieldHandler()
row_handler = RowHandler()
fake = Faker()
@ -66,7 +85,9 @@ def test_can_convert_between_all_fields(data_fixture):
# different conversion behaviour or entirely different database columns being
# created. Here the kwargs which control these modes are enumerated so we can then
# generate every possible type of conversion.
all_possible_kwargs_per_type = construct_all_possible_field_kwargs(link_table)
all_possible_kwargs_per_type = construct_all_possible_field_kwargs(
link_table, decimal_link_table, file_link_table
)
i = 1
for field_type_name, all_possible_kwargs in all_possible_kwargs_per_type.items():

188
backend/tests/test_utils.py Normal file
View file

@ -0,0 +1,188 @@
from decimal import Decimal
from django.utils.dateparse import parse_datetime, parse_date
from django.utils.timezone import make_aware, utc
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption
from baserow.contrib.database.rows.handler import RowHandler
def _parse_datetime(datetime):
    """Parse an ISO datetime string into a timezone-aware UTC datetime."""
    naive = parse_datetime(datetime)
    return make_aware(naive, timezone=utc)
def _parse_date(date):
    """Parse an ISO formatted date string (YYYY-MM-DD) into a date object."""
    parsed = parse_date(date)
    return parsed
def setup_interesting_test_table(data_fixture):
    """
    Constructs a testing table with every field type, their sub types and any
    other interesting baserow edge cases worth testing when writing a
    comprehensive "does this feature work with all the baserow fields" test.

    Two rows are created: a completely blank row (row id 1) to exercise empty
    value handling, and a fully populated row (row id 2).

    :param data_fixture: The baserow testing data_fixture object
    :return: A tuple of (the created table, the user who owns it).
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    table = data_fixture.create_database_table(database=database, user=user)
    # A separate table per link_row variant so their primary fields can have
    # different types (text, decimal and file respectively).
    link_table = data_fixture.create_database_table(database=database, user=user)
    decimal_link_table = data_fixture.create_database_table(
        database=database, user=user
    )
    file_link_table = data_fixture.create_database_table(database=database, user=user)
    handler = FieldHandler()
    all_possible_kwargs_per_type = construct_all_possible_field_kwargs(
        link_table, decimal_link_table, file_link_table
    )
    name_to_field_id = {}
    i = 0
    # Create one field on the table for every field type / sub type mode.
    for field_type_name, all_possible_kwargs in all_possible_kwargs_per_type.items():
        for kwargs in all_possible_kwargs:
            field = handler.create_field(
                user=user,
                table=table,
                type_name=field_type_name,
                order=i,
                **kwargs,
            )
            i += 1
            name_to_field_id[kwargs["name"]] = field.id
    row_handler = RowHandler()
    # Primary fields for the three linked tables; the export code converts a
    # linked row to its primary field's export value.
    other_table_primary_text_field = data_fixture.create_text_field(
        table=link_table, name="text_field", primary=True
    )
    other_table_primary_decimal_field = data_fixture.create_number_field(
        table=decimal_link_table,
        name="text_field",
        primary=True,
        number_type="DECIMAL",
        number_decimal_places=3,
        number_negative=True,
    )
    other_table_primary_file_field = data_fixture.create_file_field(
        table=file_link_table,
        name="file_field",
        primary=True,
    )
    model = table.get_model()
    datetime = _parse_datetime("2020-02-01 01:23")
    date = _parse_date("2020-02-01")
    # A dictionary of field names to the value the populated row should hold
    # for that field. None means "leave blank / set up later".
    values = {
        "text": "text",
        "long_text": "long_text",
        "url": "https://www.google.com",
        "email": "test@example.com",
        "negative_int": -1,
        "positive_int": 1,
        "negative_decimal": Decimal("-1.2"),
        "positive_decimal": Decimal("1.2"),
        "boolean": "True",
        "datetime_us": datetime,
        "date_us": date,
        "datetime_eu": datetime,
        "date_eu": date,
        # We will setup link rows manually later
        "link_row": None,
        "decimal_link_row": None,
        "file_link_row": None,
        "file": [
            {"name": "hashed_name.txt", "visible_name": "a.txt"},
            {"name": "other_name.txt", "visible_name": "b.txt"},
        ],
        "single_select": SelectOption.objects.get(value="A"),
        "phone_number": "+4412345678",
    }
    # Guard so anyone adding a new field type is forced to add a test value.
    missing_fields = set(name_to_field_id.keys()) - set(values.keys())
    assert values.keys() == name_to_field_id.keys(), (
        "Please update the dictionary above with interesting test values for your new "
        f"field type. In the values dict you are missing the fields {missing_fields}."
    )
    row_values = {}
    for field_type, val in values.items():
        if val is not None:
            row_values[f"field_{name_to_field_id[field_type]}"] = val
    # Make a blank row to test empty field conversion also.
    model.objects.create(**{})
    row = model.objects.create(**row_values)
    # Setup the link rows
    linked_row_1 = row_handler.create_row(
        user=user,
        table=link_table,
        values={
            other_table_primary_text_field.id: "linked_row_1",
        },
    )
    linked_row_2 = row_handler.create_row(
        user=user,
        table=link_table,
        values={
            other_table_primary_text_field.id: "linked_row_2",
        },
    )
    # A linked row with a blank primary value exercises the "unnamed row N"
    # export fallback.
    linked_row_3 = row_handler.create_row(
        user=user,
        table=link_table,
        values={
            other_table_primary_text_field.id: None,
        },
    )
    linked_row_4 = row_handler.create_row(
        user=user,
        table=decimal_link_table,
        values={
            other_table_primary_decimal_field.id: "1.234",
        },
    )
    linked_row_5 = row_handler.create_row(
        user=user,
        table=decimal_link_table,
        values={
            other_table_primary_decimal_field.id: "-123.456",
        },
    )
    linked_row_6 = row_handler.create_row(
        user=user,
        table=decimal_link_table,
        values={
            other_table_primary_decimal_field.id: None,
        },
    )
    user_file_1 = data_fixture.create_user_file(
        original_name="name.txt", unique="test", sha256_hash="hash"
    )
    linked_row_7 = row_handler.create_row(
        user=user,
        table=file_link_table,
        values={
            other_table_primary_file_field.id: [{"name": user_file_1.name}],
        },
    )
    linked_row_8 = row_handler.create_row(
        user=user,
        table=file_link_table,
        values={
            other_table_primary_file_field.id: None,
        },
    )
    # Attach the linked rows to the populated row's three link_row fields.
    getattr(row, f"field_{name_to_field_id['link_row']}").add(
        linked_row_1.id, linked_row_2.id, linked_row_3.id
    )
    getattr(row, f"field_{name_to_field_id['decimal_link_row']}").add(
        linked_row_4.id, linked_row_5.id, linked_row_6.id
    )
    getattr(row, f"field_{name_to_field_id['file_link_row']}").add(
        linked_row_7.id, linked_row_8.id
    )
    return table, user

View file

@ -1,4 +1,3 @@
from decimal import Decimal
from io import BytesIO
from unittest.mock import patch
@ -7,12 +6,8 @@ from django.utils.dateparse import parse_date, parse_datetime
from django.utils.timezone import utc, make_aware
from baserow.contrib.database.export.handler import ExportHandler
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption
from baserow.contrib.database.rows.handler import RowHandler
from tests.test_utils import setup_interesting_test_table
def _parse_datetime(datetime):
@ -28,29 +23,8 @@ def _parse_date(date):
def test_can_export_every_interesting_different_field_to_json(
storage_mock, data_fixture
):
datetime = _parse_datetime("2020-02-01 01:23")
date = _parse_date("2020-02-01")
expected = {
"text": "text",
"long_text": "long_text",
"url": "http://www.google.com",
"email": "test@example.com",
"negative_int": -1,
"positive_int": 1,
"negative_decimal": Decimal("-1.2"),
"positive_decimal": Decimal("1.2"),
"boolean": True,
"datetime_us": datetime,
"date_us": date,
"datetime_eu": datetime,
"date_eu": date,
"link_row": None,
"file": ([{"name": "hashed_name.txt", "visible_name": "a.txt"}],),
"single_select": lambda: SelectOption.objects.get(value="A"),
"phone_number": "+4412345678",
}
contents = wide_test(
data_fixture, storage_mock, expected, {"exporter_type": "json"}
contents = run_export_over_interesting_test_table(
data_fixture, storage_mock, {"exporter_type": "json"}
)
assert (
contents
@ -71,6 +45,8 @@ def test_can_export_every_interesting_different_field_to_json(
"datetime_eu": "",
"date_eu": "",
"link_row": [],
"decimal_link_row": [],
"file_link_row": [],
"file": [],
"single_select": "",
"phone_number": ""
@ -79,7 +55,7 @@ def test_can_export_every_interesting_different_field_to_json(
"id": 2,
"text": "text",
"long_text": "long_text",
"url": "http://www.google.com",
"url": "https://www.google.com",
"email": "test@example.com",
"negative_int": -1,
"positive_int": 1,
@ -95,10 +71,28 @@ def test_can_export_every_interesting_different_field_to_json(
"linked_row_2",
"unnamed row 3"
],
"decimal_link_row": [
"1.234",
"-123.456",
"unnamed row 3"
],
"file_link_row": [
[
{
"visible_name": "name.txt",
"url": "http://localhost:8000/media/user_files/test_hash.txt"
}
],
"unnamed row 2"
],
"file": [
{
"visible_name": "a.txt",
"url": "http://localhost:8000/media/user_files/hashed_name.txt"
},
{
"visible_name": "b.txt",
"url": "http://localhost:8000/media/user_files/other_name.txt"
}
],
"single_select": "A",
@ -146,28 +140,9 @@ def test_if_duplicate_field_names_json_export(storage_mock, data_fixture):
def test_can_export_every_interesting_different_field_to_xml(
storage_mock, data_fixture
):
datetime = _parse_datetime("2020-02-01 01:23")
date = _parse_date("2020-02-01")
expected = {
"text": "text",
"long_text": "long_text",
"url": "http://www.google.com",
"email": "test@example.com",
"negative_int": -1,
"positive_int": 1,
"negative_decimal": Decimal("-1.2"),
"positive_decimal": Decimal("1.2"),
"boolean": True,
"datetime_us": datetime,
"date_us": date,
"datetime_eu": datetime,
"date_eu": date,
"link_row": None,
"file": ([{"name": "hashed_name.txt", "visible_name": "a.txt"}],),
"single_select": lambda: SelectOption.objects.get(value="A"),
"phone_number": "+4412345678",
}
xml = wide_test(data_fixture, storage_mock, expected, {"exporter_type": "xml"})
xml = run_export_over_interesting_test_table(
data_fixture, storage_mock, {"exporter_type": "xml"}
)
expected_xml = f"""<?xml version="1.0" encoding="utf-8" ?>
<rows>
<row>
@ -186,6 +161,8 @@ def test_can_export_every_interesting_different_field_to_xml(
<datetime-eu/>
<date-eu/>
<link-row/>
<decimal-link-row/>
<file-link-row/>
<file/>
<single-select/>
<phone-number/>
@ -194,7 +171,7 @@ def test_can_export_every_interesting_different_field_to_xml(
<id>2</id>
<text>text</text>
<long-text>long_text</long-text>
<url>http://www.google.com</url>
<url>https://www.google.com</url>
<email>test@example.com</email>
<negative-int>-1</negative-int>
<positive-int>1</positive-int>
@ -210,11 +187,31 @@ def test_can_export_every_interesting_different_field_to_xml(
<item>linked_row_2</item>
<item>unnamed row 3</item>
</link-row>
<decimal-link-row>
<item>1.234</item>
<item>-123.456</item>
<item>unnamed row 3</item>
</decimal-link-row>
<file-link-row>
<item>
<item>
<visible_name>name.txt</visible_name>
<url>http://localhost:8000/media/user_files/test_hash.txt</url>
</item>
</item>
<item>
unnamed row 2
</item>
</file-link-row>
<file>
<item>
<visible_name>a.txt</visible_name>
<url>http://localhost:8000/media/user_files/hashed_name.txt</url>
</item>
<item>
<visible_name>b.txt</visible_name>
<url>http://localhost:8000/media/user_files/other_name.txt</url>
</item>
</file>
<single-select>A</single-select>
<phone-number>+4412345678</phone-number>
@ -269,67 +266,9 @@ def strip_indents_and_newlines(xml):
return "".join([line.strip() for line in xml.split("\n")])
def wide_test(data_fixture, storage_mock, expected, options):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table = data_fixture.create_database_table(database=database, user=user)
link_table = data_fixture.create_database_table(database=database, user=user)
handler = FieldHandler()
row_handler = RowHandler()
all_possible_kwargs_per_type = construct_all_possible_field_kwargs(link_table)
name_to_field_id = {}
i = 0
for field_type_name, all_possible_kwargs in all_possible_kwargs_per_type.items():
for kwargs in all_possible_kwargs:
field = handler.create_field(
user=user,
table=table,
type_name=field_type_name,
order=i,
**kwargs,
)
i += 1
name_to_field_id[kwargs["name"]] = field.id
def run_export_over_interesting_test_table(data_fixture, storage_mock, options):
table, user = setup_interesting_test_table(data_fixture)
grid_view = data_fixture.create_grid_view(table=table)
row_handler = RowHandler()
other_table_primary_text_field = data_fixture.create_text_field(
table=link_table, name="text_field", primary=True
)
def add_linked_row(text):
return row_handler.create_row(
user=user,
table=link_table,
values={
other_table_primary_text_field.id: text,
},
)
model = table.get_model()
# A dictionary of field names to a tuple of (value to create the row model with,
# the expected value of this value after being exported to csv)
assert expected.keys() == name_to_field_id.keys(), (
"Please update the dictionary above with what your new field type should look "
"like when serialized to csv. "
)
row_values = {}
for field_type, val in expected.items():
if isinstance(val, tuple):
val = val[0]
if callable(val):
val = val()
if val is not None:
row_values[f"field_{name_to_field_id[field_type]}"] = val
# Make a blank row to test empty field conversion also.
model.objects.create(**{})
row = model.objects.create(**row_values)
linked_row_1 = add_linked_row("linked_row_1")
linked_row_2 = add_linked_row("linked_row_2")
linked_row_3 = add_linked_row(None)
getattr(row, f"field_{name_to_field_id['link_row']}").add(
linked_row_1.id, linked_row_2.id, linked_row_3.id
)
job, contents = run_export_job_with_mock_storage(
table, grid_view, storage_mock, user, options
)