
Resolve "Relationship change of the reversed relationship should be visible in row history"

This commit is contained in:
Davide Silvestri 2025-03-18 20:32:47 +01:00 committed by Bram Wiepjes
parent 10499efe93
commit fd8b9db89c
8 changed files with 709 additions and 56 deletions
backend
src/baserow
contrib/database
test_utils/fixtures
tests/baserow/contrib/database
changelog/entries/unreleased/feature

View file

@@ -3456,6 +3456,9 @@ class LinkRowFieldType(
**already_serialized_linked_rows,
**new_serialized_linked_rows,
},
"linked_table_id": field.link_row_table_id,
"linked_field_id": field.link_row_related_field_id,
"primary_value": str(row),
}
def are_row_values_equal(self, value1: any, value2: any) -> bool:
@@ -6839,7 +6842,7 @@ class PasswordFieldType(FieldType):
# `False` as string depending on whether the value is set.
return bool(value)
def prepare_row_history_value_from_action_meta_data(self, value):
def prepare_value_for_row_history(self, value):
# We don't want to expose the hash of the password, so we just show `True` or
# `False` as string depending on whether the value is set.
return bool(value)
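
For reference, this is roughly what the serialized link_row metadata stored with a row history entry looks like after this change. The three new keys (linked_table_id, linked_field_id, primary_value) come from the hunk above; all concrete ids and values below are illustrative.

# Hypothetical fields_metadata entry for a link_row field, shaped as in the
# diff above and in the updated tests further down; ids are made up.
link_row_metadata = {
    "id": 42,  # id of the link_row field on the updated table
    "type": "link_row",
    "linked_rows": {7: {"value": "b1"}},  # rows referenced by the cell
    "linked_table_id": 2,  # field.link_row_table_id
    "linked_field_id": 43,  # field.link_row_related_field_id, or None if absent
    "primary_value": "a1",  # str(row) of the row being updated
}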

View file

@@ -284,9 +284,9 @@ class FieldType(
return getattr(row, field_name)
def prepare_row_history_value_from_action_meta_data(self, value):
def prepare_value_for_row_history(self, value):
"""
Prepare the row action update action meta data value for the row history.
Prepare the value for the row history.
This can be used to change the value to a different format if needed. It's
for example used by the password field to mask the hash.
"""

View file

@@ -1,4 +1,7 @@
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
from itertools import groupby
from typing import Any, Dict, List, NamedTuple, NewType, Optional
from django.conf import settings
@@ -21,6 +24,19 @@ tracer = trace.get_tracer(__name__)
FieldName = NewType("FieldName", str)
# Dict of table_id -> row_id -> field_name ->
# {added: List[row_id], removed:List[row_id], metadata: Dict}
RelatedRowsDiff = Dict[int, Dict[int, Dict[str, Dict[str, Any]]]]
@dataclass
class ActionData:
uuid: str
type: str
timestamp: datetime
command_type: ActionCommandType
params: Dict[str, Any]
class RowChangeDiff(NamedTuple):
"""
@@ -29,6 +45,8 @@ class RowChangeDiff(NamedTuple):
and after values of those fields.
"""
row_id: int
table_id: int
changed_field_names: List[FieldName]
before_values: Dict[FieldName, Any]
after_values: Dict[FieldName, Any]
@@ -38,38 +56,34 @@ class RowHistoryHandler:
@classmethod
def _construct_entry_from_action_and_diff(
cls,
user,
table_id,
row_id,
field_names,
row_fields_metadata,
action_type,
action_uuid,
action_timestamp,
action_command_type,
diff,
user: AbstractBaseUser,
action: ActionData,
fields_metadata: Dict[str, Any],
row_diff: RowChangeDiff,
):
return RowHistory(
user_id=user.id,
user_name=user.first_name,
table_id=table_id,
row_id=row_id,
field_names=field_names,
fields_metadata=row_fields_metadata,
action_uuid=action_uuid,
action_command_type=action_command_type.value,
action_timestamp=action_timestamp,
action_type=action_type,
before_values=diff.before_values,
after_values=diff.after_values,
table_id=row_diff.table_id,
row_id=row_diff.row_id,
field_names=row_diff.changed_field_names,
fields_metadata=fields_metadata,
action_uuid=action.uuid,
action_command_type=action.command_type.value,
action_timestamp=action.timestamp,
action_type=action.type,
before_values=row_diff.before_values,
after_values=row_diff.after_values,
)
@classmethod
def _extract_row_diff(
cls,
table_id: int,
row_id: int,
fields_metadata: Dict[str, Any],
before_values: Dict[str, Any],
after_values: Dict[str, Any],
fields_metadata,
) -> Optional[RowChangeDiff]:
"""
Extracts the fields that have changed between the before and after values of a
@@ -94,18 +108,20 @@ class RowHistoryHandler:
before_fields = {
k: field_type_registry.get(
fields_metadata[k]["type"]
).prepare_row_history_value_from_action_meta_data(v)
).prepare_value_for_row_history(v)
for k, v in before_values.items()
if k in changed_fields
}
after_fields = {
k: field_type_registry.get(
fields_metadata[k]["type"]
).prepare_row_history_value_from_action_meta_data(v)
).prepare_value_for_row_history(v)
for k, v in after_values.items()
if k in changed_fields
}
return RowChangeDiff(list(changed_fields), before_fields, after_fields)
return RowChangeDiff(
row_id, table_id, list(changed_fields), before_fields, after_fields
)
@classmethod
def _raise_if_ids_mismatch(cls, before_values, after_values, fields_metadata):
@@ -120,62 +136,205 @@ class RowHistoryHandler:
"are the same. This should never happen."
)
@classmethod
def _update_related_tables_entries(
cls,
related_rows_diff: RelatedRowsDiff,
fields_metadata: Dict[str, Any],
row_diff: RowChangeDiff,
) -> RelatedRowsDiff:
"""
Updates the record of changes in related tables when link_row fields are
modified.
When a row's link_row field is updated (adding or removing connections to rows
in another table), this method tracks those changes from the perspective of the
rows in the related table, so that history can be properly displayed for both
sides of the relationship.
The method updates related_rows_diff in-place, maintaining a record of which
rows were added or removed from each link relationship.
:param related_rows_diff: Nested dictionary tracking changes for each affected
related row
:param fields_metadata: Metadata about the fields that were changed in
this update
:param row_diff: The changes made to the current row, including before/after
values
:return: The updated related_rows_diff dictionary
"""
def _init_linked_row_diff(linked_field_id):
return {
"added": [],
"removed": [],
"metadata": {
"id": linked_field_id,
"type": "link_row",
"linked_rows": {},
},
}
def _update_linked_row_diff(
field_metadata: Dict[str, Any], row_ids_set: set[int], key: str
):
linked_table_id = field_metadata["linked_table_id"]
linked_field_id = field_metadata["linked_field_id"]
linked_field_name = f"field_{linked_field_id}"
for linked_row_id in row_ids_set:
linked_diff = related_rows_diff[linked_table_id][linked_row_id][
linked_field_name
]
if not linked_diff:
linked_diff = _init_linked_row_diff(linked_field_id)
related_rows_diff[linked_table_id][linked_row_id][
linked_field_name
] = linked_diff
linked_diff[key].append(row_id)
linked_diff["metadata"]["linked_rows"][row_id] = {
"value": field_metadata["primary_value"]
}
row_id = row_diff.row_id
for field_name in row_diff.changed_field_names:
field_metadata = fields_metadata[field_name]
# Ignore fields that are not link_row fields or that don't have a related
# field in the linked table.
if (
field_metadata["type"] != "link_row"
or not field_metadata["linked_field_id"]
):
continue
after_set = set(row_diff.after_values[field_name])
before_set = set(row_diff.before_values[field_name])
row_ids_added = after_set - before_set
_update_linked_row_diff(field_metadata, row_ids_added, "added")
row_ids_removed = before_set - after_set
_update_linked_row_diff(field_metadata, row_ids_removed, "removed")
return related_rows_diff
@classmethod
def _construct_related_rows_entries(
cls,
related_rows_diff: RelatedRowsDiff,
user: AbstractBaseUser,
action: ActionData,
) -> List[RowHistory]:
"""
Creates RowHistory entries for rows in related tables that were affected by
changes to the current row. Specifically, when a link_row field is updated,
this method ensures that the changes are also tracked from the perspective of
the related rows.
:param related_rows_diff: A nested dictionary that tracks changes for each
affected related row. It includes details about rows added or removed
from link_row relationships.
:param user: The user who performed the action that triggered the changes.
:param action: The action metadata that describes the operation performed.
:return: A list of RowHistory entries representing the changes for the
related rows.
"""
entries = []
for linked_table_id, table_changes in related_rows_diff.items():
for linked_row_id, row_changes in table_changes.items():
field_names = list(row_changes.keys())
fields_metadata, before_values, after_values = {}, {}, {}
for field_name in field_names:
row_field_changes = row_changes[field_name]
fields_metadata[field_name] = row_field_changes["metadata"]
before_values[field_name] = row_field_changes["removed"]
after_values[field_name] = row_field_changes["added"]
linked_entry = RowHistory(
user_id=user.id,
user_name=user.first_name,
table_id=linked_table_id,
row_id=linked_row_id,
field_names=field_names,
fields_metadata=fields_metadata,
action_uuid=action.uuid,
action_command_type=action.command_type.value,
action_timestamp=action.timestamp,
action_type=action.type,
before_values=before_values,
after_values=after_values,
)
entries.append(linked_entry)
return entries
@classmethod
@baserow_trace(tracer)
def record_history_from_update_rows_action(
cls,
user: AbstractBaseUser,
action_uuid: str,
action_params: Dict[str, Any],
action_timestamp: datetime,
action_command_type: ActionCommandType,
action: ActionData,
):
params = UpdateRowsActionType.serialized_to_params(action_params)
params = UpdateRowsActionType.serialized_to_params(action.params)
table_id = params.table_id
after_values = params.row_values
before_values = [
params.original_rows_values_by_id[r["id"]] for r in after_values
]
if action_command_type == ActionCommandType.UNDO:
if action.command_type == ActionCommandType.UNDO:
before_values, after_values = after_values, before_values
row_history_entries = []
related_rows_diff: RelatedRowsDiff = defaultdict(
lambda: defaultdict(lambda: defaultdict(dict))
)
for i, after in enumerate(after_values):
before = before_values[i]
fields_metadata = params.updated_fields_metadata_by_row_id[after["id"]]
cls._raise_if_ids_mismatch(before, after, fields_metadata)
diff = cls._extract_row_diff(before, after, fields_metadata)
if diff is None:
row_id = after["id"]
row_diff = cls._extract_row_diff(
table_id, row_id, fields_metadata, before, after
)
if row_diff is None:
continue
changed_fields_metadata = {
k: v
for k, v in fields_metadata.items()
if k in diff.changed_field_names
if k in row_diff.changed_field_names
}
row_id = after["id"]
entry = cls._construct_entry_from_action_and_diff(
user,
params.table_id,
row_id,
diff.changed_field_names,
action,
changed_fields_metadata,
UpdateRowsActionType.type,
action_uuid,
action_timestamp,
action_command_type,
diff,
row_diff,
)
row_history_entries.append(entry)
cls._update_related_tables_entries(
related_rows_diff, changed_fields_metadata, row_diff
)
related_entries = cls._construct_related_rows_entries(
related_rows_diff, user, action
)
row_history_entries.extend(related_entries)
if row_history_entries:
row_history_entries = RowHistory.objects.bulk_create(row_history_entries)
rows_history_updated.send(
RowHistoryHandler,
table_id=params.table_id,
row_history_entries=row_history_entries,
)
for table_id, per_table_row_history_entries in groupby(
row_history_entries, lambda e: e.table_id
):
rows_history_updated.send(
RowHistoryHandler,
table_id=table_id,
row_history_entries=list(per_table_row_history_entries),
)
@classmethod
@baserow_trace(tracer)
@@ -233,5 +392,12 @@ def on_action_done_update_row_history(
if action_type and action_type.type in ROW_HISTORY_ACTIONS:
add_entry_handler = ROW_HISTORY_ACTIONS[action_type.type]
add_entry_handler(
user, action_uuid, action_params, action_timestamp, action_command_type
user,
ActionData(
action_uuid,
action_type.type,
action_timestamp,
action_command_type,
action_params,
),
)
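
Taken together, the handler now tracks every link_row change a second time, from the perspective of the rows in the linked table, using the nested RelatedRowsDiff structure (table_id -> row_id -> field_name -> {added, removed, metadata}) and then flattens it into extra RowHistory entries. A self-contained sketch of that bookkeeping under assumed ids and values (table B has id 2, its reversed field is field_43, row "a1" has id 1):

from collections import defaultdict

# RelatedRowsDiff: table_id -> row_id -> field_name -> {added, removed, metadata}
related_rows_diff = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))

# Row 1 of table A ("a1") was linked to rows 7 and 8 of table B, so from table B's
# perspective row 1 was "added" to the reversed field of both rows.
for linked_row_id in (7, 8):
    linked_diff = related_rows_diff[2][linked_row_id]["field_43"]
    if not linked_diff:  # first change recorded for this cell
        linked_diff.update(
            {
                "added": [],
                "removed": [],
                "metadata": {"id": 43, "type": "link_row", "linked_rows": {}},
            }
        )
    linked_diff["added"].append(1)
    linked_diff["metadata"]["linked_rows"][1] = {"value": "a1"}

# _construct_related_rows_entries then emits one RowHistory entry per related row,
# taking before_values from "removed" and after_values from "added":
for table_id, table_changes in related_rows_diff.items():
    for row_id, row_changes in table_changes.items():
        before = {name: diff["removed"] for name, diff in row_changes.items()}
        after = {name: diff["added"] for name, diff in row_changes.items()}
        print(table_id, row_id, before, after)
        # 2 7 {'field_43': []} {'field_43': [1]}
        # 2 8 {'field_43': []} {'field_43': [1]}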

View file

@@ -101,12 +101,15 @@ class TableFixtures:
if not table_b.field_set.filter(primary=True).exists():
self.create_text_field(table=table_b, name="primary", primary=True)
has_related_field = kwargs.pop("has_related_field", True)
link_field = FieldHandler().create_field(
user=user,
table=table_a,
type_name="link_row",
name="link",
link_row_table=table_b,
has_related_field=has_related_field,
)
return table_a, table_b, link_field
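
The new has_related_field keyword defaults to True, so existing fixtures behave as before; passing False creates the link on table_a without the reversed field on table_b, which the tests below use to check that no related history entries are written in that case. A minimal usage sketch (names as in the tests below):

table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(
    user=user, has_related_field=False
)
# Assumed consequence of the flag: there is no reversed field, so row history
# metadata ends up with linked_field_id None (see the expected entries below).
assert link_a_to_b.link_row_related_field is None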

View file

@@ -3960,6 +3960,9 @@ def test_list_row_history_for_different_fields(data_fixture, api_client):
"value": f"unnamed row {table2_row2.id}"
},
},
"linked_table_id": linkrow_field.link_row_table_id,
"linked_field_id": linkrow_field.link_row_related_field_id,
"primary_value": "unnamed row 1",
},
},
},

View file

@@ -2067,6 +2067,9 @@ def test_link_row_serialize_metadata_for_row_history(
table2_row3.id: {"value": f"unnamed row {table2_row3.id}"},
},
"type": "link_row",
"linked_table_id": field.link_row_table_id,
"linked_field_id": field.link_row_related_field_id,
"primary_value": "unnamed row 1",
}
# empty values
@@ -2085,6 +2088,9 @@ def test_link_row_serialize_metadata_for_row_history(
"id": AnyInt(),
"linked_rows": {},
"type": "link_row",
"linked_table_id": field.link_row_table_id,
"linked_field_id": field.link_row_related_field_id,
"primary_value": "unnamed row 2",
}

View file

@@ -1,4 +1,5 @@
from datetime import datetime, timezone
from unittest.mock import patch
import pytest
from freezegun import freeze_time
@@ -28,8 +29,8 @@ def test_update_rows_insert_multiple_entries_in_row_history(data_fixture):
row_handler = RowHandler()
row_one = row_handler.create_row(user, table, {name_field.id: "Original 1"})
row_two = row_handler.create_row(user, table, {name_field.id: "Original 2"})
row_one = row_handler.force_create_row(user, table, {name_field.id: "Original 1"})
row_two = row_handler.force_create_row(user, table, {name_field.id: "Original 2"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
@@ -107,7 +108,7 @@ def test_history_handler_only_save_changed_fields(data_fixture):
row_handler = RowHandler()
row = row_handler.create_row(user, table, {name_field.id: "Original 1"})
row = row_handler.force_create_row(user, table, {name_field.id: "Original 1"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
@@ -172,8 +173,8 @@ def test_update_rows_action_doesnt_insert_entries_if_row_doesnt_change(data_fixt
row_handler = RowHandler()
row_one = row_handler.create_row(user, table, {name_field.id: "Original 1"})
row_two = row_handler.create_row(user, table, {name_field.id: "Original 2"})
row_one = row_handler.force_create_row(user, table, {name_field.id: "Original 1"})
row_two = row_handler.force_create_row(user, table, {name_field.id: "Original 2"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
@@ -362,3 +363,466 @@ def test_row_history_not_recorded_with_retention_zero_days(settings, data_fixtur
)
assert RowHistory.objects.count() == 1
@pytest.mark.django_db
@pytest.mark.row_history
def test_update_rows_insert_entries_in_linked_rows_history(data_fixture):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(
user=user, database=database
)
primary_a = table_a.get_primary_field()
primary_b = table_b.get_primary_field()
link_b_to_a = link_a_to_b.link_row_related_field
row_handler = RowHandler()
row_b1, row_b2 = row_handler.force_create_rows(
user, table_b, [{primary_b.db_column: "b1"}, {primary_b.db_column: "b2"}]
)
row_a1 = row_handler.force_create_row(user, table_a, {primary_a.id: "a1"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table_a,
[
{"id": row_a1.id, link_a_to_b.db_column: [row_b1.id, row_b2.id]},
],
)
assert RowHistory.objects.count() == 3
history_entries = RowHistory.objects.order_by("table_id", "row_id").values(
"user_id",
"user_name",
"table_id",
"row_id",
"action_timestamp",
"action_type",
"before_values",
"after_values",
"fields_metadata",
)
expected_entries = [
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: []},
"after_values": {link_a_to_b.db_column: [1, 2]},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a1",
"linked_field_id": link_b_to_a.id,
"linked_table_id": table_b.id,
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}},
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 2,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}},
}
},
},
]
assert list(history_entries) == expected_entries
# Now remove one link
with freeze_time("2021-01-01 12:30"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table_a,
[
{"id": row_a1.id, link_a_to_b.db_column: [row_b2.id]},
],
)
history_entries = RowHistory.objects.order_by(
"-action_timestamp", "table_id", "row_id"
).values(
"user_id",
"user_name",
"table_id",
"row_id",
"action_timestamp",
"action_type",
"before_values",
"after_values",
"fields_metadata",
)
assert RowHistory.objects.count() == 5
last_entries = list(history_entries)[:2]
expected_entries = [
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 30, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: [1, 2]},
"after_values": {link_a_to_b.db_column: [2]},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a1",
"linked_field_id": link_b_to_a.id,
"linked_table_id": table_b.id,
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 30, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: [1]},
"after_values": {link_b_to_a.db_column: []},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}},
}
},
},
]
assert last_entries == expected_entries
@pytest.mark.django_db
@pytest.mark.row_history
def test_update_rows_dont_insert_entries_in_linked_rows_history_without_related_field(
data_fixture,
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(
user=user, database=database, has_related_field=False
)
primary_a = table_a.get_primary_field()
primary_b = table_b.get_primary_field()
row_handler = RowHandler()
row_b1, row_b2 = row_handler.force_create_rows(
user, table_b, [{primary_b.db_column: "b1"}, {primary_b.db_column: "b2"}]
)
row_a1 = row_handler.force_create_row(user, table_a, {primary_a.id: "a1"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table_a,
[
{"id": row_a1.id, link_a_to_b.db_column: [row_b1.id, row_b2.id]},
],
)
assert RowHistory.objects.count() == 1
history_entries = RowHistory.objects.values(
"user_id",
"user_name",
"table_id",
"row_id",
"action_timestamp",
"action_type",
"before_values",
"after_values",
"fields_metadata",
)
expected_entries = [
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: []},
"after_values": {link_a_to_b.db_column: [1, 2]},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a1",
"linked_field_id": None,
"linked_table_id": table_b.id,
}
},
}
]
assert list(history_entries) == expected_entries
@pytest.mark.django_db
@pytest.mark.row_history
def test_update_rows_insert_entries_in_linked_rows_history_in_multiple_tables(
data_fixture,
):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(
user=user, database=database
)
table_c, _, link_c_to_a = data_fixture.create_two_linked_tables(
user=user, database=database, table_b=table_a
)
primary_a = table_a.get_primary_field()
primary_b = table_b.get_primary_field()
primary_c = table_c.get_primary_field()
link_b_to_a = link_a_to_b.link_row_related_field
link_a_to_c = link_c_to_a.link_row_related_field
row_handler = RowHandler()
row_b1, row_b2 = row_handler.force_create_rows(
user, table_b, [{primary_b.db_column: "b1"}, {primary_b.db_column: "b2"}]
)
row_c1, row_c2 = row_handler.force_create_rows(
user, table_c, [{primary_c.db_column: "c1"}, {primary_c.db_column: "c2"}]
)
row_a1, row_a2 = row_handler.force_create_rows(
user, table_a, [{primary_a.db_column: "a1"}, {primary_a.db_column: "a2"}]
)
with freeze_time("2021-01-01 12:00"), patch(
"baserow.contrib.database.rows.signals.rows_history_updated.send"
) as mock_signal:
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table_a,
[
{
"id": row_a1.id,
link_a_to_b.db_column: [row_b1.id, row_b2.id],
link_a_to_c.db_column: [row_c1.id, row_c2.id],
},
{
"id": row_a2.id,
link_a_to_b.db_column: [row_b1.id, row_b2.id],
link_a_to_c.db_column: [row_c1.id, row_c2.id],
},
],
)
assert RowHistory.objects.count() == 6
history_entries = RowHistory.objects.order_by("table_id", "row_id").values(
"user_id",
"user_name",
"table_id",
"row_id",
"action_timestamp",
"action_type",
"before_values",
"after_values",
"fields_metadata",
)
# Signal should be called once per table with row history entries for that table
entry_ids = [rhe.id for rhe in RowHistory.objects.order_by("table_id", "row_id")]
assert mock_signal.call_count == 3
per_table_args = {}
for args in mock_signal.call_args_list:
per_table_args[args[1]["table_id"]] = [
rhe.id for rhe in args[1]["row_history_entries"]
]
assert len(per_table_args) == 3
assert len(entry_ids) == 6
# table_a
assert table_a.id in per_table_args
assert per_table_args[table_a.id] == entry_ids[:2]
# table_b
assert table_b.id in per_table_args
assert per_table_args[table_b.id] == entry_ids[2:4]
# table_c
assert table_c.id in per_table_args
assert per_table_args[table_c.id] == entry_ids[4:]
expected_entries = [
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: [], link_a_to_c.db_column: []},
"after_values": {
link_a_to_b.db_column: [1, 2],
link_a_to_c.db_column: [1, 2],
},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a1",
"linked_field_id": link_b_to_a.id,
"linked_table_id": table_b.id,
},
link_a_to_c.db_column: {
"id": link_a_to_c.id,
"type": "link_row",
"linked_rows": {"1": {"value": "c1"}, "2": {"value": "c2"}},
"primary_value": "a1",
"linked_field_id": link_c_to_a.id,
"linked_table_id": table_c.id,
},
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 2,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: [], link_a_to_c.db_column: []},
"after_values": {
link_a_to_b.db_column: [1, 2],
link_a_to_c.db_column: [1, 2],
},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a2",
"linked_field_id": link_b_to_a.id,
"linked_table_id": table_b.id,
},
link_a_to_c.db_column: {
"id": link_a_to_c.id,
"type": "link_row",
"linked_rows": {"1": {"value": "c1"}, "2": {"value": "c2"}},
"primary_value": "a2",
"linked_field_id": link_c_to_a.id,
"linked_table_id": table_c.id,
},
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1, 2]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}, "2": {"value": "a2"}},
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 2,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1, 2]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}, "2": {"value": "a2"}},
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_c.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_c_to_a.db_column: []},
"after_values": {link_c_to_a.db_column: [1, 2]},
"fields_metadata": {
link_c_to_a.db_column: {
"id": link_c_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}, "2": {"value": "a2"}},
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_c.id,
"row_id": 2,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_c_to_a.db_column: []},
"after_values": {link_c_to_a.db_column: [1, 2]},
"fields_metadata": {
link_c_to_a.db_column: {
"id": link_c_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}, "2": {"value": "a2"}},
}
},
},
]
assert list(history_entries) == expected_entries

View file

@@ -0,0 +1,8 @@
{
"type": "feature",
"message": "Show relationship changes (added or removed) in the related row's change history.",
"domain": "database",
"issue_number": 3264,
"bullet_points": [],
"created_at": "2025-03-18"
}