
Merge branch 'develop' into 'master'

Row history entries creation bugfix

See merge request 
Davide Silvestri 2025-03-19 12:09:48 +01:00
commit c0f67712d6
15 changed files with 284 additions and 40 deletions

View file

@ -226,6 +226,10 @@ class UserMentionInRichTextFieldNotificationType(
updated_field_ids: Optional[List[int]] = None,
) -> Iterable[Tuple["Field", "GeneratedTableModel", List[AbstractUser]]]:
row_id_map = {row.id: row for row in rows}
if not rows:
return
model = rows[0]._meta.model
table = model.baserow_table
workspace_user_ids = set(

View file

@ -838,7 +838,7 @@ class UpdateRowsActionType(UndoableActionType):
table.database.id,
table.database.name,
[row.id for row in updated_rows],
rows_values,
result.updated_rows_values,
result.original_rows_values_by_id,
result.updated_fields_metadata_by_row_id,
)

View file

@ -340,11 +340,11 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
values = {}
for field_id in updated_field_ids:
field = row._field_objects[field_id]
field_type = field["type"]
field_object = row.get_field_object_by_id(field_id, include_trash=True)
field_type = field_object["type"]
if field_type.read_only:
continue
field_name = f"field_{field_id}"
field_name = field_object["name"]
field_value = field_type.get_internal_value_from_db(row, field_name)
values[field_name] = field_value
return values
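To make the intent of that helper concrete, here is a small hedged sketch of the values it now produces (the field id, option id and row are invented; the key/value shape matches the single-select row history test added further down in this diff):
# Hypothetical illustration only. Field 42 is a single select: the API request carried
# the label "Option 2", but the database stores the selected option's id, and that
# internal value is what get_internal_values_for_fields() returns, keyed by the
# field's internal column name.
values = row_handler.get_internal_values_for_fields(updated_row, [42])
assert values == {"field_42": 1337}  # 1337 being the made-up id of "Option 2"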
@ -1207,21 +1207,24 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
)
rows_values_refreshed_from_db = True
rows_created.send(
self,
rows=rows_to_return,
before=before_row,
user=user,
table=table,
model=model,
rows_values_refreshed_from_db=rows_values_refreshed_from_db,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
prepared_rows_values=prepared_rows_values,
m2m_change_tracker=m2m_change_tracker,
fields=updated_fields,
dependant_fields=dependant_fields,
)
# rows_to_return might be empty if all the values were invalid, so don't
# send the signal or run callbacks on an empty list.
if rows_to_return:
rows_created.send(
self,
rows=rows_to_return,
before=before_row,
user=user,
table=table,
model=model,
rows_values_refreshed_from_db=rows_values_refreshed_from_db,
send_realtime_update=send_realtime_update,
send_webhook_events=send_webhook_events,
prepared_rows_values=prepared_rows_values,
m2m_change_tracker=m2m_change_tracker,
fields=updated_fields,
dependant_fields=dependant_fields,
)
return CreatedRowsData(rows_to_return, report)
@ -2029,8 +2032,16 @@ class RowHandler(metaclass=baserow_trace_methods(tracer)):
fields_metadata_by_row_id = self.get_fields_metadata_for_rows(
updated_rows_to_return, updated_fields, fields_metadata_by_row_id
)
updated_rows_values = [
{
"id": updated_row.id,
**self.get_internal_values_for_fields(updated_row, updated_field_ids),
}
for updated_row in updated_rows_to_return
]
updated_rows = UpdatedRowsData(
updated_rows_to_return,
updated_rows_values,
original_row_values_by_id,
fields_metadata_by_row_id,
report,

View file

@ -29,6 +29,7 @@ RowId = NewType("RowId", int)
class UpdatedRowsData(NamedTuple):
updated_rows: list[GeneratedTableModelForUpdate]
updated_rows_values: list[RowValues]
original_rows_values_by_id: dict[RowId, RowValues]
updated_fields_metadata_by_row_id: dict[RowId, FieldsMetadata]
errors: dict[int, dict[str, Any]] | None = None
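Because the new element sits in the middle of the tuple, anything that builds an UpdatedRowsData positionally has to pass the refreshed values as well. A minimal sketch with made-up data; the new attribute is what UpdateRowsActionType now forwards instead of the raw request payload, as in the actions.py hunk earlier in this diff:
# Illustration only: the row id, field id and values are invented.
result = UpdatedRowsData(
    updated_rows=[row],
    updated_rows_values=[{"id": row.id, "field_42": 1337}],
    original_rows_values_by_id={row.id: {"field_42": 1336}},
    updated_fields_metadata_by_row_id={row.id: {"field_42": {"id": 42, "type": "single_select"}}},
)
result.updated_rows_values  # what row history / undo-redo now records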

View file

@ -78,6 +78,12 @@ tracer = trace.get_tracer(__name__)
User = get_user_model()
class FieldObject(TypedDict):
type: FieldType
field: Field
name: str
def get_row_needs_background_update_index(table):
return models.Index(
fields=[ROW_NEEDS_BACKGROUND_UPDATE_COLUMN_NAME],
@ -501,12 +507,6 @@ class TableModelManager(TableModelTrashAndObjectsManager):
return super().get_queryset().filter(trashed=False)
class FieldObject(TypedDict):
type: FieldType
field: Field
name: str
class GeneratedTableModel(HierarchicalModelMixin, models.Model):
"""
Mixed into Model classes which have been generated by Baserow.
@ -602,7 +602,9 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
]
@classmethod
def get_field_object(cls, field_name: str, include_trash: bool = False):
def get_field_object(
cls, field_name: str, include_trash: bool = False
) -> FieldObject:
field_objects = cls.get_field_objects(include_trash)
try:
@ -611,7 +613,9 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
raise ValueError(f"Field {field_name} not found.")
@classmethod
def get_field_object_by_id(cls, field_id: int, include_trash: bool = False):
def get_field_object_by_id(
cls, field_id: int, include_trash: bool = False
) -> FieldObject:
field_objects = cls.get_field_objects(include_trash)
try:
@ -622,7 +626,7 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
@classmethod
def get_field_object_by_user_field_name(
cls, field_name: str, include_trash: bool = False
):
) -> FieldObject:
field_objects = cls.get_field_objects(include_trash)
try:
@ -631,7 +635,7 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
raise ValueError(f"Field {field_name} not found.")
@classmethod
def get_field_objects(cls, include_trash: bool = False):
def get_field_objects(cls, include_trash: bool = False) -> List[FieldObject]:
field_objects = cls._field_objects.values()
if include_trash:
field_objects = itertools.chain(
@ -640,7 +644,7 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
return field_objects
@classmethod
def get_field_objects_to_always_update(cls):
def get_field_objects_to_always_update(cls) -> List[FieldObject]:
field_objects = cls.get_field_objects(True)
return [
field_object
@ -693,11 +697,11 @@ class GeneratedTableModel(HierarchicalModelMixin, models.Model):
yield field
@classmethod
def get_fields(cls, include_trash=False):
def get_fields(cls, include_trash=False) -> List[Field]:
return [o["field"] for o in cls.get_field_objects(include_trash)]
@classmethod
def get_primary_field(self):
def get_primary_field(self) -> Field:
field_objects = self.get_field_objects()
try:

View file

@ -3802,7 +3802,7 @@ def test_list_row_history_for_different_fields(data_fixture, api_client):
HTTP_AUTHORIZATION=f"JWT {jwt_token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == {
expected_output = {
"count": 1,
"next": None,
"previous": None,
@ -3854,10 +3854,28 @@ def test_list_row_history_for_different_fields(data_fixture, api_client):
f"field_{boolean_field.id}": True,
f"field_{phone_field.id}": "123456790",
f"field_{date_field.id}": "2023-06-07",
f"field_{datetime_field.id}": "2023-06-06T13:00",
f"field_{datetime_field.id}": "2023-06-06 13:00:00+00:00",
f"field_{file_field.id}": [
{"name": file1.name, "visible_name": "file 1"},
{"name": file2.name, "visible_name": "file 2"},
{
"image_height": None,
"image_width": None,
"is_image": True,
"mime_type": "text/plain",
"name": AnyStr(),
"size": 100,
"uploaded_at": AnyStr(),
"visible_name": "file 1",
},
{
"image_height": None,
"image_width": None,
"is_image": True,
"mime_type": "text/plain",
"name": AnyStr(),
"size": 100,
"uploaded_at": AnyStr(),
"visible_name": "file 2",
},
],
f"field_{single_select_field.id}": option_b.id,
f"field_{multiple_select_field.id}": [
@ -3984,6 +4002,7 @@ def test_list_row_history_for_different_fields(data_fixture, api_client):
},
],
}
assert response.json() == expected_output
@pytest.mark.django_db

View file

@ -23,11 +23,14 @@ from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.models import SelectOption, SingleSelectField
from baserow.contrib.database.fields.registries import field_type_registry
from baserow.contrib.database.fields.utils import DeferredForeignKeyUpdater
from baserow.contrib.database.rows.actions import UpdateRowsActionType
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.rows.history import RowHistoryHandler
from baserow.contrib.database.table.models import GeneratedTableModel, Table
from baserow.contrib.database.views.handler import ViewHandler
from baserow.contrib.database.views.models import GridView
from baserow.contrib.database.views.registries import view_filter_type_registry
from baserow.core.action.registries import action_type_registry
from baserow.core.handler import CoreHandler
from baserow.core.registries import ImportExportConfig
from baserow.test_utils.helpers import AnyInt
@ -1844,3 +1847,49 @@ def test_single_select_is_none_of_filter_type(field_name, data_fixture):
ids = [r.id for r in handler.apply_filters(grid_view, model.objects.all()).all()]
# only the empty row is selected
assert ids == [rows[4].id]
@pytest.mark.django_db
@pytest.mark.field_single_select
@pytest.mark.row_history
def test_single_select_serialize_metadata_for_row_history_using_option_values(
data_fixture, django_assert_num_queries
):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field_handler = FieldHandler()
field = field_handler.create_field(
user=user,
table=table,
type_name="single_select",
name="Single select",
select_options=[
{"value": "Option 1", "color": "blue"},
{"value": "Option 2", "color": "red"},
],
)
model = table.get_model()
row_handler = RowHandler()
select_options = field.select_options.order_by("id").all()
option_1_id = select_options[0].id
option_2_id = select_options[1].id
row = row_handler.create_row(
user=user,
table=table,
model=model,
values={f"field_{field.id}": "Option 1"},
)
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table,
[{"id": row.id, f"field_{field.id}": "Option 2"}],
model=model,
)
entries = RowHistoryHandler.list_row_history(
table.database.workspace, table.id, row.id
)
assert len(entries) == 1
assert entries[0].before_values == {field.db_column: option_1_id}
assert entries[0].after_values == {field.db_column: option_2_id}

View file

@ -826,3 +826,104 @@ def test_update_rows_insert_entries_in_linked_rows_history_in_multiple_tables(
]
assert list(history_entries) == expected_entries
@pytest.mark.django_db
@pytest.mark.row_history
def test_update_rows_insert_entries_in_linked_rows_history_with_values(data_fixture):
user = data_fixture.create_user()
database = data_fixture.create_database_application(user=user)
table_a, table_b, link_a_to_b = data_fixture.create_two_linked_tables(
user=user, database=database
)
primary_a = table_a.get_primary_field()
primary_b = table_b.get_primary_field()
link_b_to_a = link_a_to_b.link_row_related_field
row_handler = RowHandler()
row_b1, row_b2 = row_handler.force_create_rows(
user, table_b, [{primary_b.db_column: "b1"}, {primary_b.db_column: "b2"}]
).created_rows
row_a1 = row_handler.force_create_row(user, table_a, {primary_a.id: "a1"})
with freeze_time("2021-01-01 12:00"):
action_type_registry.get_by_type(UpdateRowsActionType).do(
user,
table_a,
[
{"id": row_a1.id, link_a_to_b.db_column: ["b1", "b2"]},
],
)
assert RowHistory.objects.count() == 3
history_entries = RowHistory.objects.order_by("table_id", "row_id").values(
"user_id",
"user_name",
"table_id",
"row_id",
"action_timestamp",
"action_type",
"before_values",
"after_values",
"fields_metadata",
)
expected_entries = [
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_a.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_a_to_b.db_column: []},
"after_values": {link_a_to_b.db_column: [1, 2]},
"fields_metadata": {
link_a_to_b.db_column: {
"id": link_a_to_b.id,
"type": "link_row",
"linked_rows": {"1": {"value": "b1"}, "2": {"value": "b2"}},
"primary_value": "a1",
"linked_field_id": link_b_to_a.id,
"linked_table_id": table_b.id,
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 1,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}},
}
},
},
{
"user_id": user.id,
"user_name": user.first_name,
"table_id": table_b.id,
"row_id": 2,
"action_timestamp": datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc),
"action_type": "update_rows",
"before_values": {link_b_to_a.db_column: []},
"after_values": {link_b_to_a.db_column: [1]},
"fields_metadata": {
link_b_to_a.db_column: {
"id": link_b_to_a.id,
"type": "link_row",
"linked_rows": {"1": {"value": "a1"}},
}
},
},
]
assert list(history_entries) == expected_entries

View file

@ -1797,3 +1797,13 @@ def test_can_move_rows_and_formulas_are_updated_correctly(data_fixture):
row_a2.refresh_from_db()
assert getattr(row_a1, lookup_a.db_column) == "b1"
assert getattr(row_a2, lookup_a.db_column) == "b2"
@pytest.mark.django_db
def test_rows_created_is_not_sent_if_there_are_no_rows_to_create(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table()
with patch("baserow.contrib.database.rows.signals.rows_created.send") as mock:
RowHandler().force_create_rows(user, table, [])
assert mock.call_count == 0

View file

@ -58,6 +58,7 @@
* [Builder] Fix crash when creating a builder application after deleting another one
* Fix error when syncing a table with a trashed field.
* [Database] Fix the progress bar percentage when importing applications into the workspace.
* [Database] Fix select option values in row history after API changes.
### Refactors
* Upgrade vuelidate [#2805](https://gitlab.com/baserow/baserow/-/issues/2805)

View file

@ -0,0 +1,8 @@
{
"type": "bug",
"message": "Fix select option values in row history after API changes",
"domain": "database",
"issue_number": 2509,
"bullet_points": [],
"created_at": "2025-03-18"
}

View file

@ -67,11 +67,10 @@
@shown="toggleExpanded(item.id)"
@hidden="toggleExpanded(item.id)"
>
<ThemeProvider>
<ThemeProvider class="menu-element__sub-links">
<div
v-for="child in item.children"
:key="child.id"
class="menu-element__sub-links"
:style="getStyleOverride('menu')"
>
<ABLink
@ -205,7 +204,7 @@ export default {
contextRef.hide()
} else {
const containerRef = event.currentTarget
contextRef.show(containerRef, 'bottom', 'left', 0)
contextRef.show(containerRef, 'bottom', 'left', 10)
}
},
getItemUrl(item) {

View file

@ -11,6 +11,7 @@ import { FONT_WEIGHTS } from '@baserow/modules/builder/fontWeights'
import {
resolveColor,
colorRecommendation,
colorContrast,
} from '@baserow/modules/core/utils/colors'
import {
WIDTHS_NEW,
@ -50,6 +51,12 @@ export class ThemeStyle {
)
}
addColorContrastIfExists(theme, propName, styleName) {
return this.addIfExists(theme, propName, styleName, (v) =>
colorContrast(resolveColor(v, this.colorVariables))
)
}
addFontFamilyIfExists(theme, propName, styleName) {
return this.addIfExists(theme, propName, styleName, (v) => {
const fontFamilyType = this.$registry.get('fontFamily', v)
@ -647,6 +654,16 @@ export class PageThemeConfigBlockType extends ThemeConfigBlockType {
$registry: this.app.$registry,
})
style.addColorIfExists(theme, 'page_background_color')
style.addColorRecommendationIfExists(
theme,
'page_background_color',
'--page-background-color-complement'
)
style.addColorContrastIfExists(
theme,
'page_background_color',
'--page-background-color-contrast'
)
style.addIfExists(
theme,
'page_background_file',

View file

@ -61,6 +61,10 @@
width: 100%;
gap: 10px;
padding: 5px;
border-radius: 6px;
// We want the same color as the page background
background-color: var(--page-background-color, #fff);
}
.menu-element__sub-link {
@ -71,5 +75,5 @@
}
.menu-element__sub-link:hover {
background: $palette-neutral-100;
background: var(--page-background-color-contrast, $palette-neutral-100);
}

View file

@ -263,3 +263,19 @@ export const colorRecommendation = (hexColor) => {
return 'white'
}
}
/**
* Lighten or darken the given color depending on its luminance, so the result
* contrasts with the original.
* @param {string} hexColor The hex string of the color.
* @param {number} [amount=10] How far to shift the luminance, in percent.
* @returns The contrasted color.
*/
export const colorContrast = (hexColor, amount = 10) => {
// l is the luminance
const hsl = conversionsMap.hex.hsl(hexColor)
if (hsl.l > 0.5) {
hsl.l -= amount / 100
} else {
hsl.l += amount / 100
}
return conversionsMap.hsl.hex(hsl)
}