From 96d4d1bc654f2465b27f08c12823de9af4db1246 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9mie=20Pardou?= <571533+jrmi@users.noreply.github.com> Date: Mon, 3 Nov 2025 14:22:46 +0100 Subject: [PATCH] feat: local baserow services return human field names (#4143) See related issue. --- .../data_providers/data_provider_types.py | 6 +- .../contrib/automation/nodes/handler.py | 6 +- .../data_providers/data_provider_types.py | 9 +- .../contrib/builder/data_sources/service.py | 33 ++--- .../builder/workflow_actions/service.py | 20 +--- .../integrations/core/service_types.py | 7 +- .../local_baserow/service_types.py | 59 ++++++++- .../src/baserow/core/services/registries.py | 37 ++++++ .../test_data_provider_types.py | 6 +- .../automation/nodes/test_node_handler.py | 10 +- .../data_sources/test_data_source_views.py | 88 +++++++------- .../test_public_data_source_views.py | 38 +++--- .../api/domains/test_domain_public_views.py | 30 +++-- .../elements/test_record_selector_element.py | 2 +- .../test_workflow_actions_views.py | 20 ++-- .../data_sources/test_data_source_handler.py | 20 ++-- .../data_sources/test_data_source_service.py | 88 ++------------ .../core/test_iterator_service_type.py | 20 +++- .../test_get_row_service_type.py | 7 +- .../test_list_rows_service_type.py | 9 +- .../test_upsert_row_service_type.py | 9 +- .../baserow/core/service/test_service_type.py | 75 ++++++++++++ ...content_with_human_property_names_ins.json | 8 ++ .../builder/elements/test_element_types.py | 2 +- .../local_baserow/test_user_source_types.py | 4 +- .../elements/components/TableElement.vue | 3 + .../modules/builder/dataProviderTypes.js | 113 ++++++------------ .../modules/builder/elementTypeMixins.js | 15 +++ web-frontend/modules/builder/elementTypes.js | 106 ++++++++++++++++ .../builder/mixins/collectionElement.js | 6 +- .../builder/mixins/collectionElementForm.js | 4 - .../modules/builder/store/elementContent.js | 102 ++-------------- .../modules/builder/workflowActionTypes.js 
| 8 ++ web-frontend/modules/core/formula/index.js | 1 + web-frontend/modules/core/serviceTypes.js | 8 ++ .../modules/core/workflowActionTypes.js | 8 ++ .../integrations/localBaserow/serviceTypes.js | 36 +++++- .../components/RecordSelectorElement.spec.js | 58 +++++++-- 38 files changed, 634 insertions(+), 447 deletions(-) create mode 100644 changelog/entries/unreleased/breaking_change/4135_data_source_now_return_content_with_human_property_names_ins.json diff --git a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py index a7bb1ed1fd..15de4d6f46 100644 --- a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py +++ b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py @@ -45,7 +45,9 @@ def get_data_chunk( ) raise InvalidFormulaContext(message) from exc else: - return get_value_at_path(previous_node_results, rest) + return previous_node.service.get_type().get_value_at_path( + previous_node.service.specific, previous_node_results, rest + ) def import_path(self, path, id_mapping, **kwargs): """ @@ -100,7 +102,7 @@ def get_data_chunk( ) raise InvalidFormulaContext(message) from exc - current_item = parent_node_results[current_iteration] + current_item = parent_node_results["results"][current_iteration] data = {"index": current_iteration, "item": current_item} return get_value_at_path(data, rest) diff --git a/backend/src/baserow/contrib/automation/nodes/handler.py b/backend/src/baserow/contrib/automation/nodes/handler.py index bf0dd864e2..fc14aebf00 100644 --- a/backend/src/baserow/contrib/automation/nodes/handler.py +++ b/backend/src/baserow/contrib/automation/nodes/handler.py @@ -369,11 +369,7 @@ def dispatch_node( return if children := node.get_children(): - node_data = ( - dispatch_result.data - if isinstance(dispatch_result.data, list) - else [dispatch_result.data] - ) + node_data = dispatch_result.data["results"] 
if dispatch_context.simulate_until_node: iterations = [0] diff --git a/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py b/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py index 2193bdfd36..26cf684c8d 100644 --- a/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py +++ b/backend/src/baserow/contrib/builder/data_providers/data_provider_types.py @@ -180,7 +180,9 @@ def get_data_chunk(self, dispatch_context: BuilderDispatchContext, path: List[st if data_source.service.get_type().returns_list: dispatch_result = dispatch_result["results"] - return get_value_at_path(dispatch_result, rest) + return data_source.service.get_type().get_value_at_path( + data_source.service.specific, dispatch_result, rest + ) def import_path(self, path, id_mapping, **kwargs): """ @@ -482,8 +484,11 @@ def get_data_chunk(self, dispatch_context: DispatchContext, path: List[str]): cache_key = self.get_dispatch_action_cache_key( dispatch_id, workflow_action.id ) - return get_value_at_path(cache.get(cache_key), rest) + return workflow_action.service.get_type().get_value_at_path( + workflow_action.service.specific, cache.get(cache_key), rest + ) else: + # Frontend actions return get_value_at_path(previous_action_results[previous_action_id], rest) def post_dispatch( diff --git a/backend/src/baserow/contrib/builder/data_sources/service.py b/backend/src/baserow/contrib/builder/data_sources/service.py index 990fb57c3d..e9d3cc82ac 100644 --- a/backend/src/baserow/contrib/builder/data_sources/service.py +++ b/backend/src/baserow/contrib/builder/data_sources/service.py @@ -278,18 +278,6 @@ def delete_data_source(self, user: AbstractUser, data_source: DataSourceForUpdat self, data_source_id=data_source.id, page=page, user=user ) - def remove_unused_field_names( - self, - row: Dict[str, Any], - field_names: List[str], - ) -> Dict[str, Any]: - """ - Given a row dictionary, return a version of it that only contains keys - existing in the 
field_names list. - """ - - return {key: value for key, value in row.items() if key in field_names} - def dispatch_data_sources( self, user, @@ -330,22 +318,17 @@ def dispatch_data_sources( new_results[data_source.id] = results[data_source.id] continue - field_names = dispatch_context.public_allowed_properties.get( + allowed_field_names = dispatch_context.public_allowed_properties.get( "external", {} ).get(data_source.service.id, []) - if data_source.service.get_type().returns_list: - new_results[data_source.id] = { - **results[data_source.id], - "results": [ - self.remove_unused_field_names(row, field_names) - for row in results[data_source.id]["results"] - ], - } - else: - new_results[data_source.id] = self.remove_unused_field_names( - results[data_source.id], field_names - ) + new_results[ + data_source.id + ] = data_source.service.get_type().sanitize_result( + data_source.service.specific, + results[data_source.id], + allowed_field_names, + ) return new_results diff --git a/backend/src/baserow/contrib/builder/workflow_actions/service.py b/backend/src/baserow/contrib/builder/workflow_actions/service.py index dc02ca5f8d..e6a1b0b5e6 100644 --- a/backend/src/baserow/contrib/builder/workflow_actions/service.py +++ b/backend/src/baserow/contrib/builder/workflow_actions/service.py @@ -291,18 +291,6 @@ def order_workflow_actions( return full_order - def remove_unused_field_names( - self, - row: dict[str, Any], - field_names: List[str], - ) -> dict[str, Any]: - """ - Given a row dictionary, return a version of it that only contains keys - existing in the field_names list. 
- """ - - return {key: value for key, value in row.items() if key in field_names} - def dispatch_action( self, user, @@ -338,11 +326,15 @@ def dispatch_action( ) # Remove unfiltered fields - field_names = dispatch_context.public_allowed_properties.get( + allowed_field_names = dispatch_context.public_allowed_properties.get( "external", {} ).get(workflow_action.service.id, []) + data = workflow_action.service.get_type().sanitize_result( + workflow_action.service.specific, result.data, allowed_field_names + ) + return DispatchResult( - data=self.remove_unused_field_names(result.data, field_names), + data=data, status=result.status, ) diff --git a/backend/src/baserow/contrib/integrations/core/service_types.py b/backend/src/baserow/contrib/integrations/core/service_types.py index d4eca7e76c..af68312fa1 100644 --- a/backend/src/baserow/contrib/integrations/core/service_types.py +++ b/backend/src/baserow/contrib/integrations/core/service_types.py @@ -66,6 +66,7 @@ from baserow.core.services.models import Service from baserow.core.services.registries import ( DispatchTypes, + ListServiceTypeMixin, ServiceType, TriggerServiceTypeMixin, ) @@ -1566,7 +1567,7 @@ def export_prepared_values( return values -class CoreIteratorServiceType(ServiceType): +class CoreIteratorServiceType(ListServiceTypeMixin, ServiceType): type = "iterator" model_class = CoreIteratorService dispatch_types = DispatchTypes.ACTION @@ -1609,7 +1610,7 @@ def generate_schema( allowed_fields is None or "items" in allowed_fields ): schema_builder = SchemaBuilder() - schema_builder.add_object(service.sample_data["data"]) + schema_builder.add_object(service.sample_data["data"]["results"]) schema = schema_builder.to_schema() # Sometimes there is no items if the array is empty @@ -1643,7 +1644,7 @@ def dispatch_data( resolved_values: Dict[str, Any], dispatch_context: DispatchContext, ) -> Any: - return resolved_values["source"] + return {"results": resolved_values["source"], "has_next_page": False} def 
dispatch_transform( self, diff --git a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py index c6317e8f21..74c58fbb05 100644 --- a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py +++ b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py @@ -203,6 +203,58 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType): class SerializedDict(ServiceDict): table_id: int + def _convert_allowed_field_names(self, service, allowed_fields): + """ + Convert the `field_x` to human fields. + """ + + mapping = { + field_obj["field"].db_column: field_obj["field"].name + for field_obj in self.get_table_field_objects(service) + } + return [mapping.get(f, f) for f in allowed_fields] + + def sanitize_result(self, service, result, allowed_field_names): + """ + Remove the non public fields from the result. + """ + + allowed_field_names = self._convert_allowed_field_names( + service, allowed_field_names + ) + + return super().sanitize_result(service, result, allowed_field_names) + + def get_value_at_path(self, service: Service, context: Any, path: List[str]): + """ + Convert the field name to a human name. 
+ """ + + if self.returns_list: + if len(path) < 2: + return super().get_value_at_path(service, context, path) + + row_index, db_column, *rest = path + else: + if len(path) < 1: + return super().get_value_at_path(service, context, path) + + db_column, *rest = path + + human_name = db_column + + for field_obj in self.get_table_field_objects(service) or []: + if field_obj["field"].db_column == db_column: + human_name = field_obj["field"].name + break + + if self.returns_list: + return super().get_value_at_path( + service, context, [row_index, human_name, *rest] + ) + else: + return super().get_value_at_path(service, context, [human_name, *rest]) + def build_queryset( self, service: LocalBaserowTableService, @@ -1052,6 +1104,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult: RowSerializer, is_response=True, field_ids=field_ids, + user_field_names=True, ) return DispatchResult( @@ -1591,6 +1644,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult: RowSerializer, is_response=True, field_ids=field_ids, + user_field_names=True, ) serialized_row = serializer(dispatch_data["data"]).data @@ -2081,6 +2135,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult: RowSerializer, is_response=True, field_ids=field_ids, + user_field_names=True, ) serialized_row = serializer(dispatch_data["data"]).data @@ -2243,9 +2298,7 @@ def _handle_signal( **kwargs, ): serializer = get_row_serializer_class( - model, - RowSerializer, - is_response=True, + model, RowSerializer, is_response=True, user_field_names=True ) data_to_process = { diff --git a/backend/src/baserow/core/services/registries.py b/backend/src/baserow/core/services/registries.py index b4d848c4ef..d5605a1e9c 100644 --- a/backend/src/baserow/core/services/registries.py +++ b/backend/src/baserow/core/services/registries.py @@ -33,6 +33,7 @@ ) from baserow.core.services.dispatch_context import DispatchContext from baserow.core.services.types 
import DispatchResult, FormulaToResolve +from baserow.core.utils import get_value_at_path from .exceptions import ( DispatchException, @@ -311,6 +312,14 @@ def resolve_service_formulas( return resolved_values + def get_value_at_path(self, service: Service, context: Any, path: List[str]): + """ + Offers the opportunity to hook into way data are extracted from the context for + a given path. + """ + + return get_value_at_path(context, path) + def dispatch_transform( self, data: Any, @@ -384,6 +393,34 @@ def dispatch( return serialized_data + def remove_unused_field_names( + self, + row: Dict[str, Any], + field_names: List[str], + ) -> Dict[str, Any]: + """ + Given a row dictionary, return a version of it that only contains keys + existing in the field_names list. + """ + + return {key: value for key, value in row.items() if key in field_names} + + def sanitize_result(self, service, result, allowed_field_names): + """ + Remove the non public fields from the result. + """ + + if self.returns_list: + return { + **result, + "results": [ + self.remove_unused_field_names(row, allowed_field_names) + for row in result["results"] + ], + } + else: + return self.remove_unused_field_names(result, allowed_field_names) + def get_schema_name(self, service: Service) -> str: """ The default schema name added to the `title` in a JSON Schema object. 
diff --git a/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py b/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py index 05a483851f..8ad804c4b2 100644 --- a/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py +++ b/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py @@ -104,11 +104,13 @@ def test_current_iteration_data_provider_get_data_chunk(data_fixture): dispatch_context = AutomationDispatchContext(workflow) dispatch_context.after_dispatch( - trigger, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}]) + trigger, + DispatchResult(data={"results": [{"field_1": "Horse"}, {"field_1": "Duck"}]}), ) dispatch_context.after_dispatch( - iterator, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}]) + iterator, + DispatchResult(data={"results": [{"field_1": "Horse"}, {"field_1": "Duck"}]}), ) dispatch_context.set_current_iteration(iterator, 0) diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py b/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py index dfa62b010b..8551691c6f 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py @@ -314,7 +314,7 @@ def test_simulate_dispatch_node_action(data_fixture): assert action_node.service.sample_data == { "data": { - f"field_{fields[0].id}": "A new row", + fields[0].name: "A new row", "id": row.id, "order": str(row.order), }, @@ -346,7 +346,7 @@ def test_simulate_dispatch_node_action_with_update_sample_data( assert action_node.service.sample_data == { "data": { - f"field_{fields[0].id}": "A new row", + fields[0].name: "A new row", "id": AnyInt(), "order": AnyStr(), }, @@ -387,7 +387,7 @@ def test_simulate_dispatch_node_action_with_simulate_until_node(data_fixture): row = table.get_model().objects.first() assert 
action_node_1.service.sample_data == { "data": { - f"field_{fields[0].id}": "A new row", + fields[0].name: "A new row", "id": row.id, "order": str(row.order), }, @@ -544,9 +544,9 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): node_c_2.refresh_from_db() node_c_2.service.refresh_from_db() - field_id = node_c_2.service.specific.table.field_set.all()[0].id + field = node_c_2.service.specific.table.field_set.all()[0] assert node_c_2.service.sample_data == { - "data": {f"field_{field_id}": "cherry", "id": AnyInt(), "order": AnyStr()}, + "data": {field.name: "cherry", "id": AnyInt(), "order": AnyStr()}, "output_uid": AnyStr(), "status": 200, } diff --git a/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py b/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py index 46f77a92bc..f0aef11f75 100644 --- a/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py +++ b/backend/tests/baserow/contrib/builder/api/data_sources/test_data_source_views.py @@ -836,8 +836,8 @@ def test_dispatch_data_source(api_client, data_fixture): assert response.json() == { "id": 2, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } @@ -1039,14 +1039,14 @@ def test_dispatch_data_source_with_adhoc_filters(api_client, data_fixture): { "id": 1, "order": AnyStr(), - filterable_field.db_column: "Peter", - private_field.db_column: "111", + filterable_field.name: "Peter", + private_field.name: "111", }, { "id": 4, "order": AnyStr(), - filterable_field.db_column: "Jérémie", - private_field.db_column: "444", + filterable_field.name: "Jérémie", + private_field.name: "444", }, ], "has_next_page": False, @@ -1138,26 +1138,26 @@ def test_dispatch_data_source_with_adhoc_sortings(api_client, data_fixture): { "id": 3, "order": AnyStr(), - sortable_field.db_column: "Tsering", - private_field.db_column: AnyStr(), + 
sortable_field.name: "Tsering", + private_field.name: AnyStr(), }, { "id": 1, "order": AnyStr(), - sortable_field.db_column: "Peter", - private_field.db_column: AnyStr(), + sortable_field.name: "Peter", + private_field.name: AnyStr(), }, { "id": 4, "order": AnyStr(), - sortable_field.db_column: "Jérémie", - private_field.db_column: AnyStr(), + sortable_field.name: "Jérémie", + private_field.name: AnyStr(), }, { "id": 2, "order": AnyStr(), - sortable_field.db_column: "Afonso", - private_field.db_column: AnyStr(), + sortable_field.name: "Afonso", + private_field.name: AnyStr(), }, ], "has_next_page": False, @@ -1237,8 +1237,8 @@ def test_dispatch_data_source_with_adhoc_search(api_client, data_fixture): { "id": 1, "order": AnyStr(), - searchable_field.db_column: "Peter", - private_field.db_column: AnyStr(), + searchable_field.name: "Peter", + private_field.name: AnyStr(), } ], "has_next_page": False, @@ -1373,7 +1373,7 @@ def test_dispatch_data_source_with_non_collection_element(api_client, data_fixtu ) assert response.status_code == HTTP_200_OK assert response.json() == { - "results": [{"id": row.id, "order": AnyStr(), field.db_column: "a"}], + "results": [{"id": row.id, "order": AnyStr(), field.name: "a"}], "has_next_page": False, } @@ -1469,8 +1469,8 @@ def test_dispatch_data_source_using_formula(api_client, data_fixture): assert response.json() == { "id": 2, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } @@ -1673,20 +1673,20 @@ def test_dispatch_data_sources(api_client, data_fixture): assert response.status_code == HTTP_200_OK assert response.json() == { str(data_source.id): { - fields[1].db_column: "Orange", - fields[0].db_column: "Audi", + fields[1].name: "Orange", + fields[0].name: "Audi", "id": rows[1].id, "order": AnyStr(), }, str(data_source1.id): { - fields[1].db_column: "Green", - fields[0].db_column: "2Cv", + fields[1].name: "Green", + fields[0].name: "2Cv", "id": 
rows[2].id, "order": AnyStr(), }, str(data_source2.id): { - fields[1].db_column: "Dark", - fields[0].db_column: "Tesla", + fields[1].name: "Dark", + fields[0].name: "Tesla", "id": rows[3].id, "order": AnyStr(), }, @@ -1776,13 +1776,13 @@ def test_dispatch_data_sources_with_formula_using_datasource_calling_an_other( str(data_source2.id): { "id": 2, "order": "2.00000000000000000000", - fields2[0].db_column: "2", + fields2[0].name: "2", }, str(data_source.id): { "id": 2, "order": "2.00000000000000000000", - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", }, } @@ -1866,8 +1866,8 @@ def test_dispatch_data_sources_with_formula_using_datasource_calling_a_shared_da str(data_source.id): { "id": rows[1].id, "order": "2.00000000000000000000", - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", }, } @@ -1938,8 +1938,8 @@ def test_dispatch_only_shared_data_sources(data_fixture, api_client): str(shared_data_source.id): { "id": rows[1].id, "order": "2.00000000000000000000", - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", }, } @@ -2028,7 +2028,8 @@ def test_dispatch_data_sources_list_rows_with_elements( table=data_source_fixture["table"], ) - field_id = data_source_fixture["fields"][0].id + field = data_source_fixture["fields"][0] + field_id = field.id # Create an element that uses a formula referencing the data source data_fixture.create_builder_table_element( @@ -2068,7 +2069,7 @@ def test_dispatch_data_sources_list_rows_with_elements( { # Although this Data Source has 2 Fields/Columns, only one is # returned since only one field_id is used by the Table. 
- f"field_{field_id}": getattr(row, f"field_{field_id}"), + field.name: getattr(row, f"field_{field_id}"), "id": row.id, } ) @@ -2112,7 +2113,8 @@ def test_dispatch_data_sources_get_row_with_elements( row_id=table_row_id, ) - field_id = data_source_fixture["fields"][0].id + field = data_source_fixture["fields"][0] + field_id = field.id # Create an element that uses a formula referencing the data source data_fixture.create_builder_table_element( @@ -2151,7 +2153,7 @@ def test_dispatch_data_sources_get_row_with_elements( assert response.status_code == HTTP_200_OK assert response.json() == { str(data_source.id): { - f"field_{field_id}": getattr(rows[db_row_id], f"field_{field_id}"), + field.name: getattr(rows[db_row_id], f"field_{field_id}"), "id": rows[db_row_id].id, } } @@ -2257,7 +2259,7 @@ def test_dispatch_data_sources_get_and_list_rows_with_elements( assert response.status_code == HTTP_200_OK assert response.json() == { str(data_source_1.id): { - f"field_{fields_1[0].id}": getattr(rows_1[0], f"field_{fields_1[0].id}"), + fields_1[0].name: getattr(rows_1[0], f"field_{fields_1[0].id}"), "id": rows_1[0].id, }, # Although this Data Source has 2 Fields/Columns, only one is returned @@ -2266,9 +2268,7 @@ def test_dispatch_data_sources_get_and_list_rows_with_elements( "has_next_page": False, "results": [ { - f"field_{fields_2[0].id}": getattr( - rows_2[0], f"field_{fields_2[0].id}" - ), + fields_2[0].name: getattr(rows_2[0], f"field_{fields_2[0].id}"), "id": rows_2[0].id, }, ], @@ -2377,17 +2377,17 @@ def test_private_dispatch_data_source_view_returns_all_fields(api_client, data_f "has_next_page": False, "results": [ { - f"field_{fields[0].id}": "Paneer Tikka", + fields[0].name: "Paneer Tikka", # Although only field_1 is explicitly used by an element in this # page, field_2 is still returned because the Editor page needs # access to all data source fields. 
- f"field_{fields[1].id}": "5", + fields[1].name: "5", "id": AnyInt(), "order": AnyStr(), }, { - f"field_{fields[0].id}": "Gobi Manchurian", - f"field_{fields[1].id}": "8", + fields[0].name: "Gobi Manchurian", + fields[1].name: "8", "id": AnyInt(), "order": AnyStr(), }, diff --git a/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py b/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py index 0a78054444..ea3cf8163c 100644 --- a/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py +++ b/backend/tests/baserow/contrib/builder/api/data_sources/test_public_data_source_views.py @@ -184,7 +184,8 @@ def test_dispatch_data_sources_list_rows_with_elements( table=data_source_fixture["table"], ) - field_id = data_source_fixture["fields"][0].id + field = data_source_fixture["fields"][0] + field_id = field.id # Create an element that uses a formula referencing the data source data_fixture.create_builder_table_element( @@ -212,7 +213,7 @@ def test_dispatch_data_sources_list_rows_with_elements( ) expected_results = [ - {f"field_{field_id}": getattr(row, f"field_{field_id}"), "id": row.id} + {field.name: getattr(row, f"field_{field_id}"), "id": row.id} for row in data_source_fixture["rows"] ] @@ -290,7 +291,9 @@ def test_dispatch_data_sources_get_row_with_elements( assert response.status_code == HTTP_200_OK assert response.json() == { str(data_source.id): { - f"field_{field_id}": getattr(rows[db_row_id], f"field_{field_id}"), + data_source_fixture["fields"][0].name: getattr( + rows[db_row_id], f"field_{field_id}" + ), "id": rows[db_row_id].id, } } @@ -390,7 +393,7 @@ def test_dispatch_data_sources_get_and_list_rows_with_elements( assert response.status_code == HTTP_200_OK assert response.json() == { str(data_source_1.id): { - f"field_{fields_1[0].id}": getattr(rows_1[0], f"field_{fields_1[0].id}"), + fields_1[0].name: getattr(rows_1[0], f"field_{fields_1[0].id}"), "id": 
rows_1[0].id, }, # Although this Data Source has 2 Fields/Columns, only one is returned @@ -399,9 +402,7 @@ def test_dispatch_data_sources_get_and_list_rows_with_elements( "has_next_page": False, "results": [ { - f"field_{fields_2[0].id}": getattr( - rows_2[0], f"field_{fields_2[0].id}" - ), + fields_2[0].name: getattr(rows_2[0], f"field_{fields_2[0].id}"), "id": rows_2[0].id, }, ], @@ -461,7 +462,8 @@ def test_dispatch_data_sources_list_rows_with_elements_and_role( table=data_source_element_roles_fixture["table"], ) - field_id = data_source_element_roles_fixture["fields"][0].id + field = data_source_element_roles_fixture["fields"][0] + field_id = field.id field_name = f"field_{field_id}" # Create an element that uses a formula referencing the data source @@ -499,7 +501,7 @@ def test_dispatch_data_sources_list_rows_with_elements_and_role( # to see the data source fields. expected_results.append( - {field_name: getattr(row, field_name), "id": row.id} + {field.name: getattr(row, field_name), "id": row.id} ) assert response.status_code == HTTP_200_OK @@ -555,9 +557,9 @@ def test_dispatch_data_sources_page_visibility_all_returns_elements( str(data_source.id): { "has_next_page": False, "results": [ - {f"field_{field.id}": "Apple"}, - {f"field_{field.id}": "Banana"}, - {f"field_{field.id}": "Cherry"}, + {field.name: "Apple"}, + {field.name: "Banana"}, + {field.name: "Cherry"}, ], }, } @@ -632,9 +634,9 @@ def test_dispatch_data_sources_page_visibility_logged_in_allow_all_returns_eleme str(data_source.id): { "has_next_page": False, "results": [ - {f"field_{field.id}": "Apple"}, - {f"field_{field.id}": "Banana"}, - {f"field_{field.id}": "Cherry"}, + {field.name: "Apple"}, + {field.name: "Banana"}, + {field.name: "Cherry"}, ], }, } @@ -813,9 +815,9 @@ def test_dispatch_data_sources_page_visibility_logged_in_allow_all_except( if is_allowed: expected_results = [ - {f"field_{field.id}": "Apple"}, - {f"field_{field.id}": "Banana"}, - {f"field_{field.id}": "Cherry"}, + 
{field.name: "Apple"}, + {field.name: "Banana"}, + {field.name: "Cherry"}, ] else: expected_results = [] diff --git a/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py b/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py index 47673f1216..ed13ed90e5 100644 --- a/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py +++ b/backend/tests/baserow/contrib/builder/api/domains/test_domain_public_views.py @@ -913,13 +913,13 @@ def test_public_dispatch_data_source_view_returns_all_fields( "results": [ { "id": rows[0].id, - f"field_{fields[0].id}": "Paneer Tikka", - f"field_{fields[1].id}": "5", + fields[0].name: "Paneer Tikka", + fields[1].name: "5", }, { "id": rows[1].id, - f"field_{fields[0].id}": "Gobi Manchurian", - f"field_{fields[1].id}": "8", + fields[0].name: "Gobi Manchurian", + fields[1].name: "8", }, ], } @@ -982,10 +982,10 @@ def test_public_dispatch_data_source_view_returns_some_fields( "has_next_page": False, "results": [ { - f"field_{fields[0].id}": "Paneer Tikka", + fields[0].name: "Paneer Tikka", }, { - f"field_{fields[0].id}": "Gobi Manchurian", + fields[0].name: "Gobi Manchurian", }, ], } @@ -1158,7 +1158,8 @@ def test_public_dispatch_data_sources_list_rows_with_elements_and_role( table=data_source_element_roles_fixture["table"], ) - field_id = data_source_element_roles_fixture["fields"][0].id + field = data_source_element_roles_fixture["fields"][0] + field_id = field.id # Create an element that uses a formula referencing the data source data_fixture.create_builder_table_element( @@ -1194,7 +1195,7 @@ def test_public_dispatch_data_sources_list_rows_with_elements_and_role( if expect_fields: # Field should only be visible if the user's role allows them # to see the data source fields. 
- result[f"field_{field_id}"] = getattr(row, f"field_{field_id}") + result[field.name] = getattr(row, f"field_{field_id}") expected_results.append(result) @@ -1376,7 +1377,7 @@ def test_public_dispatch_data_sources_list_rows_with_page_visibility_all( rows = data_source_element_roles_fixture["rows"] if expect_fields: - field_name = f"field_{field_id}" + field_name = data_source_element_roles_fixture["fields"][0].name assert response.json() == { str(data_source.id): { "has_next_page": False, @@ -1523,7 +1524,8 @@ def test_public_dispatch_data_sources_get_row_with_page_visibility_all( ) # Create an element that uses a formula referencing the data source - field_id = data_source_element_roles_fixture["fields"][0].id + field = data_source_element_roles_fixture["fields"][0] + field_id = field.id data_fixture.create_builder_heading_element( page=page, value=f"get('data_source.{data_source.id}.field_{field_id}')", @@ -1548,7 +1550,7 @@ def test_public_dispatch_data_sources_get_row_with_page_visibility_all( if expect_fields: assert response.json() == { - str(data_source.id): {f"field_{field_id}": "Apple"}, + str(data_source.id): {field.name: "Apple"}, } else: assert response.json() == {str(data_source.id): {}} @@ -1690,7 +1692,7 @@ def test_public_dispatch_data_sources_list_rows_with_page_visibility_logged_in( rows = data_source_element_roles_fixture["rows"] if expect_fields: - field_name = f"field_{field_id}" + field_name = data_source_element_roles_fixture["fields"][0].name assert response.json() == { str(data_source.id): { "has_next_page": False, @@ -1838,7 +1840,9 @@ def test_public_dispatch_data_sources_get_row_with_page_visibility_logged_in( if expect_fields: assert response.json() == { - str(data_source.id): {f"field_{field_id}": "Apple"}, + str(data_source.id): { + data_source_element_roles_fixture["fields"][0].name: "Apple" + }, } else: assert response.json() == {str(data_source.id): {}} diff --git 
a/backend/tests/baserow/contrib/builder/api/elements/test_record_selector_element.py b/backend/tests/baserow/contrib/builder/api/elements/test_record_selector_element.py index 8da95ec8fe..1eb5a1a0d2 100644 --- a/backend/tests/baserow/contrib/builder/api/elements/test_record_selector_element.py +++ b/backend/tests/baserow/contrib/builder/api/elements/test_record_selector_element.py @@ -126,4 +126,4 @@ def test_record_selector_element_form_submission(api_client, data_fixture): assert "id" in response.json() # The created item should have "field_1" set to the first item of the # record selector list - assert response.json()[f"field_{fields[0].id}"] == f"{rows[0].id}" + assert response.json()[fields[0].name] == f"{rows[0].id}" diff --git a/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py b/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py index f7132f0f0e..84b11aa283 100644 --- a/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py +++ b/backend/tests/baserow/contrib/builder/api/workflow_actions/test_workflow_actions_views.py @@ -600,8 +600,8 @@ def test_dispatch_local_baserow_create_row_workflow_action(api_client, data_fixt response_json = response.json() assert "id" in response_json - assert response_json[color_field.db_column] == "Brown" - assert animal_field.db_column not in response_json + assert response_json[color_field.name] == "Brown" + assert animal_field.name not in response_json @pytest.mark.django_db @@ -658,8 +658,8 @@ def test_dispatch_local_baserow_update_row_workflow_action(api_client, data_fixt response_json = response.json() assert response_json["id"] == first_row.id - assert response_json[color_field.db_column] == "Blue" - assert animal_field.db_column not in response_json + assert response_json[color_field.name] == "Blue" + assert animal_field.name not in response_json @pytest.mark.django_db @@ -719,7 +719,7 @@ def 
test_dispatch_local_baserow_upsert_row_workflow_action_with_current_record( assert response.status_code == HTTP_200_OK response_json = response.json() assert "id" not in response_json - assert response_json[index.db_column] == "Index 123" + assert response_json[index.name] == "Index 123" @pytest.mark.django_db(transaction=True) @@ -832,7 +832,7 @@ def test_dispatch_local_baserow_upsert_row_workflow_action_with_unmatching_index assert response.status_code == HTTP_200_OK row3 = model.objects.get(pk=rows[2].id) - assert getattr(row3, f"field_{field.id}") == f"Updated row {rows[2].id}" + assert getattr(row3, field.db_column) == f"Updated row {rows[2].id}" payload = { "metadata": json.dumps( @@ -853,7 +853,7 @@ def test_dispatch_local_baserow_upsert_row_workflow_action_with_unmatching_index ) assert response.status_code == HTTP_200_OK row4 = model.objects.get(pk=rows[3].id) - assert getattr(row4, f"field_{field.id}") == f"Updated row {rows[3].id}" + assert getattr(row4, field.db_column) == f"Updated row {rows[3].id}" @pytest.mark.django_db @@ -983,8 +983,8 @@ def test_dispatch_local_baserow_update_row_workflow_action_using_formula_with_da assert response.status_code == HTTP_200_OK response_json = response.json() - assert response_json[color_field.db_column] == "Orange" - assert response_json[animal_field.db_column] == f"{rows[1].id}" + assert response_json[color_field.name] == "Orange" + assert response_json[animal_field.name] == f"{rows[1].id}" @pytest.mark.django_db @@ -1265,7 +1265,7 @@ def test_notification_action_can_access_the_field_of_previous_action( # # Conversely, the other DB columns aren't returned, since they aren't used. 
assert response.json() == { - fields[0].db_column: "Palak Paneer", + fields[0].name: "Palak Paneer", } diff --git a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py index 4aece79f11..74094cd0d0 100644 --- a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py +++ b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_handler.py @@ -280,8 +280,8 @@ def test_dispatch_data_source(data_fixture): assert result == { "id": rows[1].id, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } @@ -342,15 +342,15 @@ def test_dispatch_data_sources(data_fixture): assert result[data_source.id] == { "id": rows[1].id, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } assert result[data_source2.id] == { "id": rows[2].id, "order": AnyStr(), - fields[0].db_column: "Volkswagen", - fields[1].db_column: "White", + fields[0].name: "Volkswagen", + fields[1].name: "White", } assert isinstance(result[data_source3.id], Exception) @@ -595,14 +595,14 @@ def test_dispatch_data_source_doesnt_return_formula_field_names( { "id": 1, "order": "1.00000000000000000000", - f"field_{fields[0].id}": "Paneer Tikka", - f"field_{fields[1].id}": "5", + fields[0].name: "Paneer Tikka", + fields[1].name: "5", }, { "id": 2, "order": "2.00000000000000000000", - f"field_{fields[0].id}": "Gobi Manchurian", - f"field_{fields[1].id}": "8", + fields[0].name: "Gobi Manchurian", + fields[1].name: "8", }, ], } diff --git a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py index b3c7c926d1..c17d4c2085 100644 --- a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py +++ 
b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py @@ -436,8 +436,8 @@ def test_dispatch_data_source(data_fixture): assert result == { "id": rows[1].id, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } @@ -498,15 +498,15 @@ def test_dispatch_page_data_sources(data_fixture): assert result[data_source.id] == { "id": rows[1].id, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } assert result[data_source2.id] == { "id": rows[2].id, "order": AnyStr(), - fields[0].db_column: "Volkswagen", - fields[1].db_column: "White", + fields[0].name: "Volkswagen", + fields[1].name: "White", } assert isinstance(result[data_source3.id], Exception) @@ -578,77 +578,6 @@ def test_dispatch_data_source_improperly_configured(data_fixture): DataSourceService().dispatch_data_source(user, data_source, dispatch_context) -@pytest.mark.parametrize( - "row,field_names,updated_row", - [ - ( - {"id": 1, "order": "1.000", "field_100": "foo"}, - ["field_100"], - {"field_100": "foo"}, - ), - ( - {"id": 1, "order": "1.000", "field_100": "foo"}, - ["field_99", "field_100", "field_101"], - {"field_100": "foo"}, - ), - ( - { - "id": 2, - "order": "1.000", - "field_200": {"id": 500, "value": "Delhi", "color": "dark-blue"}, - }, - ["field_200"], - {"field_200": {"id": 500, "value": "Delhi", "color": "dark-blue"}}, - ), - # Expect an empty dict because field_names is empty - ( - {"id": 4, "order": "1.000", "field_300": "foo"}, - [], - {}, - ), - # Expect an empty dict because field_names doesn't contain "field_400" - ( - {"id": 3, "order": "1.000", "field_400": "foo"}, - ["field_301"], - {}, - ), - # Expect an empty dict because field_names doesn't contain "field_500" - ( - # Multiple select will appear as a nested dict - { - "id": 5, - "order": "1.000", - "field_500": {"id": 501, "value": "Delhi", 
"color": "dark-blue"}, - }, - [], - {}, - ), - # Expect an empty dict because field_names doesn't contain "field_500" - ( - { - "id": 5, - "order": "1.000", - "field_500": {"id": 501, "value": "Delhi", "color": "dark-blue"}, - }, - ["field_502"], - {}, - ), - ], -) -def test_remove_unused_field_names(row, field_names, updated_row): - """ - Test the remove_unused_field_names() method. - - Given a dispatched row, it should a modified version of the row. - - The method should only return the row contents if its key exists in the - field_names list. - """ - - result = DataSourceService().remove_unused_field_names(row, field_names) - assert result == updated_row - - @pytest.mark.django_db @pytest.mark.parametrize( "data_source_row_ids", @@ -890,7 +819,4 @@ def test_dispatch_data_sources_skips_exceptions_in_results(data_fixture): ) result = service.dispatch_data_sources(user, data_sources, dispatch_context) - assert result == { - data_source_1.id: {"results": [{"field_1": "foo"}]}, - data_source_2.id: expected_error, - } + assert result[data_source_2.id] == expected_error diff --git a/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py b/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py index 45a49d7cd7..bb458d14f7 100644 --- a/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py +++ b/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py @@ -13,7 +13,7 @@ def test_core_iterator_service_type_dispatch_data_simple_value(data_fixture): dispatch_result = service_type.dispatch(service, dispatch_context) - assert dispatch_result.data == [2] + assert dispatch_result.data == {"results": [2], "has_next_page": False} @pytest.mark.django_db @@ -27,13 +27,21 @@ def test_core_iterator_service_type_dispatch_data_array(data_fixture): dispatch_result = service_type.dispatch(service, dispatch_context) - assert dispatch_result.data == [{"test": "data"}, {"test": "data2"}] + assert 
dispatch_result.data == { + "results": [{"test": "data"}, {"test": "data2"}], + "has_next_page": False, + } @pytest.mark.django_db def test_core_iterator_service_type_schema(data_fixture): service = data_fixture.create_core_iterator_service( - sample_data={"data": [{"test": "data"}, {"test": "data2"}]} + sample_data={ + "data": { + "results": [{"test": "data"}, {"test": "data2"}], + "has_next_page": False, + } + } ) service_type = service.get_type() @@ -52,7 +60,7 @@ def test_core_iterator_service_type_schema(data_fixture): @pytest.mark.django_db def test_core_iterator_service_types_simple_schema(data_fixture): service = data_fixture.create_core_iterator_service( - sample_data={"data": ["string"]} + sample_data={"data": {"results": ["string"]}} ) service_type = service.get_type() @@ -68,7 +76,9 @@ def test_core_iterator_service_types_simple_schema(data_fixture): @pytest.mark.django_db def test_core_iterator_service_type_empty_schema(data_fixture): - service = data_fixture.create_core_iterator_service(sample_data={"data": []}) + service = data_fixture.create_core_iterator_service( + sample_data={"data": {"results": []}} + ) service_type = service.get_type() assert service_type.generate_schema(service) is None diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py index 89db1a1fd1..51ef1af903 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py @@ -219,8 +219,8 @@ def test_local_baserow_get_row_service_dispatch_transform(data_fixture): assert result.data == { "id": rows[1].id, - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", "order": AnyStr(), } @@ -332,7 +332,7 @@ def 
test_local_baserow_get_row_service_dispatch_data_with_service_integer_search assert result.data == { "id": rows[2].id, - fields[0].db_column: "42", + fields[0].name: "42", "order": AnyStr(), } @@ -794,6 +794,7 @@ def test_dispatch_transform_passes_field_ids(mock_get_serializer, field_names): RowSerializer, is_response=True, field_ids=None, + user_field_names=True, ) diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py index 78c9b26dab..9a70089303 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py @@ -239,14 +239,14 @@ def test_local_baserow_list_rows_service_dispatch_transform(data_fixture): assert [dict(r) for r in result.data["results"]] == [ { "id": rows[0].id, - fields[0].db_column: "BMW", - fields[1].db_column: "Blue", + fields[0].name: "BMW", + fields[1].name: "Blue", "order": AnyStr(), }, { "id": rows[1].id, - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", "order": AnyStr(), }, ] @@ -1146,6 +1146,7 @@ def test_dispatch_transform_passes_field_ids(mock_get_serializer, field_names): RowSerializer, is_response=True, field_ids=None, + user_field_names=True, ) diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_upsert_row_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_upsert_row_service_type.py index a348a9dc8d..9d3cf87bd9 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_upsert_row_service_type.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_upsert_row_service_type.py @@ -434,7 +434,7 @@ def 
test_local_baserow_upsert_row_service_dispatch_transform( assert dict(serialized_row.data) == { "id": dispatch_data["data"].id, "order": "1.00000000000000000000", - ingredient.db_column: str(2), + ingredient.name: str(2), } @@ -552,11 +552,11 @@ def test_local_baserow_upsert_row_service_dispatch_data_convert_value(data_fixtu "id": 1, "order": "1.00000000000000000000", # The string 'true' was converted to a boolean value - table.field_set.get(name="boolean").db_column: True, + table.field_set.get(name="boolean").name: True, # The string 'text' is unchanged - table.field_set.get(name="text").db_column: "text", + table.field_set.get(name="text").name: "text", # The string '1' is converted to a list with a single item - table.field_set.get(name="array").db_column: [ + table.field_set.get(name="array").name: [ {"id": 1, "value": "unnamed row 1", "order": AnyStr()} ], } @@ -802,6 +802,7 @@ def test_dispatch_transform_passes_field_ids( RowSerializer, is_response=True, field_ids=expected, + user_field_names=True, ) diff --git a/backend/tests/baserow/core/service/test_service_type.py b/backend/tests/baserow/core/service/test_service_type.py index ffc6d28f1d..a2f8e911f4 100644 --- a/backend/tests/baserow/core/service/test_service_type.py +++ b/backend/tests/baserow/core/service/test_service_type.py @@ -23,6 +23,81 @@ def test_service_type_generate_schema(): assert service_type_cls().generate_schema(mock_service) is None +@pytest.mark.parametrize( + "row,field_names,updated_row", + [ + ( + {"id": 1, "order": "1.000", "field_100": "foo"}, + ["field_100"], + {"field_100": "foo"}, + ), + ( + {"id": 1, "order": "1.000", "field_100": "foo"}, + ["field_99", "field_100", "field_101"], + {"field_100": "foo"}, + ), + ( + { + "id": 2, + "order": "1.000", + "field_200": {"id": 500, "value": "Delhi", "color": "dark-blue"}, + }, + ["field_200"], + {"field_200": {"id": 500, "value": "Delhi", "color": "dark-blue"}}, + ), + # Expect an empty dict because field_names is empty + ( + {"id": 4, 
"order": "1.000", "field_300": "foo"}, + [], + {}, + ), + # Expect an empty dict because field_names doesn't contain "field_400" + ( + {"id": 3, "order": "1.000", "field_400": "foo"}, + ["field_301"], + {}, + ), + # Expect an empty dict because field_names doesn't contain "field_500" + ( + # Multiple select will appear as a nested dict + { + "id": 5, + "order": "1.000", + "field_500": {"id": 501, "value": "Delhi", "color": "dark-blue"}, + }, + [], + {}, + ), + # Expect an empty dict because field_names doesn't contain "field_500" + ( + { + "id": 5, + "order": "1.000", + "field_500": {"id": 501, "value": "Delhi", "color": "dark-blue"}, + }, + ["field_502"], + {}, + ), + ], +) +def test_service_type_remove_unused_field_names(row, field_names, updated_row): + """ + Test the remove_unused_field_names() method. + + Given a dispatched row, it should return a modified version of the row. + + The method should only return the row contents if its key exists in the + field_names list. + """ + + service_type_cls = ServiceType + service_type_cls.model_class = Mock() + + result = service_type_cls().remove_unused_field_names(row, field_names) + + assert result == updated_row + + @pytest.mark.django_db def test_service_type_prepare_values(data_fixture): user = data_fixture.create_user() diff --git a/changelog/entries/unreleased/breaking_change/4135_data_source_now_return_content_with_human_property_names_ins.json b/changelog/entries/unreleased/breaking_change/4135_data_source_now_return_content_with_human_property_names_ins.json new file mode 100644 index 0000000000..b2d471d866 --- /dev/null +++ b/changelog/entries/unreleased/breaking_change/4135_data_source_now_return_content_with_human_property_names_ins.json @@ -0,0 +1,8 @@ +{ + "type": "breaking_change", + "message": "Data source now returns content with human property names instead of technical ones", + "domain": "builder", + "issue_number": 4135, + "bullet_points": [], + "created_at": "2025-11-03" +} \ No newline at end of file 
diff --git a/enterprise/backend/tests/baserow_enterprise_tests/builder/elements/test_element_types.py b/enterprise/backend/tests/baserow_enterprise_tests/builder/elements/test_element_types.py index 2e1398ac14..4846c35d42 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/builder/elements/test_element_types.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/builder/elements/test_element_types.py @@ -280,7 +280,7 @@ def test_dispatch_local_baserow_update_row_workflow_action_with_file( assert response.status_code == HTTP_200_OK response_json = response.json() - assert response_json[file_field.db_column] == [ + assert response_json[file_field.name] == [ { "image_height": 256, "image_width": 256, diff --git a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py index 6866d07f6f..1faba65bf2 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py @@ -1166,8 +1166,8 @@ def test_public_dispatch_data_source_with_ab_user_using_user_source( assert response.json() == { "id": 2, "order": AnyStr(), - fields[0].db_column: "Audi", - fields[1].db_column: "Orange", + fields[0].name: "Audi", + fields[1].name: "Orange", } diff --git a/web-frontend/modules/builder/components/elements/components/TableElement.vue b/web-frontend/modules/builder/components/elements/components/TableElement.vue index cee506c046..c83099d73e 100644 --- a/web-frontend/modules/builder/components/elements/components/TableElement.vue +++ b/web-frontend/modules/builder/components/elements/components/TableElement.vue @@ -102,6 +102,9 @@ export default { })) }, rows() { + if (!this.elementContent) { + return [] + } return this.elementContent.map((row, rowIndex) => { const newRow 
= Object.fromEntries( this.fields.map((field) => { diff --git a/web-frontend/modules/builder/dataProviderTypes.js b/web-frontend/modules/builder/dataProviderTypes.js index 7394106342..cc1dc364dd 100644 --- a/web-frontend/modules/builder/dataProviderTypes.js +++ b/web-frontend/modules/builder/dataProviderTypes.js @@ -120,9 +120,23 @@ export class DataSourceDataProviderType extends DataProviderType { 'dataSource/getPagesDataSourceById' ](pages, parseInt(dataSourceId)) - const content = this.getDataSourceContent(applicationContext, dataSource) + const rawContent = this.getDataSourceContent(applicationContext, dataSource) - return content ? getValueAtPath(content, rest.join('.')) : null + const serviceType = this.app.$registry.get('service', dataSource.type) + + let content = rawContent + let path = rest + + if (serviceType.returnsList) { + // if it returns a list let's consume the next path token which is the row + const [row, ...afterRow] = rest + content = getValueAtPath(content, row) + path = afterRow + } + + return content + ? serviceType.getValueAtPath(dataSource, content, path) + : null } getDataSourceContent(applicationContext, dataSource) { @@ -437,77 +451,11 @@ export class CurrentRecordDataProviderType extends DataProviderType { } getDataChunk(applicationContext, path) { - const content = this.getDataContent(applicationContext) - return getValueAtPath(content, path.join('.')) - } - - getCollectionAncestors({ page, element, allowSameElement }) { - const allCollectionAncestry = this.app.store.getters[ - 'element/getAncestors' - ](page, element, { - predicate: (ancestor) => - this.app.$registry.get('element', ancestor.type).isCollectionElement, - includeSelf: allowSameElement, - }) - - // Choose the right-most index of the ancestry which points - // to a data source. If `allowSameElement` is `true`, this could - // result in `element`'s `data_source_id`, rather than its parent - // element's `data_source_id`. 
- const lastIndex = _.findLastIndex( - allCollectionAncestry, - (ancestor) => ancestor.data_source_id !== null - ) - - return allCollectionAncestry.slice(lastIndex) - } - - getDataContent(applicationContext) { - // `recordIndexPath` defaults to `[0]` if it's not present, which can happen in - // places where it can't be provided, such as the elements context. - const { - page, - element, - recordIndexPath = [0], - allowSameElement = false, - } = applicationContext - - const collectionAncestry = this.getCollectionAncestors({ - page, - element, - allowSameElement, - }) - - const elementWithContent = collectionAncestry.at(-1) - const contentRows = - this.app.store.getters['elementContent/getElementContent']( - elementWithContent - ) - - // Copy the record index path, as we'll be shifting the first element. - const mappedRecordIndex = [...recordIndexPath] - const dataPaths = collectionAncestry - .map((ancestor, index) => { - if (ancestor.data_source_id) { - // If we have a data source id, and no schema property, or - // we have a data source id and a schema property - return [mappedRecordIndex.shift()] - } else { - // We have just a schema property - return [ancestor.schema_property, mappedRecordIndex.shift()] - } - }) - .flat() - - // Get the value at the dataPaths path. If the formula is invalid, - // as the ds/property has changed, and the value can't be found, - // we'll return an empty object. 
- const row = getValueAtPath(contentRows, dataPaths) || {} + const { element } = applicationContext - // Add the index value - row[this.indexKey] = dataPaths.at(-1) + const elementType = this.app.$registry.get('element', element.type) - return row + return elementType.getElementCurrentContent(applicationContext, path) } getDataSourceSchema(dataSource) { @@ -539,9 +487,11 @@ export class CurrentRecordDataProviderType extends DataProviderType { ) { const pages = [page, this.app.store.getters['page/getSharedPage'](builder)] + const elementType = this.app.$registry.get('element', element.type) + // Find the first collection ancestor with a `data_source`. If we // find one, this is what we'll use to generate the schema. - const collectionAncestors = this.getCollectionAncestors({ + const collectionAncestors = elementType.getCollectionAncestry({ page, element, allowSameElement, @@ -573,8 +523,7 @@ export class CurrentRecordDataProviderType extends DataProviderType { getDataSchema(applicationContext) { // `allowSameElement` is set if we want to consider the current element in the // collection ancestry list. If so it will be used to get the data source id if - // it's the first collection element. For instance, we don't - // + // it's the first collection element. // `followSameElementSchemaProperties` can be passed in the `applicationContext` // to control whether we wish to fetch schema property for the current element // or not. @@ -870,7 +819,21 @@ export class PreviousActionDataProviderType extends DataProviderType { getDataChunk(applicationContext, path) { const content = this.getDataContent(applicationContext) - return getValueAtPath(content, path.join('.')) + + const [workflowActionId, ...rest] = path + + const workflowAction = this.app.store.getters[ + 'builderWorkflowAction/getWorkflowActionById' + ](applicationContext.page, workflowActionId) + + const actionType = this.app.$registry.get( + 'workflowAction', + workflowAction.type + ) + + return content + ? 
actionType.getValueAtPath(workflowAction, content, rest) + : null } getWorkflowActionSchema(workflowAction) { diff --git a/web-frontend/modules/builder/elementTypeMixins.js b/web-frontend/modules/builder/elementTypeMixins.js index 33104aa7d0..f449db76c1 100644 --- a/web-frontend/modules/builder/elementTypeMixins.js +++ b/web-frontend/modules/builder/elementTypeMixins.js @@ -172,6 +172,21 @@ export const CollectionElementTypeMixin = (Base) => return Boolean(element.data_source_id || element.schema_property) } + getElementContentInStore(element) { + return ( + this.app.store.getters['elementContent/getElementContent'](element) || + [] + ) + } + + getDataSourceForElement({ builder, page, element }) { + const sharedPage = this.app.store.getters['page/getSharedPage'](builder) + return this.app.store.getters['dataSource/getPagesDataSourceById']( + [sharedPage, page], + element.data_source_id + ) + } + /** * Collection elements by default will have three permutations of display names: * diff --git a/web-frontend/modules/builder/elementTypes.js b/web-frontend/modules/builder/elementTypes.js index a81af4c123..bf36de78ab 100644 --- a/web-frontend/modules/builder/elementTypes.js +++ b/web-frontend/modules/builder/elementTypes.js @@ -93,6 +93,8 @@ import elementImageSimpleContainer from '@baserow/modules/builder/assets/icons/e import elementImageTable from '@baserow/modules/builder/assets/icons/element-table.svg' import elementImageText from '@baserow/modules/builder/assets/icons/element-text.svg' +import _ from 'lodash' + export class ElementType extends Registerable { get name() { return null @@ -762,6 +764,110 @@ export class ElementType extends Registerable { ({ type }) => type === ancestorType ) } + + /** + * Returns all collection ancestors of the given element. If allowSameElement is true, + * also return the element itself if it's a collection element. + * We get the right most chain of collection element starting from the first one + * that has a data source. 
+ */ + getCollectionAncestry({ page, element, allowSameElement }) { + const allCollectionAncestry = this.app.store.getters[ + 'element/getAncestors' + ](page, element, { + predicate: (ancestor) => + this.app.$registry.get('element', ancestor.type).isCollectionElement, + includeSelf: allowSameElement, + }) + + // Choose the right-most index of the ancestry which points + // to a data source. If `allowSameElement` is `true`, this could + // result in `element`'s `data_source_id`, rather than its parent + // element's `data_source_id`. + const lastIndex = _.findLastIndex( + allCollectionAncestry, + (ancestor) => ancestor.data_source_id !== null + ) + + return allCollectionAncestry.slice(lastIndex) + } + + /** + * Returns the current content (used by the current data provider) for the given + * element. The content is extracted from the element with an actual data source + * and refined using the current `recordIndexPath`, the schema properties of + * intermediate collection element and finally the path given as parameter. 
+ */ + getElementCurrentContent(applicationContext, path = []) { + const { + builder, + page, + element, + allowSameElement = true, + recordIndexPath, + } = applicationContext + + const collectionAncestry = this.getCollectionAncestry({ + element, + page, + allowSameElement, + }) + + const mainElement = collectionAncestry[0] // The element with the data source + + if ( + !collectionAncestry.length || + !mainElement.data_source_id || + !collectionAncestry + .slice(1) + .every(({ schema_property: schemaProperty }) => schemaProperty) + ) { + return null + } + + const mainElementType = this.app.$registry.get('element', mainElement.type) + + const mainDataSource = mainElementType.getDataSourceForElement({ + builder, + page, + element: mainElement, + }) + + const mainDataSourceType = this.app.$registry.get( + 'service', + mainDataSource.type + ) + + const dataPaths = collectionAncestry + .map(({ schema_property: schemaProperty }) => schemaProperty || null) + .flatMap((x, i) => + i < recordIndexPath.length ? [x, recordIndexPath[i]] : [x] + ) + .filter((v) => v !== null) + + const fullDataPath = [...dataPaths, ...path] + + const contentRows = mainElementType.getElementContentInStore(mainElement) + + if (fullDataPath.length) { + if (mainDataSourceType.returnsList) { + // directly consume the first path item as it's the row index + // and the getValueAtPath is only able to support property level. 
+ return mainDataSourceType.getValueAtPath( + mainDataSource, + contentRows[fullDataPath[0]], + fullDataPath.slice(1) + ) + } + return mainDataSourceType.getValueAtPath( + mainDataSource, + contentRows, + fullDataPath + ) + } + + return contentRows + } } export class FormContainerElementType extends ContainerElementTypeMixin( diff --git a/web-frontend/modules/builder/mixins/collectionElement.js b/web-frontend/modules/builder/mixins/collectionElement.js index b4bfb8094a..034386e31e 100644 --- a/web-frontend/modules/builder/mixins/collectionElement.js +++ b/web-frontend/modules/builder/mixins/collectionElement.js @@ -23,6 +23,7 @@ export default { getReset: 'elementContent/getReset', getPagesDataSourceById: 'dataSource/getPagesDataSourceById', getSharedPage: 'page/getSharedPage', + getElementAncestors: 'element/getAncestors', }), reset() { return this.getReset(this.element) @@ -52,8 +53,11 @@ export default { }) }, elementContent() { - return this.getElementContent(this.element, this.applicationContext) + return ( + this.elementType.getElementCurrentContent(this.applicationContext) || [] + ) }, + hasMorePage() { return this.getHasMorePage(this.element) }, diff --git a/web-frontend/modules/builder/mixins/collectionElementForm.js b/web-frontend/modules/builder/mixins/collectionElementForm.js index 8f5abe3ab5..1501c7db09 100644 --- a/web-frontend/modules/builder/mixins/collectionElementForm.js +++ b/web-frontend/modules/builder/mixins/collectionElementForm.js @@ -160,10 +160,6 @@ export default { this.selectedDataSource ) }, - elementHasContent() { - const { element } = this.applicationContext - return this.$store.getters['elementContent/getElementContent'](element) - }, ...mapGetters({ getElementSelected: 'element/getSelected', }), diff --git a/web-frontend/modules/builder/store/elementContent.js b/web-frontend/modules/builder/store/elementContent.js index 1ba4a731b8..81fd75dbe6 100644 --- a/web-frontend/modules/builder/store/elementContent.js +++ 
b/web-frontend/modules/builder/store/elementContent.js @@ -90,86 +90,12 @@ const actions = { * If `dataSource` is `null`, this means that we are trying to fetch the content * of a nested collection element, such as a repeat nested in a repeat. * - * The nested collection fetches its content by finding, either the root-level - * collection element with a dataSource, or its immediate parent with a schema property. - * - * If we have a parent with a schema property: this nested collection element - * is a child of a collection element using a schema property as well, e.g.: - * - * - Root collection element (with a dataSource): - * - Parent collection element (with a schema property) - * - Grandchild collection element (this `element`!) with a schema property. - * - * If we don't have a parent element with a schema property, we are a child of - * the root collection element with a dataSource, e.g.: - * - * - Root collection element (with a dataSource): - * - Parent collection element (this `element`!) with a schema property. + * No content is stored for this element directly. Its content will be deduced + * from the applicationContext and the content of the parent element */ if (!dataSource) { - // We clearly can't have more page for that one - commit('SET_HAS_MORE_PAGE', { element, value: false }) - commit('SET_LOADING', { element, value: false }) - - if (!element.schema_property) { - // We have a collection element that supports schema properties, and - // we have A) no data source and B) no schema property - // or, - // We have a collection element that doesn't support schema properties - // (record selector), and there's no data source. - commit('SET_LOADING', { element, value: false }) - return - } - - // Collect all collection element ancestors, with a `data_source_id`. 
- const collectionAncestors = this.app.store.getters[ - 'element/getAncestors' - ](page, element, { - predicate: (ancestor) => - this.app.$registry.get('element', ancestor.type) - .isCollectionElement && ancestor.data_source_id !== null, - }) - - // Pluck out the root ancestor, which has a data source. - const rootAncestorWithDataSource = collectionAncestors[0] - - // Next, find this element's parent. - const parent = this.app.store.getters['element/getParent'](page, element) - - // If the parent has a `schema_property`, we'll want to use the - // parent's element content for `element` to use. If the parent - // doesn't have a property, we'll access to the root ancestor's - // (which has a data source) for the content. - const targetElement = parent.schema_property - ? parent - : rootAncestorWithDataSource - - const targetContent = - this.app.store.getters['elementContent/getElementContent']( - targetElement - ) - - let elementContent = [] - if (parent.schema_property) { - // If the parent has a `schema_property`, it's an array of values - // *inside* `schema_property`, so we just copy the array. - elementContent = [...targetContent] - } else { - // Build a new array of content, for this `element`, which - // will only contain the property `schema_property`. - elementContent = targetContent.map((obj) => ({ - [element.schema_property]: obj[element.schema_property], - })) - } - - commit('CLEAR_CONTENT', { - element, - }) - commit('SET_CONTENT', { - element, - value: elementContent, - }) - // No more content for sure + // No data source means no content for this element directly. It will then take + // its content from the parent element so we can fake the end of the loading. commit('SET_HAS_MORE_PAGE', { element, value: false }) commit('SET_LOADING', { element, value: false }) return } @@ -263,10 +189,9 @@ const actions = { // range for paging, all results are set at once. 
We default to an // empty array if the property doesn't exist, this will happen if // the property has been removed since the initial configuration. - const propertyValue = data[element.schema_property] || [] commit('SET_CONTENT', { element, - value: propertyValue, + value: data, }) } @@ -318,20 +243,9 @@ const actions = { } const getters = { - getElementContent: - (state) => - (element, applicationContext = {}) => { - // If we have a recordIndexPath to work with, and the element has - // its content loaded, then we're fetching content for a nested - // collection+container element, which has a schema property. We'll - // return the content at a specific index path, and from that property. - const { recordIndexPath = [] } = applicationContext - if (recordIndexPath.length && element._.content.length) { - const contentAtIndex = element._.content[recordIndexPath[0]] - return contentAtIndex?.[element.schema_property] || [] - } - return element._.content - }, + getElementContent: (state) => (element) => { + return element._.content || [] + }, getHasMorePage: (state) => (element) => { return element._.hasNextPage }, diff --git a/web-frontend/modules/builder/workflowActionTypes.js b/web-frontend/modules/builder/workflowActionTypes.js index cb71077f43..9256fade8f 100644 --- a/web-frontend/modules/builder/workflowActionTypes.js +++ b/web-frontend/modules/builder/workflowActionTypes.js @@ -301,6 +301,14 @@ export class WorkflowActionServiceType extends WorkflowActionType { return super.getErrorMessage(workflowAction, applicationContext) } + getValueAtPath(workflowAction, content, path) { + return this.serviceType.getValueAtPath( + workflowAction.service, + content, + path.join('.') + ) + } + get serviceType() { throw new Error('This method must be implemented') } diff --git a/web-frontend/modules/core/formula/index.js b/web-frontend/modules/core/formula/index.js index 5575704467..05b296710a 100644 --- a/web-frontend/modules/core/formula/index.js +++ 
b/web-frontend/modules/core/formula/index.js @@ -28,6 +28,7 @@ export const resolveFormula = ( const tree = parseBaserowFormula(formulaCtx.formula) return new JavascriptExecutor(functions, RuntimeFormulaContext).visit(tree) } catch (err) { + console.debug('Err in formula resolution', err) return '' } } diff --git a/web-frontend/modules/core/serviceTypes.js b/web-frontend/modules/core/serviceTypes.js index 675ce32579..01dcb669fc 100644 --- a/web-frontend/modules/core/serviceTypes.js +++ b/web-frontend/modules/core/serviceTypes.js @@ -1,4 +1,5 @@ import { Registerable } from '@baserow/modules/core/registry' +import { getValueAtPath } from '@baserow/modules/core/utils/object' export class ServiceType extends Registerable { get name() { @@ -73,6 +74,13 @@ export class ServiceType extends Registerable { return this.name } + /** + * Allow to customize way data are accessed from service + */ + getValueAtPath(service, content, path) { + return getValueAtPath(content, path.join('.')) + } + getOrder() { return 0 } diff --git a/web-frontend/modules/core/workflowActionTypes.js b/web-frontend/modules/core/workflowActionTypes.js index 725904ec06..6a85b73a10 100644 --- a/web-frontend/modules/core/workflowActionTypes.js +++ b/web-frontend/modules/core/workflowActionTypes.js @@ -1,4 +1,5 @@ import { Registerable } from '@baserow/modules/core/registry' +import { getValueAtPath } from '@baserow/modules/core/utils/object' export class WorkflowActionType extends Registerable { get form() { @@ -31,6 +32,13 @@ export class WorkflowActionType extends Registerable { throw new Error('Must be set on the type.') } + /** + * Allow to customize way data are accessed from workflow action + */ + getValueAtPath(workflowAction, content, path) { + return getValueAtPath(content, path.join('.')) + } + /** * Returns a message if the workflow action configuration is invalid. * @param {object} workflowAction - The workflow action to validate. 
diff --git a/web-frontend/modules/integrations/localBaserow/serviceTypes.js b/web-frontend/modules/integrations/localBaserow/serviceTypes.js index 7f5656c5c8..7244314aab 100644 --- a/web-frontend/modules/integrations/localBaserow/serviceTypes.js +++ b/web-frontend/modules/integrations/localBaserow/serviceTypes.js @@ -15,6 +15,7 @@ import LocalBaserowSignalTriggerServiceForm from '@baserow/modules/integrations/ import LocalBaserowGetRowForm from '@baserow/modules/integrations/localBaserow/components/services/LocalBaserowGetRowForm' import LocalBaserowListRowsForm from '@baserow/modules/integrations/localBaserow/components/services/LocalBaserowListRowsForm' import LocalBaserowAggregateRowsForm from '@baserow/modules/integrations/localBaserow/components/services/LocalBaserowAggregateRowsForm' +import { getValueAtPath } from '@baserow/modules/core/utils/object' export class LocalBaserowTableServiceType extends ServiceType { get integrationType() { @@ -78,6 +79,24 @@ export class LocalBaserowTableServiceType extends ServiceType { return description } + + getValueAtPath(service, content, path) { + const schema = this.getDataSchema(service) + + const [field, ...rest] = path + let humanName = field + + if (schema) { + if (this.returnsList) { + if (schema.items?.properties?.[field]?.title) { + humanName = schema.items.properties[field].title + } + } else if (schema.properties[field]?.title) { + humanName = schema.properties[field].title + } + } + return getValueAtPath(content, [humanName, ...rest].join('.')) + } } export class DataSourceLocalBaserowTableServiceType extends DataSourceServiceTypeMixin( @@ -268,12 +287,17 @@ export class LocalBaserowListRowsServiceType extends DataSourceLocalBaserowTable } getRecordName(service, record) { - // We skip row_id and order properties here, so we keep only first key - // that should be the primary field - // [{ field_1234: 'The name of the record', id: 0, __idx__: 0 }] - // NOTE: This is assuming that the first field is the primary 
field. - const field = Object.keys(record).find((key) => key.startsWith('field_')) - return record[field] + const schema = this.getDataSchema(service) + if (!schema?.items?.properties) { + return '' + } + + // Search the primary field using the metadata in the schema + const primaryField = Object.values(schema.items.properties).find( + ({ metadata }) => metadata?.primary + ) + + return record[primaryField.title] } getOrder() { diff --git a/web-frontend/test/unit/builder/components/elements/components/RecordSelectorElement.spec.js b/web-frontend/test/unit/builder/components/elements/components/RecordSelectorElement.spec.js index e8b92e50ff..a9eced7289 100644 --- a/web-frontend/test/unit/builder/components/elements/components/RecordSelectorElement.spec.js +++ b/web-frontend/test/unit/builder/components/elements/components/RecordSelectorElement.spec.js @@ -38,7 +38,25 @@ describe('RecordSelectorElement', () => { test('does not paginate if API returns 400/404', async () => { const page = { id: 1, - dataSources: [{ id: 1, type: 'local_baserow_list_rows', table_id: 1 }], + dataSources: [ + { + id: 1, + type: 'local_baserow_list_rows', + table_id: 1, + schema: { + type: 'array', + items: { + properties: { + field_1: { + metadata: { primary: true }, + title: 'Name', + }, + field_2: { metadata: {}, title: 'Other' }, + }, + }, + }, + }, + ], elements: [], } const sharedPage = { @@ -85,11 +103,11 @@ describe('RecordSelectorElement', () => { .onPost(url) .replyOnce(200, { results: [ - { id: 1, order: 1, field_1: 'First' }, - { id: 2, order: 1, field_1: 'Second' }, - { id: 3, order: 1, field_1: 'Third' }, - { id: 4, order: 1, field_1: 'Fourth' }, - { id: 5, order: 1, field_1: 'Fifth' }, + { id: 1, order: 1, Name: 'First' }, + { id: 2, order: 1, Name: 'Second' }, + { id: 3, order: 1, Name: 'Third' }, + { id: 4, order: 1, Name: 'Fourth' }, + { id: 5, order: 1, Name: 'Fifth' }, ], has_next_page: true, }) @@ -131,7 +149,25 @@ describe('RecordSelectorElement', () => { test('resolves 
suffix formulas', async () => { const page = { id: 1, - dataSources: [{ id: 1, type: 'local_baserow_list_rows', table_id: 1 }], + dataSources: [ + { + id: 1, + type: 'local_baserow_list_rows', + table_id: 1, + schema: { + type: 'array', + items: { + properties: { + field_1: { + metadata: { primary: true }, + title: 'Name', + }, + field_2: { metadata: {}, title: 'Other' }, + }, + }, + }, + }, + ], elements: [], } const sharedPage = { @@ -153,7 +189,7 @@ describe('RecordSelectorElement', () => { type: 'record_selector', data_source_id: page.dataSources[0].id, items_per_page: 5, - option_name_suffix: { formula: "'Suffix'" }, + option_name_suffix: { formula: "'Suffix'", mode: 'simple' }, } store.dispatch('element/forceCreate', { page, element }) @@ -176,8 +212,8 @@ describe('RecordSelectorElement', () => { const url = `builder/domains/published/data-source/${page.dataSources[0].id}/dispatch/` mockServer.mock.onPost(url).reply(200, { results: [ - { id: 1, order: 1, field_1: 'First', field_2: 'One' }, - { id: 2, order: 1, field_1: 'Second', field_2: 'Two' }, + { id: 1, order: 1, Name: 'First', Other: 'One' }, + { id: 2, order: 1, Name: 'Second', Other: 'Two' }, ], has_next_page: false, }) @@ -188,7 +224,9 @@ describe('RecordSelectorElement', () => { .at(0) .find('.ab-dropdown__selected') .trigger('click') + await flushPromises() + expect(wrapper.element).toMatchSnapshot() expect(wrapper.find("span[title='First - Suffix']").exists()).toBeTruthy() expect(wrapper.find("span[title='Second - Suffix']").exists()).toBeTruthy()