Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,9 @@ def get_data_chunk(
)
raise InvalidFormulaContext(message) from exc
else:
return get_value_at_path(previous_node_results, rest)
return previous_node.service.get_type().get_value_at_path(
previous_node.service.specific, previous_node_results, rest
)

def import_path(self, path, id_mapping, **kwargs):
"""
Expand Down Expand Up @@ -100,7 +102,7 @@ def get_data_chunk(
)
raise InvalidFormulaContext(message) from exc

current_item = parent_node_results[current_iteration]
current_item = parent_node_results["results"][current_iteration]
data = {"index": current_iteration, "item": current_item}

return get_value_at_path(data, rest)
Expand Down
6 changes: 1 addition & 5 deletions backend/src/baserow/contrib/automation/nodes/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,11 +369,7 @@ def dispatch_node(
return

if children := node.get_children():
node_data = (
dispatch_result.data
if isinstance(dispatch_result.data, list)
else [dispatch_result.data]
)
node_data = dispatch_result.data["results"]

if dispatch_context.simulate_until_node:
iterations = [0]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,9 @@ def get_data_chunk(self, dispatch_context: BuilderDispatchContext, path: List[st
if data_source.service.get_type().returns_list:
dispatch_result = dispatch_result["results"]

return get_value_at_path(dispatch_result, rest)
return data_source.service.get_type().get_value_at_path(
data_source.service.specific, dispatch_result, rest
)

def import_path(self, path, id_mapping, **kwargs):
"""
Expand Down Expand Up @@ -482,8 +484,11 @@ def get_data_chunk(self, dispatch_context: DispatchContext, path: List[str]):
cache_key = self.get_dispatch_action_cache_key(
dispatch_id, workflow_action.id
)
return get_value_at_path(cache.get(cache_key), rest)
return workflow_action.service.get_type().get_value_at_path(
workflow_action.service.specific, cache.get(cache_key), rest
)
else:
# Frontend actions
return get_value_at_path(previous_action_results[previous_action_id], rest)

def post_dispatch(
Expand Down
33 changes: 8 additions & 25 deletions backend/src/baserow/contrib/builder/data_sources/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,18 +278,6 @@ def delete_data_source(self, user: AbstractUser, data_source: DataSourceForUpdat
self, data_source_id=data_source.id, page=page, user=user
)

def remove_unused_field_names(
self,
row: Dict[str, Any],
field_names: List[str],
) -> Dict[str, Any]:
"""
Given a row dictionary, return a version of it that only contains keys
existing in the field_names list.
"""

return {key: value for key, value in row.items() if key in field_names}

def dispatch_data_sources(
self,
user,
Expand Down Expand Up @@ -330,22 +318,17 @@ def dispatch_data_sources(
new_results[data_source.id] = results[data_source.id]
continue

field_names = dispatch_context.public_allowed_properties.get(
allowed_field_names = dispatch_context.public_allowed_properties.get(
"external", {}
).get(data_source.service.id, [])

if data_source.service.get_type().returns_list:
new_results[data_source.id] = {
**results[data_source.id],
"results": [
self.remove_unused_field_names(row, field_names)
for row in results[data_source.id]["results"]
],
}
else:
new_results[data_source.id] = self.remove_unused_field_names(
results[data_source.id], field_names
)
new_results[
data_source.id
] = data_source.service.get_type().sanitize_result(
data_source.service.specific,
results[data_source.id],
allowed_field_names,
)

return new_results

Expand Down
20 changes: 6 additions & 14 deletions backend/src/baserow/contrib/builder/workflow_actions/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,18 +291,6 @@ def order_workflow_actions(

return full_order

def remove_unused_field_names(
self,
row: dict[str, Any],
field_names: List[str],
) -> dict[str, Any]:
"""
Given a row dictionary, return a version of it that only contains keys
existing in the field_names list.
"""

return {key: value for key, value in row.items() if key in field_names}

def dispatch_action(
self,
user,
Expand Down Expand Up @@ -338,11 +326,15 @@ def dispatch_action(
)

# Remove unfiltered fields
field_names = dispatch_context.public_allowed_properties.get(
allowed_field_names = dispatch_context.public_allowed_properties.get(
"external", {}
).get(workflow_action.service.id, [])

data = workflow_action.service.get_type().sanitize_result(
workflow_action.service.specific, result.data, allowed_field_names
)

return DispatchResult(
data=self.remove_unused_field_names(result.data, field_names),
data=data,
status=result.status,
)
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@
from baserow.core.services.models import Service
from baserow.core.services.registries import (
DispatchTypes,
ListServiceTypeMixin,
ServiceType,
TriggerServiceTypeMixin,
)
Expand Down Expand Up @@ -1566,7 +1567,7 @@ def export_prepared_values(
return values


class CoreIteratorServiceType(ServiceType):
class CoreIteratorServiceType(ListServiceTypeMixin, ServiceType):
type = "iterator"
model_class = CoreIteratorService
dispatch_types = DispatchTypes.ACTION
Expand Down Expand Up @@ -1609,7 +1610,7 @@ def generate_schema(
allowed_fields is None or "items" in allowed_fields
):
schema_builder = SchemaBuilder()
schema_builder.add_object(service.sample_data["data"])
schema_builder.add_object(service.sample_data["data"]["results"])
schema = schema_builder.to_schema()

# Sometimes there is no items if the array is empty
Expand Down Expand Up @@ -1643,7 +1644,7 @@ def dispatch_data(
resolved_values: Dict[str, Any],
dispatch_context: DispatchContext,
) -> Any:
return resolved_values["source"]
return {"results": resolved_values["source"], "has_next_page": False}

def dispatch_transform(
self,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,58 @@ class LocalBaserowTableServiceType(LocalBaserowServiceType):
class SerializedDict(ServiceDict):
table_id: int

def _convert_allowed_field_names(self, service, allowed_fields):
    """
    Translate `field_X` db column names into their human-readable field
    names.

    Any entry in `allowed_fields` that doesn't correspond to a known db
    column of the service's table is kept unchanged.
    """

    db_column_to_name = {}
    for field_object in self.get_table_field_objects(service):
        field = field_object["field"]
        db_column_to_name[field.db_column] = field.name

    converted = []
    for candidate in allowed_fields:
        converted.append(db_column_to_name.get(candidate, candidate))
    return converted

def sanitize_result(self, service, result, allowed_field_names):
    """
    Strip non-public fields from the dispatch result.

    The allowed names are translated from `field_X` db columns to their
    human-readable counterparts first, then the base implementation does
    the actual filtering.
    """

    human_readable_names = self._convert_allowed_field_names(
        service, allowed_field_names
    )
    return super().sanitize_result(service, result, human_readable_names)

def get_value_at_path(self, service: Service, context: Any, path: List[str]):
    """
    Resolve `path` against `context`, translating the `field_X` db column
    segment into the field's human-readable name first.

    For list-returning services the first path segment is the row index
    and the second is the db column; otherwise the db column is the first
    segment. Paths too short to contain a db column are resolved
    unchanged.
    """

    returns_list = self.returns_list
    minimum_segments = 2 if returns_list else 1

    if len(path) < minimum_segments:
        return super().get_value_at_path(service, context, path)

    if returns_list:
        row_index, db_column, *remainder = path
    else:
        row_index = None
        db_column, *remainder = path

    # Fall back to the raw segment when no matching field is found.
    human_name = db_column
    for field_object in self.get_table_field_objects(service) or []:
        field = field_object["field"]
        if field.db_column == db_column:
            human_name = field.name
            break

    translated_path = (
        [row_index, human_name, *remainder]
        if returns_list
        else [human_name, *remainder]
    )
    return super().get_value_at_path(service, context, translated_path)

def build_queryset(
self,
service: LocalBaserowTableService,
Expand Down Expand Up @@ -1052,6 +1104,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
RowSerializer,
is_response=True,
field_ids=field_ids,
user_field_names=True,
)

return DispatchResult(
Expand Down Expand Up @@ -1591,6 +1644,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
RowSerializer,
is_response=True,
field_ids=field_ids,
user_field_names=True,
)

serialized_row = serializer(dispatch_data["data"]).data
Expand Down Expand Up @@ -2081,6 +2135,7 @@ def dispatch_transform(self, dispatch_data: Dict[str, Any]) -> DispatchResult:
RowSerializer,
is_response=True,
field_ids=field_ids,
user_field_names=True,
)
serialized_row = serializer(dispatch_data["data"]).data

Expand Down Expand Up @@ -2243,9 +2298,7 @@ def _handle_signal(
**kwargs,
):
serializer = get_row_serializer_class(
model,
RowSerializer,
is_response=True,
model, RowSerializer, is_response=True, user_field_names=True
)

data_to_process = {
Expand Down
37 changes: 37 additions & 0 deletions backend/src/baserow/core/services/registries.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
)
from baserow.core.services.dispatch_context import DispatchContext
from baserow.core.services.types import DispatchResult, FormulaToResolve
from baserow.core.utils import get_value_at_path

from .exceptions import (
DispatchException,
Expand Down Expand Up @@ -311,6 +312,14 @@ def resolve_service_formulas(

return resolved_values

def get_value_at_path(self, service: Service, context: Any, path: List[str]):
    """
    Hook allowing a service type to customise how a value is extracted
    from the context for a given path. The default implementation simply
    delegates to the generic `get_value_at_path` utility.

    :param service: the service instance the lookup is performed for.
        Unused here, but available to overriding service types.
    :param context: the data structure the path is resolved against.
    :param path: the list of path segments to follow into `context`.
    :return: the value found at `path` in `context`.
    """

    return get_value_at_path(context, path)

def dispatch_transform(
self,
data: Any,
Expand Down Expand Up @@ -384,6 +393,34 @@ def dispatch(

return serialized_data

def remove_unused_field_names(
    self,
    row: Dict[str, Any],
    field_names: List[str],
) -> Dict[str, Any]:
    """
    Return a copy of `row` that only contains keys listed in
    `field_names`.

    :param row: The row dictionary to filter.
    :param field_names: The keys allowed to remain in the result.
    :return: A new dictionary restricted to the allowed keys.
    """

    # Membership tests against a set are O(1) instead of scanning the
    # list once per key in the row.
    allowed = set(field_names)
    return {key: value for key, value in row.items() if key in allowed}

def sanitize_result(self, service, result, allowed_field_names):
"""
Remove the non public fields from the result.
"""

if self.returns_list:
return {
**result,
"results": [
self.remove_unused_field_names(row, allowed_field_names)
for row in result["results"]
],
}
else:
return self.remove_unused_field_names(result, allowed_field_names)

def get_schema_name(self, service: Service) -> str:
"""
The default schema name added to the `title` in a JSON Schema object.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,11 +104,13 @@ def test_current_iteration_data_provider_get_data_chunk(data_fixture):
dispatch_context = AutomationDispatchContext(workflow)

dispatch_context.after_dispatch(
trigger, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}])
trigger,
DispatchResult(data={"results": [{"field_1": "Horse"}, {"field_1": "Duck"}]}),
)

dispatch_context.after_dispatch(
iterator, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}])
iterator,
DispatchResult(data={"results": [{"field_1": "Horse"}, {"field_1": "Duck"}]}),
)

dispatch_context.set_current_iteration(iterator, 0)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,7 @@ def test_simulate_dispatch_node_action(data_fixture):

assert action_node.service.sample_data == {
"data": {
f"field_{fields[0].id}": "A new row",
fields[0].name: "A new row",
"id": row.id,
"order": str(row.order),
},
Expand Down Expand Up @@ -346,7 +346,7 @@ def test_simulate_dispatch_node_action_with_update_sample_data(

assert action_node.service.sample_data == {
"data": {
f"field_{fields[0].id}": "A new row",
fields[0].name: "A new row",
"id": AnyInt(),
"order": AnyStr(),
},
Expand Down Expand Up @@ -387,7 +387,7 @@ def test_simulate_dispatch_node_action_with_simulate_until_node(data_fixture):
row = table.get_model().objects.first()
assert action_node_1.service.sample_data == {
"data": {
f"field_{fields[0].id}": "A new row",
fields[0].name: "A new row",
"id": row.id,
"order": str(row.order),
},
Expand Down Expand Up @@ -544,9 +544,9 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture):

node_c_2.refresh_from_db()
node_c_2.service.refresh_from_db()
field_id = node_c_2.service.specific.table.field_set.all()[0].id
field = node_c_2.service.specific.table.field_set.all()[0]
assert node_c_2.service.sample_data == {
"data": {f"field_{field_id}": "cherry", "id": AnyInt(), "order": AnyStr()},
"data": {field.name: "cherry", "id": AnyInt(), "order": AnyStr()},
"output_uid": AnyStr(),
"status": 200,
}
Loading
Loading