From 3988349cd7ddfe84c7c06f546d50c750b5d98410 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9mie=20Pardou?= <571533+jrmi@users.noreply.github.com> Date: Wed, 29 Oct 2025 09:50:34 +0100 Subject: [PATCH 1/2] Add iterator node (#4099) * Refactor the graph * Many other stuff --- .../contrib/automation/api/nodes/errors.py | 32 +- .../automation/api/nodes/serializers.py | 62 +- .../contrib/automation/api/nodes/urls.py | 6 - .../contrib/automation/api/nodes/views.py | 130 ++- .../automation/api/workflows/serializers.py | 1 + .../src/baserow/contrib/automation/apps.py | 14 +- .../automation/automation_dispatch_context.py | 6 + .../data_providers/data_provider_types.py | 83 +- ...e_alter_automationnode_options_and_more.py | 111 +++ .../contrib/automation/nodes/actions.py | 232 +---- .../contrib/automation/nodes/exceptions.py | 38 +- .../contrib/automation/nodes/handler.py | 417 ++------- .../contrib/automation/nodes/models.py | 137 +-- .../contrib/automation/nodes/node_types.py | 194 ++-- .../contrib/automation/nodes/registries.py | 74 +- .../contrib/automation/nodes/service.py | 368 ++++---- .../contrib/automation/nodes/signals.py | 3 - .../contrib/automation/nodes/trash_types.py | 121 ++- .../baserow/contrib/automation/nodes/types.py | 56 +- .../contrib/automation/nodes/ws/signals.py | 86 +- .../src/baserow/contrib/automation/types.py | 1 + .../automation/workflows/graph_handler.py | 482 ++++++++++ .../contrib/automation/workflows/handler.py | 65 +- .../contrib/automation/workflows/models.py | 81 +- .../contrib/automation/workflows/service.py | 16 - .../src/baserow/contrib/integrations/apps.py | 4 + .../contrib/integrations/core/models.py | 10 + .../integrations/core/service_types.py | 96 +- .../integrations/local_baserow/mixins.py | 4 +- .../local_baserow/service_types.py | 8 +- .../migrations/0022_coreiteratorservice.py | 42 + .../src/baserow/core/services/registries.py | 9 +- .../test_utils/fixtures/automation_history.py | 3 - 
.../test_utils/fixtures/automation_node.py | 77 +- .../fixtures/automation_workflow.py | 7 +- .../baserow/test_utils/fixtures/service.py | 15 +- .../automation/api/nodes/test_nodes_views.py | 529 ++++------- .../api/test_automation_application_views.py | 4 + .../api/test_automation_serializer.py | 2 + .../test_automation_workflow_serializer.py | 1 + .../api/workflows/test_workflow_views.py | 10 +- .../test_data_provider_types.py | 86 +- .../history/test_history_service.py | 1 + .../automation/nodes/test_node_actions.py | 635 ++++++++++---- .../automation/nodes/test_node_dispatch.py | 408 +++++++++ .../automation/nodes/test_node_handler.py | 233 ++--- .../automation/nodes/test_node_models.py | 43 +- .../automation/nodes/test_node_service.py | 593 ++++++++----- .../automation/nodes/test_node_trash_types.py | 173 +++- .../automation/nodes/test_node_types.py | 278 +++--- .../automation/nodes/test_trash_types.py | 116 --- .../test_automation_application_types.py | 35 +- .../workflows/test_graph_handler.py | 825 ++++++++++++++++++ .../workflows/test_workflow_handler.py | 52 +- .../workflows/test_workflow_runner.py | 179 ---- .../workflows/test_workflow_service.py | 6 +- .../workflows/test_workflow_tasks.py | 15 +- .../core/test_core_periodic_service_type.py | 16 +- .../core/test_iterator_service_type.py | 74 ++ docker-compose.dev.yml | 21 - .../fixtures/automation/automationNode.ts | 27 +- e2e-tests/tests/automation/automation.spec.ts | 16 +- .../tests/automation/automationNode.spec.ts | 26 +- .../automation/automationWorkflow.spec.ts | 30 +- .../AutomationBuilderFormulaInput.vue | 2 +- .../components/AutomationHeader.vue | 8 + .../form/SimulateDispatchNodeForm.vue | 68 +- .../workflow/WorkflowAddNodeMenu.vue | 82 ++ .../components/workflow/WorkflowConnector.vue | 3 +- .../components/workflow/WorkflowEdge.vue | 78 +- .../components/workflow/WorkflowEditor.vue | 42 +- .../components/workflow/WorkflowNode.vue | 107 ++- .../workflow/WorkflowNodeContent.vue | 216 +++-- 
.../workflow/WorkflowNodeContext.vue | 67 +- .../workflow/sidePanels/NodeSidePanel.vue | 33 +- .../modules/automation/dataProviderTypes.js | 95 +- web-frontend/modules/automation/enums.js | 6 +- .../modules/automation/guidedTourTypes.js | 50 +- .../modules/automation/locales/en.json | 38 +- .../modules/automation/nodeTypeMixins.js | 5 + web-frontend/modules/automation/nodeTypes.js | 119 ++- .../automation/pages/automationWorkflow.vue | 57 +- web-frontend/modules/automation/plugin.js | 13 +- web-frontend/modules/automation/realtime.js | 74 -- .../modules/automation/serviceTypes.js | 36 - .../services/automationWorkflowNode.js | 20 +- .../store/automationWorkflowNode.js | 451 +++++----- .../automation/utils/nodeGraphHandler.js | 259 ++++++ .../NotificationWorkflowActionForm.vue | 4 +- .../modules/builder/workflowActionTypes.js | 18 +- .../scss/components/automation/all.scss | 2 +- ...tomation.scss => automation_workflow.scss} | 9 +- .../components/automation/workflow/all.scss | 3 + .../automation/workflow/workflow_edge.scss | 84 ++ .../automation/workflow/workflow_editor.scss | 235 +---- .../automation/workflow/workflow_node.scss | 40 + .../workflow/workflow_node_content.scss | 125 +++ .../scss/components/guided_tour_step.scss | 4 +- .../modules/core/components/Highlight.vue | 14 +- .../components/dataExplorer/DataExplorer.vue | 6 + .../dataExplorer/DataExplorerNode.vue | 18 +- .../components/formula/FormulaInputField.vue | 6 + .../core/components/guidedTour/GuidedTour.vue | 6 +- .../modules/core/dataProviderTypes.js | 3 +- web-frontend/modules/core/guidedTourTypes.js | 8 +- web-frontend/modules/core/locales/en.json | 7 +- web-frontend/modules/core/serviceTypes.js | 14 + .../services/CoreHTTPRequestServiceForm.vue | 10 +- .../services/CoreIteratorServiceForm.vue | 48 + .../services/CoreRouterServiceForm.vue | 2 +- .../services/CoreSMTPEmailServiceForm.vue | 14 +- .../modules/integrations/core/serviceTypes.js | 89 +- .../integrations/localBaserow/serviceTypes.js | 40 
+ .../modules/integrations/locales/en.json | 9 +- web-frontend/modules/integrations/plugin.js | 5 + 115 files changed, 6362 insertions(+), 3915 deletions(-) create mode 100644 backend/src/baserow/contrib/automation/migrations/0021_coreiteratoractionnode_alter_automationnode_options_and_more.py create mode 100644 backend/src/baserow/contrib/automation/workflows/graph_handler.py create mode 100644 backend/src/baserow/contrib/integrations/migrations/0022_coreiteratorservice.py create mode 100644 backend/tests/baserow/contrib/automation/nodes/test_node_dispatch.py delete mode 100644 backend/tests/baserow/contrib/automation/nodes/test_trash_types.py create mode 100644 backend/tests/baserow/contrib/automation/workflows/test_graph_handler.py delete mode 100644 backend/tests/baserow/contrib/automation/workflows/test_workflow_runner.py create mode 100644 backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py create mode 100644 web-frontend/modules/automation/components/workflow/WorkflowAddNodeMenu.vue delete mode 100644 web-frontend/modules/automation/serviceTypes.js create mode 100644 web-frontend/modules/automation/utils/nodeGraphHandler.js rename web-frontend/modules/core/assets/scss/components/automation/{automation.scss => automation_workflow.scss} (89%) create mode 100644 web-frontend/modules/core/assets/scss/components/automation/workflow/workflow_edge.scss create mode 100644 web-frontend/modules/core/assets/scss/components/automation/workflow/workflow_node.scss create mode 100644 web-frontend/modules/core/assets/scss/components/automation/workflow/workflow_node_content.scss create mode 100644 web-frontend/modules/integrations/core/components/services/CoreIteratorServiceForm.vue diff --git a/backend/src/baserow/contrib/automation/api/nodes/errors.py b/backend/src/baserow/contrib/automation/api/nodes/errors.py index fd05697251..3934663bef 100644 --- a/backend/src/baserow/contrib/automation/api/nodes/errors.py +++ 
b/backend/src/baserow/contrib/automation/api/nodes/errors.py @@ -1,7 +1,13 @@ from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND -ERROR_AUTOMATION_NODE_BEFORE_INVALID = ( - "ERROR_AUTOMATION_NODE_BEFORE_INVALID", +ERROR_AUTOMATION_UNEXPECTED_ERROR = ( + "ERROR_AUTOMATION_UNEXPECTED_ERROR", + HTTP_400_BAD_REQUEST, + "{e}", +) + +ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID = ( + "ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID", HTTP_400_BAD_REQUEST, "{e}", ) @@ -24,11 +30,25 @@ "{e}", ) -ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED = ( - "ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED", + +ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE = ( + "ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE", + HTTP_400_BAD_REQUEST, + "This operation is disallowed because a trigger must be the first node of " + "the workflow", +) + +ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER = ( + "ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER", + HTTP_400_BAD_REQUEST, + "This operation is disallowed because an action can't be the first node of " + "the workflow", +) + +ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS = ( + "ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS", HTTP_400_BAD_REQUEST, - "Triggers can not be created, deleted or duplicated, " - "they can only be replaced with a different type.", + "This workflow already has a trigger", ) ERROR_AUTOMATION_NODE_NOT_DELETABLE = ( diff --git a/backend/src/baserow/contrib/automation/api/nodes/serializers.py b/backend/src/baserow/contrib/automation/api/nodes/serializers.py index 6ee7616f32..7a31af2b4b 100644 --- a/backend/src/baserow/contrib/automation/api/nodes/serializers.py +++ b/backend/src/baserow/contrib/automation/api/nodes/serializers.py @@ -10,6 +10,7 @@ ) from baserow.contrib.automation.nodes.models import AutomationNode from baserow.contrib.automation.nodes.registries import automation_node_type_registry +from baserow.contrib.automation.nodes.types import NodePosition class 
AutomationNodeSerializer(serializers.ModelSerializer): @@ -19,42 +20,25 @@ class AutomationNodeSerializer(serializers.ModelSerializer): service = PolymorphicServiceSerializer( help_text="The service associated with this automation node." ) - simulate_until_node = serializers.SerializerMethodField( - help_text="Whether to simulate the dispatching of the node." - ) @extend_schema_field(OpenApiTypes.STR) def get_type(self, instance): return automation_node_type_registry.get_by_model(instance.specific_class).type - @extend_schema_field(OpenApiTypes.BOOL) - def get_simulate_until_node(self, instance): - if not instance.workflow.simulate_until_node: - return False - - return instance == instance.workflow.simulate_until_node.specific - class Meta: model = AutomationNode fields = ( "id", "label", - "order", "service", "workflow", "type", - "previous_node_id", - "previous_node_output", - "simulate_until_node", ) extra_kwargs = { "id": {"read_only": True}, "workflow_id": {"read_only": True}, "type": {"read_only": True}, - "previous_node_id": {"read_only": True}, - "order": {"read_only": True, "help_text": "Lowest first."}, - "simulate_until_node": {"read_only": True}, } @@ -64,15 +48,19 @@ class CreateAutomationNodeSerializer(serializers.ModelSerializer): required=True, help_text="The type of the automation node", ) - before_id = serializers.IntegerField( + reference_node_id = serializers.IntegerField( required=False, - help_text="If provided, creates the node before the node with the given id.", + allow_null=True, + help_text="If provided, creates the node relative to the node with the " + "given id.", ) - previous_node_id = serializers.IntegerField( + position = serializers.ChoiceField( + choices=NodePosition.choices, required=False, - help_text="If provided, creates the node after this given id.", + allow_blank=True, + help_text="The position of the new node relative to the reference node.", ) - previous_node_output = serializers.CharField( + output = 
serializers.CharField( required=False, allow_blank=True, help_text="The unique ID of the branch this node is an output for.", @@ -80,7 +68,13 @@ class CreateAutomationNodeSerializer(serializers.ModelSerializer): class Meta: model = AutomationNode - fields = ("id", "type", "before_id", "previous_node_id", "previous_node_output") + fields = ( + "id", + "type", + "reference_node_id", + "position", + "output", + ) class UpdateAutomationNodeSerializer(serializers.ModelSerializer): @@ -93,17 +87,9 @@ class Meta: fields = ( "label", "service", - "previous_node_output", ) -class OrderAutomationNodesSerializer(serializers.Serializer): - node_ids = serializers.ListField( - child=serializers.IntegerField(), - help_text=("The ids of the nodes in the order they are supposed to be set in."), - ) - - class ReplaceAutomationNodeSerializer(serializers.Serializer): new_type = serializers.ChoiceField( choices=lazy(automation_node_type_registry.get_types, list)(), @@ -113,12 +99,18 @@ class ReplaceAutomationNodeSerializer(serializers.Serializer): class MoveAutomationNodeSerializer(serializers.Serializer): - previous_node_id = serializers.IntegerField( + reference_node_id = serializers.IntegerField( required=False, - help_text="The ID of the node that should be before the moved node.", + help_text="The reference node.", + ) + position = serializers.ChoiceField( + choices=NodePosition.choices, + required=False, + allow_blank=True, + help_text="The new position relative to the reference node.", ) - previous_node_output = serializers.CharField( + output = serializers.CharField( required=False, allow_blank=True, - help_text="The output UID of the destination.", + help_text="The new output.", ) diff --git a/backend/src/baserow/contrib/automation/api/nodes/urls.py b/backend/src/baserow/contrib/automation/api/nodes/urls.py index 5e45d0d21f..9746649fc2 100644 --- a/backend/src/baserow/contrib/automation/api/nodes/urls.py +++ b/backend/src/baserow/contrib/automation/api/nodes/urls.py @@ -5,7 
+5,6 @@ AutomationNodeView, DuplicateAutomationNodeView, MoveAutomationNodeView, - OrderAutomationNodesView, ReplaceAutomationNodeView, SimulateDispatchAutomationNodeView, ) @@ -23,11 +22,6 @@ AutomationNodeView.as_view(), name="item", ), - re_path( - r"workflow/(?P[0-9]+)/order/$", - OrderAutomationNodesView.as_view(), - name="order", - ), re_path( r"node/(?P[0-9]+)/duplicate/$", DuplicateAutomationNodeView.as_view(), diff --git a/backend/src/baserow/contrib/automation/api/nodes/views.py b/backend/src/baserow/contrib/automation/api/nodes/views.py index 6c710a01e8..2585503dd6 100644 --- a/backend/src/baserow/contrib/automation/api/nodes/views.py +++ b/backend/src/baserow/contrib/automation/api/nodes/views.py @@ -11,27 +11,33 @@ from baserow.api.decorators import ( map_exceptions, + require_request_data_type, validate_body, - validate_body_custom_fields, ) from baserow.api.schemas import CLIENT_SESSION_ID_SCHEMA_PARAMETER, get_error_schema -from baserow.api.utils import DiscriminatorCustomFieldsMappingSerializer +from baserow.api.utils import ( + DiscriminatorCustomFieldsMappingSerializer, + type_from_data_or_registry, + validate_data_custom_fields, +) from baserow.contrib.automation.api.nodes.errors import ( - ERROR_AUTOMATION_NODE_BEFORE_INVALID, + ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER, ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, ERROR_AUTOMATION_NODE_MISCONFIGURED_SERVICE, ERROR_AUTOMATION_NODE_NOT_DELETABLE, ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW, ERROR_AUTOMATION_NODE_NOT_MOVABLE, ERROR_AUTOMATION_NODE_NOT_REPLACEABLE, + ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID, ERROR_AUTOMATION_NODE_SIMULATE_DISPATCH, - ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED, + ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS, + ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE, + ERROR_AUTOMATION_UNEXPECTED_ERROR, ) from baserow.contrib.automation.api.nodes.serializers import ( AutomationNodeSerializer, CreateAutomationNodeSerializer, MoveAutomationNodeSerializer, - 
OrderAutomationNodesSerializer, ReplaceAutomationNodeSerializer, UpdateAutomationNodeSerializer, ) @@ -43,21 +49,24 @@ DeleteAutomationNodeActionType, DuplicateAutomationNodeActionType, MoveAutomationNodeActionType, - OrderAutomationNodesActionType, ReplaceAutomationNodeActionType, UpdateAutomationNodeActionType, ) from baserow.contrib.automation.nodes.exceptions import ( - AutomationNodeBeforeInvalid, AutomationNodeDoesNotExist, + AutomationNodeError, + AutomationNodeFirstNodeMustBeTrigger, AutomationNodeMisconfiguredService, AutomationNodeNotDeletable, AutomationNodeNotInWorkflow, AutomationNodeNotMovable, AutomationNodeNotReplaceable, + AutomationNodeReferenceNodeInvalid, AutomationNodeSimulateDispatchError, - AutomationTriggerModificationDisallowed, + AutomationNodeTriggerAlreadyExists, + AutomationNodeTriggerMustBeFirstNode, ) +from baserow.contrib.automation.nodes.handler import AutomationNodeHandler from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.nodes.service import AutomationNodeService from baserow.contrib.automation.workflows.exceptions import ( @@ -111,9 +120,12 @@ def get_permissions(self): @map_exceptions( { AutomationWorkflowDoesNotExist: ERROR_AUTOMATION_WORKFLOW_DOES_NOT_EXIST, - AutomationNodeBeforeInvalid: ERROR_AUTOMATION_NODE_BEFORE_INVALID, + AutomationNodeReferenceNodeInvalid: ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID, AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, - AutomationTriggerModificationDisallowed: ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED, + AutomationNodeTriggerAlreadyExists: ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS, + AutomationNodeFirstNodeMustBeTrigger: ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER, + AutomationNodeTriggerMustBeFirstNode: ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE, + AutomationNodeError: ERROR_AUTOMATION_UNEXPECTED_ERROR, } ) @validate_body(CreateAutomationNodeSerializer) @@ -219,12 +231,22 @@ class 
AutomationNodeView(APIView): AutomationNodeMisconfiguredService: ERROR_AUTOMATION_NODE_MISCONFIGURED_SERVICE, } ) - @validate_body_custom_fields( - automation_node_type_registry, - base_serializer_class=UpdateAutomationNodeSerializer, - partial=True, - ) - def patch(self, request, data: Dict, node_id: int): + @require_request_data_type(dict) + def patch(self, request, node_id: int): + node = AutomationNodeHandler().get_node(node_id) + node_type = type_from_data_or_registry( + request.data, automation_node_type_registry, node + ) + + data = validate_data_custom_fields( + node_type.type, + automation_node_type_registry, + request.data, + base_serializer_class=UpdateAutomationNodeSerializer, + partial=True, + return_validated=True, + ) + node = UpdateAutomationNodeActionType.do(request.user, node_id, data) serializer = automation_node_type_registry.get_serializer( @@ -257,59 +279,17 @@ def patch(self, request, data: Dict, node_id: int): { AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, AutomationNodeNotDeletable: ERROR_AUTOMATION_NODE_NOT_DELETABLE, + AutomationNodeFirstNodeMustBeTrigger: ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE, + AutomationNodeError: ERROR_AUTOMATION_UNEXPECTED_ERROR, } ) @transaction.atomic def delete(self, request, node_id: int): node = AutomationNodeService().get_node(request.user, node_id) - node.get_type().before_delete(node) - DeleteAutomationNodeActionType.do(request.user, node_id) - - return Response(status=204) + node.get_type().before_delete(node) -class OrderAutomationNodesView(APIView): - @extend_schema( - parameters=[ - OpenApiParameter( - name="workflow_id", - location=OpenApiParameter.PATH, - type=OpenApiTypes.INT, - description="The workflow that the nodes belong to.", - ), - CLIENT_SESSION_ID_SCHEMA_PARAMETER, - ], - tags=[AUTOMATION_NODES_TAG], - operation_id="order_automation_nodes", - description="Apply a new order to the nodes of a workflow.", - request=OrderAutomationNodesSerializer, - responses={ - 204: 
None, - 400: get_error_schema( - [ - "ERROR_REQUEST_BODY_VALIDATION", - "ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW", - ] - ), - 404: get_error_schema( - [ - "ERROR_AUTOMATION_NODE_DOES_NOT_EXIST", - "ERROR_AUTOMATION_WORKFLOW_DOES_NOT_EXIST", - ] - ), - }, - ) - @transaction.atomic - @map_exceptions( - { - AutomationWorkflowDoesNotExist: ERROR_AUTOMATION_WORKFLOW_DOES_NOT_EXIST, - AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, - AutomationNodeNotInWorkflow: ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW, - } - ) - @validate_body(OrderAutomationNodesSerializer) - def post(self, request, data: Dict, workflow_id: int): - OrderAutomationNodesActionType.do(request.user, workflow_id, data["node_ids"]) + DeleteAutomationNodeActionType.do(request.user, node_id) return Response(status=204) @@ -345,7 +325,10 @@ class DuplicateAutomationNodeView(APIView): @map_exceptions( { AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, - AutomationTriggerModificationDisallowed: ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED, + AutomationNodeTriggerMustBeFirstNode: ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE, + AutomationNodeTriggerAlreadyExists: ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS, + AutomationNodeFirstNodeMustBeTrigger: ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER, + AutomationNodeError: ERROR_AUTOMATION_UNEXPECTED_ERROR, } ) def post(self, request, node_id: int): @@ -389,6 +372,8 @@ class ReplaceAutomationNodeView(APIView): { AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, AutomationNodeNotReplaceable: ERROR_AUTOMATION_NODE_NOT_REPLACEABLE, + AutomationNodeNotInWorkflow: ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW, + AutomationNodeError: ERROR_AUTOMATION_UNEXPECTED_ERROR, } ) @validate_body(ReplaceAutomationNodeSerializer) @@ -396,6 +381,7 @@ def post(self, request, data: Dict, node_id: int): replaced_node = ReplaceAutomationNodeActionType.do( request.user, node_id, data["new_type"] ) + return Response( 
automation_node_type_registry.get_serializer( replaced_node, AutomationNodeSerializer @@ -471,18 +457,14 @@ class MoveAutomationNodeView(APIView): { AutomationNodeDoesNotExist: ERROR_AUTOMATION_NODE_DOES_NOT_EXIST, AutomationNodeNotMovable: ERROR_AUTOMATION_NODE_NOT_MOVABLE, + AutomationNodeNotInWorkflow: ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW, + AutomationNodeFirstNodeMustBeTrigger: ERROR_AUTOMATION_FIRST_NODE_MUST_BE_TRIGGER, + AutomationNodeTriggerMustBeFirstNode: ERROR_AUTOMATION_TRIGGER_MUST_BE_FIRST_NODE, + AutomationNodeReferenceNodeInvalid: ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID, + AutomationNodeError: ERROR_AUTOMATION_UNEXPECTED_ERROR, } ) @validate_body(MoveAutomationNodeSerializer) def post(self, request, data: Dict, node_id: int): - moved_node = MoveAutomationNodeActionType.do( - request.user, - node_id, - data["previous_node_id"], - data["previous_node_output"], - ) - return Response( - automation_node_type_registry.get_serializer( - moved_node, AutomationNodeSerializer - ).data - ) + MoveAutomationNodeActionType.do(request.user, node_id, **data) + return Response(status=HTTP_202_ACCEPTED) diff --git a/backend/src/baserow/contrib/automation/api/workflows/serializers.py b/backend/src/baserow/contrib/automation/api/workflows/serializers.py index f0cb9ba0f6..3c38b4271d 100644 --- a/backend/src/baserow/contrib/automation/api/workflows/serializers.py +++ b/backend/src/baserow/contrib/automation/api/workflows/serializers.py @@ -28,6 +28,7 @@ class Meta: "simulate_until_node_id", "published_on", "state", + "graph", ) extra_kwargs = { "id": {"read_only": True}, diff --git a/backend/src/baserow/contrib/automation/apps.py b/backend/src/baserow/contrib/automation/apps.py index 3bfe4b090b..75ac9d9eba 100644 --- a/backend/src/baserow/contrib/automation/apps.py +++ b/backend/src/baserow/contrib/automation/apps.py @@ -16,13 +16,13 @@ def ready(self): DeleteAutomationNodeActionType, DuplicateAutomationNodeActionType, MoveAutomationNodeActionType, - 
OrderAutomationNodesActionType, ReplaceAutomationNodeActionType, UpdateAutomationNodeActionType, ) from baserow.contrib.automation.nodes.node_types import ( CoreHttpRequestNodeType, CoreHTTPTriggerNodeType, + CoreIteratorNodeType, CorePeriodicTriggerNodeType, CoreRouterActionNodeType, CoreSMTPEmailNodeType, @@ -88,9 +88,6 @@ def ready(self): from baserow.contrib.automation.workflows.trash_types import ( AutomationWorkflowTrashableItemType, ) - from baserow.contrib.integrations.core.service_types import ( - CorePeriodicServiceType, - ) from baserow.core.action.registries import ( action_scope_registry, action_type_registry, @@ -101,7 +98,6 @@ def ready(self): object_scope_type_registry, operation_type_registry, ) - from baserow.core.services.registries import service_type_registry from baserow.core.trash.registries import trash_item_type_registry if feature_flag_is_enabled(FF_AUTOMATION): @@ -144,15 +140,12 @@ def ready(self): action_type_registry.register(CreateAutomationNodeActionType()) action_type_registry.register(UpdateAutomationNodeActionType()) action_type_registry.register(DeleteAutomationNodeActionType()) - action_type_registry.register(OrderAutomationNodesActionType()) action_type_registry.register(DuplicateAutomationNodeActionType()) action_type_registry.register(ReplaceAutomationNodeActionType()) action_type_registry.register(MoveAutomationNodeActionType()) action_scope_registry.register(WorkflowActionScopeType()) - service_type_registry.register(CorePeriodicServiceType()) - automation_node_type_registry.register(LocalBaserowCreateRowNodeType()) automation_node_type_registry.register(LocalBaserowUpdateRowNodeType()) automation_node_type_registry.register(LocalBaserowDeleteRowNodeType()) @@ -160,6 +153,7 @@ def ready(self): automation_node_type_registry.register(LocalBaserowListRowsNodeType()) automation_node_type_registry.register(LocalBaserowAggregateRowsNodeType()) automation_node_type_registry.register(CoreHttpRequestNodeType()) + 
automation_node_type_registry.register(CoreIteratorNodeType()) automation_node_type_registry.register(CoreSMTPEmailNodeType()) automation_node_type_registry.register(CoreRouterActionNodeType()) automation_node_type_registry.register( @@ -181,6 +175,7 @@ def ready(self): ) from baserow.contrib.automation.data_providers.data_provider_types import ( + CurrentIterationDataProviderType, PreviousNodeProviderType, ) from baserow.contrib.automation.data_providers.registries import ( @@ -188,6 +183,9 @@ def ready(self): ) automation_data_provider_type_registry.register(PreviousNodeProviderType()) + automation_data_provider_type_registry.register( + CurrentIterationDataProviderType() + ) from baserow.contrib.automation.nodes.permission_manager import ( AutomationNodePermissionManager, diff --git a/backend/src/baserow/contrib/automation/automation_dispatch_context.py b/backend/src/baserow/contrib/automation/automation_dispatch_context.py index 4267163bdd..67bbc7b8f2 100644 --- a/backend/src/baserow/contrib/automation/automation_dispatch_context.py +++ b/backend/src/baserow/contrib/automation/automation_dispatch_context.py @@ -36,12 +36,14 @@ def __init__( self.previous_nodes_results: Dict[int, Any] = {} self.dispatch_history: List[int] = [] self.simulate_until_node = simulate_until_node + self.current_iterations: Dict[int, int] = {} services = ( [self.simulate_until_node.service.specific] if self.simulate_until_node else None ) + force_outputs = ( simulate_until_node.get_previous_service_outputs() if simulate_until_node @@ -58,6 +60,7 @@ def __init__( def clone(self, **kwargs): new_context = super().clone(**kwargs) new_context.previous_nodes_results = {**self.previous_nodes_results} + new_context.current_iterations = {**self.current_iterations} new_context.dispatch_history = list(self.dispatch_history) return new_context @@ -80,6 +83,9 @@ def after_dispatch(self, node: AutomationNode, dispatch_result: DispatchResult): self.dispatch_history.append(node.id) 
self._register_node_result(node, dispatch_result.data) + def set_current_iteration(self, node, index): + self.current_iterations[node.id] = index + def range(self, service: Service): return [0, None] diff --git a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py index d9a900da1b..a7bb1ed1fd 100644 --- a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py +++ b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py @@ -24,15 +24,28 @@ def get_data_chunk( self, dispatch_context: AutomationDispatchContext, path: List[str] ): previous_node_id, *rest = path - previous_node_results = dispatch_context.previous_nodes_results.get( - int(previous_node_id), SENTINEL - ) - if previous_node_results is SENTINEL: + + previous_node_id = int(previous_node_id) + + try: + previous_node = AutomationNodeHandler().get_node(previous_node_id) + except AutomationNodeDoesNotExist as exc: + message = "The previous node doesn't exist" + raise InvalidFormulaContext(message) from exc + + try: + previous_node_results = dispatch_context.previous_nodes_results[ + int(previous_node.id) + ] + if previous_node.service.get_type().returns_list: + previous_node_results = previous_node_results["results"] + except KeyError as exc: message = ( "The previous node id is not present in the dispatch context results" ) - raise InvalidFormulaContext(message) - return get_value_at_path(previous_node_results, rest) + raise InvalidFormulaContext(message) from exc + else: + return get_value_at_path(previous_node_results, rest) def import_path(self, path, id_mapping, **kwargs): """ @@ -57,3 +70,61 @@ def import_path(self, path, id_mapping, **kwargs): rest = service_type.import_context_path(rest, id_mapping) return [str(new_node_id), *rest] + + +class CurrentIterationDataProviderType(AutomationDataProviderType): + type = "current_iteration" + + def get_data_chunk( + 
self, dispatch_context: AutomationDispatchContext, path: List[str] + ): + parent_node_id, *rest = path + + parent_node_id = int(parent_node_id) + + try: + parent_node_results = dispatch_context.previous_nodes_results[ + parent_node_id + ] + except KeyError as exc: + message = ( + "The parent node id is not present in the dispatch context results" + ) + raise InvalidFormulaContext(message) from exc + + try: + current_iteration = dispatch_context.current_iterations[parent_node_id] + except KeyError as exc: + message = ( + "The current node iteration is not present in the dispatch context" + ) + raise InvalidFormulaContext(message) from exc + + current_item = parent_node_results[current_iteration] + data = {"index": current_iteration, "item": current_item} + + return get_value_at_path(data, rest) + + def import_path(self, path, id_mapping, **kwargs): + """ + Update the parent node ID of the path. + + :param path: the path part list. + :param id_mapping: The id_mapping of the process import. + :return: The updated path. + """ + + parent_node_id, *rest = path + + try: + new_node_id = id_mapping["automation_workflow_nodes"][int(parent_node_id)] + node = AutomationNodeHandler().get_node(new_node_id) + except (KeyError, AutomationNodeDoesNotExist): + # In the event the `previous_node_id` is not found in the `id_mapping`, + # or if the previous node does not exist, we return the malformed path. 
+ return [str(parent_node_id), *rest] + else: + service_type = node.service.get_type() + rest = service_type.import_context_path(rest, id_mapping) + + return [str(new_node_id), *rest] diff --git a/backend/src/baserow/contrib/automation/migrations/0021_coreiteratoractionnode_alter_automationnode_options_and_more.py b/backend/src/baserow/contrib/automation/migrations/0021_coreiteratoractionnode_alter_automationnode_options_and_more.py new file mode 100644 index 0000000000..07831a8c44 --- /dev/null +++ b/backend/src/baserow/contrib/automation/migrations/0021_coreiteratoractionnode_alter_automationnode_options_and_more.py @@ -0,0 +1,111 @@ +# Generated by Django 5.0.14 on 2025-10-20 15:53 + +import django.db.models.deletion +from django.db import migrations, models + + +def find(list_, predicate): + return next((n for n in list_ if predicate(n)), None) + + +def add_node_to_graph(graph, nodes, current_node): + graph[str(current_node.id)] = {} + + next_nodes = [n for n in nodes if n.previous_node_id == current_node.id] + if next_nodes: + graph[str(current_node.id)]["next"] = { + n.previous_node_output: [n.id] for n in next_nodes + } + + for next_node in next_nodes: + add_node_to_graph(graph, nodes, next_node) + + +def forward(apps, schema_editor): + Workflow = apps.get_model("automation", "automationworkflow") + AutomationNode = apps.get_model("automation", "automationnode") + + all_nodes = list(AutomationNode.objects.filter(trashed=False)) + + for workflow in Workflow.objects.all(): + graph = {} + + nodes = [n for n in all_nodes if n.workflow_id == workflow.id] + trigger = find(nodes, lambda n: n.previous_node_id is None) + + if trigger: + graph["0"] = trigger.id + add_node_to_graph(graph, nodes, trigger) + + workflow.graph = graph + workflow.save(update_fields=["graph"]) + + +def reverse(apps, schema_editor): + Workflow = apps.get_model("automation", "automationworkflow") + AutomationNode = apps.get_model("automation", "automationnode") + + for workflow in 
Workflow.objects.all(): + graph = workflow.graph + for key, info in graph.items(): + if key == "0": + continue + for output, nodes in info.get("next", {}).items(): + AutomationNode.objects.filter(id__in=nodes).update( + previous_node_id=key, previous_node_output=output + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("automation", "0020_corehttptriggernode"), + ] + + operations = [ + migrations.CreateModel( + name="CoreIteratorActionNode", + fields=[ + ( + "automationnode_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="automation.automationnode", + ), + ), + ], + options={ + "abstract": False, + }, + bases=("automation.automationnode",), + ), + migrations.AlterModelOptions( + name="automationnode", + options={"ordering": ("id",)}, + ), + migrations.RemoveField( + model_name="automationnode", + name="order", + ), + migrations.RemoveField( + model_name="automationnode", + name="parent_node", + ), + migrations.AddField( + model_name="automationworkflow", + name="graph", + field=models.JSONField(default=dict, help_text="Contains the node graph."), + ), + migrations.RunPython(forward, reverse), + migrations.RemoveField( + model_name="automationnode", + name="previous_node", + ), + migrations.RemoveField( + model_name="automationnode", + name="previous_node_output", + ), + ] diff --git a/backend/src/baserow/contrib/automation/nodes/actions.py b/backend/src/baserow/contrib/automation/nodes/actions.py index a39a0366af..52092652df 100644 --- a/backend/src/baserow/contrib/automation/nodes/actions.py +++ b/backend/src/baserow/contrib/automation/nodes/actions.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Any, List, Optional +from typing import Any from django.contrib.auth.models import AbstractUser from django.utils.translation import gettext_lazy as _ @@ -8,19 +8,12 @@ NODE_ACTION_CONTEXT, WorkflowActionScopeType, ) 
-from baserow.contrib.automation.actions import AUTOMATION_WORKFLOW_CONTEXT -from baserow.contrib.automation.nodes.handler import AutomationNodeHandler from baserow.contrib.automation.nodes.models import AutomationActionNode, AutomationNode from baserow.contrib.automation.nodes.node_types import AutomationNodeType -from baserow.contrib.automation.nodes.registries import ( - ReplaceAutomationNodeTrashOperationType, -) from baserow.contrib.automation.nodes.service import AutomationNodeService -from baserow.contrib.automation.nodes.signals import automation_node_replaced from baserow.contrib.automation.nodes.trash_types import AutomationNodeTrashableItemType -from baserow.contrib.automation.nodes.types import NextAutomationNodeValues +from baserow.contrib.automation.nodes.types import NodePositionType from baserow.contrib.automation.workflows.models import AutomationWorkflow -from baserow.contrib.automation.workflows.service import AutomationWorkflowService from baserow.core.action.models import Action from baserow.core.action.registries import ActionTypeDescription, UndoableActionType from baserow.core.trash.handler import TrashHandler @@ -49,14 +42,7 @@ def do( workflow: AutomationWorkflow, data: dict, ) -> AutomationNode: - before_id = data.pop("before_id", None) - before = ( - AutomationNodeService().get_node(user, before_id) if before_id else None - ) - - node = AutomationNodeService().create_node( - user, node_type, workflow, before, **data - ) + node = AutomationNodeService().create_node(user, node_type, workflow, **data) cls.register_action( user=user, @@ -225,75 +211,6 @@ def redo( AutomationNodeService().delete_node(user, params.node_id) -class OrderAutomationNodesActionType(UndoableActionType): - type = "order_automation_nodes" - description = ActionTypeDescription( - _("Order nodes"), - _("Node order changed"), - AUTOMATION_WORKFLOW_CONTEXT, - ) - - @dataclass - class Params: - workflow_id: int - nodes_order: List[int] - original_nodes_order: List[int] - 
automation_name: str - automation_id: int - - @classmethod - def do(cls, user: AbstractUser, workflow_id: int, order: List[int]) -> None: - workflow = AutomationWorkflowService().get_workflow(user, workflow_id) - - original_nodes_order = AutomationNodeHandler().get_nodes_order(workflow) - params = cls.Params( - workflow_id, - order, - original_nodes_order, - workflow.automation.name, - workflow.automation.id, - ) - - AutomationNodeService().order_nodes(user, workflow, order=order) - - cls.register_action( - user=user, - params=params, - scope=cls.scope(workflow_id), - workspace=workflow.automation.workspace, - ) - - @classmethod - def scope(cls, workflow_id): - return WorkflowActionScopeType.value(workflow_id) - - @classmethod - def undo( - cls, - user: AbstractUser, - params: Params, - action_to_undo: Action, - ): - AutomationNodeService().order_nodes( - user, - AutomationWorkflowService().get_workflow(user, params.workflow_id), - order=params.original_nodes_order, - ) - - @classmethod - def redo( - cls, - user: AbstractUser, - params: Params, - action_to_redo: Action, - ): - AutomationNodeService().order_nodes( - user, - AutomationWorkflowService().get_workflow(user, params.workflow_id), - order=params.nodes_order, - ) - - class DuplicateAutomationNodeActionType(UndoableActionType): type = "duplicate_automation_node" description = ActionTypeDescription( @@ -309,9 +226,7 @@ class Params: workflow_id: int node_id: int # The source node id node_type: str # The source node type - source_node_next_nodes_values: List[NextAutomationNodeValues] duplicated_node_id: int - duplicated_node_next_nodes_values: List[NextAutomationNodeValues] @classmethod def do( @@ -320,7 +235,7 @@ def do( source_node_id: int, ) -> AutomationNode: source_node = AutomationNodeService().get_node(user, source_node_id) - duplication = AutomationNodeService().duplicate_node(user, source_node) + duplicated_node = AutomationNodeService().duplicate_node(user, source_node_id) workflow = 
source_node.workflow cls.register_action( user=user, @@ -330,14 +245,12 @@ def do( workflow.id, source_node_id, source_node.get_type().type, - duplication.source_node_next_nodes_values, - duplication.duplicated_node.id, - duplication.duplicated_node_next_nodes_values, + duplicated_node.id, ), scope=cls.scope(workflow.id), workspace=workflow.automation.workspace, ) - return duplication.duplicated_node + return duplicated_node @classmethod def scope(cls, workflow_id): @@ -353,11 +266,6 @@ def undo( # Trash the duplicated node. AutomationNodeService().delete_node(user, params.duplicated_node_id) - # Revert any next nodes to point back to the source node. - AutomationNodeHandler().update_next_nodes_values( - params.source_node_next_nodes_values - ) - @classmethod def redo( cls, @@ -372,11 +280,6 @@ def redo( params.duplicated_node_id, ) - # Revert any next nodes to point back to the duplicated node. - AutomationNodeHandler().update_next_nodes_values( - params.duplicated_node_next_nodes_values - ) - class ReplaceAutomationNodeActionType(UndoableActionType): type = "replace_automation_node" @@ -408,6 +311,7 @@ def do( ) -> AutomationNode: replacement = AutomationNodeService().replace_node(user, node_id, new_node_type) replaced_node = replacement.node + cls.register_action( user=user, params=cls.Params( @@ -441,19 +345,9 @@ def undo( AutomationNodeTrashableItemType.type, params.original_node_id, ) - # Trash the node of the new type, and pass its operation type so that its - # trash entry is flagged as managed to prevent users from restoring it. 
- deleted_node = AutomationNodeService().delete_node( - user, - params.node_id, - trash_operation_type=ReplaceAutomationNodeTrashOperationType.type, - ) - automation_node_replaced.send( - cls, - workflow=restored_node.workflow, - deleted_node=deleted_node, - restored_node=restored_node.specific, - user=user, + + AutomationNodeService().replace_node( + user, params.node_id, params.original_node_type, existing_node=restored_node ) @classmethod @@ -469,19 +363,9 @@ def redo( AutomationNodeTrashableItemType.type, params.node_id, ) - # Trash the node of the original type, and pass its operation type so that its - # trash entry is flagged as managed to prevent users from restoring it. - deleted_node = AutomationNodeService().delete_node( - user, - params.original_node_id, - trash_operation_type=ReplaceAutomationNodeTrashOperationType.type, - ) - automation_node_replaced.send( - cls, - workflow=restored_node.workflow, - restored_node=restored_node.specific, - deleted_node=deleted_node, - user=user, + + AutomationNodeService().replace_node( + user, params.original_node_id, params.node_type, existing_node=restored_node ) @@ -500,48 +384,50 @@ class Params: workflow_id: int node_id: int node_type: str - origin_previous_node_id: int - origin_previous_node_output: str - origin_new_next_nodes_values: List[NextAutomationNodeValues] - origin_old_next_nodes_values: List[NextAutomationNodeValues] - destination_previous_node_id: int - destination_previous_node_output: str - destination_new_next_nodes_values: List[NextAutomationNodeValues] - destination_old_next_nodes_values: List[NextAutomationNodeValues] + origin_reference_node_id: int + origin_position: NodePositionType + origin_output: str + destination_reference_node_id: int + destination_position: NodePositionType + destination_output: str @classmethod def do( cls, user: AbstractUser, node_id: int, - new_previous_node_id: int, - new_previous_node_output: Optional[str] = None, + reference_node_id: int | None, + position: 
NodePositionType, + output: str, ) -> AutomationActionNode: move = AutomationNodeService().move_node( - user, node_id, new_previous_node_id, new_previous_node_output + user, + node_id, + reference_node_id, + position, + output, ) - workflow = move.node.workflow + node = move.node + workflow = node.workflow cls.register_action( user=user, params=cls.Params( workflow.automation_id, workflow.automation.name, workflow.id, - move.node.id, - move.node.get_type().type, - move.origin_previous_node_id, - move.origin_previous_node_output, - move.origin_new_next_nodes_values, - move.origin_old_next_nodes_values, - move.destination_previous_node_id, - move.destination_previous_node_output, - move.destination_new_next_nodes_values, - move.destination_old_next_nodes_values, + node.id, + node.get_type().type, + move.previous_reference_node.id, + move.previous_position, + move.previous_output, + reference_node_id, + position, + output, ), scope=cls.scope(workflow.id), workspace=workflow.automation.workspace, ) - return move.node + return node @classmethod def scope(cls, workflow_id): @@ -554,25 +440,12 @@ def undo( params: Params, action_to_undo: Action, ): - # Revert the node to its original position & output (if applicable). - AutomationNodeService().update_node( + AutomationNodeService().move_node( user, params.node_id, - previous_node_id=params.origin_previous_node_id, - previous_node_output=params.origin_previous_node_output, - ) - - # Pluck out the workflow, we need it to send our signals for next nodes. - workflow = AutomationWorkflowService().get_workflow(user, params.workflow_id) - - # Revert the origin's next nodes back to their original position. - AutomationNodeService().update_next_nodes_values( - user, params.origin_old_next_nodes_values, workflow - ) - - # Revert the destination's next nodes back to their original position. 
- AutomationNodeService().update_next_nodes_values( - user, params.destination_old_next_nodes_values, workflow + params.origin_reference_node_id, + params.origin_position, + params.origin_output, ) @classmethod @@ -582,23 +455,10 @@ def redo( params: Params, action_to_redo: Action, ): - # Set the node to its new position & output (if applicable). - AutomationNodeService().update_node( + AutomationNodeService().move_node( user, params.node_id, - previous_node_id=params.destination_previous_node_id, - previous_node_output=params.destination_previous_node_output, - ) - - # Pluck out the workflow, we need it to send our signals for next nodes. - workflow = AutomationWorkflowService().get_workflow(user, params.workflow_id) - - # Set the origin's next nodes to their new position. - AutomationNodeService().update_next_nodes_values( - user, params.origin_new_next_nodes_values, workflow - ) - - # Set the destination's next nodes to their new position. - AutomationNodeService().update_next_nodes_values( - user, params.destination_new_next_nodes_values, workflow + params.destination_reference_node_id, + params.destination_position, + params.destination_output, ) diff --git a/backend/src/baserow/contrib/automation/nodes/exceptions.py b/backend/src/baserow/contrib/automation/nodes/exceptions.py index 5079885063..d65da4a254 100644 --- a/backend/src/baserow/contrib/automation/nodes/exceptions.py +++ b/backend/src/baserow/contrib/automation/nodes/exceptions.py @@ -29,26 +29,34 @@ def __init__(self, node_id=None, *args, **kwargs): ) -class AutomationNodeBeforeInvalid(Exception): +class AutomationNodeNotFoundInGraph(AutomationNodeError): """ - Raised when trying to create an automation node `before` another, but it is invalid. - This can happen if the `before` is a trigger, or if `before.workflow` belongs to a - different workflow to the one supplied. + Raised when we try to access a node that doesn't exist in the graph. 
""" -class AutomationNodeMisconfiguredService(AutomationNodeError): - """When the node's service is misconfigured.""" - - -class AutomationTriggerModificationDisallowed(AutomationNodeError): +class AutomationNodeReferenceNodeInvalid(AutomationNodeError): """ - Raised when trying to create, delete or duplicate a trigger node. There can only - be one trigger node per workflow, and it is created automatically when the workflow - is created. Users can only change the trigger node type, not create a new one. + Raised when trying to use an invalid reference node. """ +class AutomationNodeTriggerAlreadyExists(AutomationNodeError): + """When we try to create a trigger node when it already exists""" + + +class AutomationNodeFirstNodeMustBeTrigger(AutomationNodeError): + """When we try to create a non trigger node as first node of the graph""" + + +class AutomationNodeTriggerMustBeFirstNode(AutomationNodeError): + """When we try to create a trigger node as non first node of the graph""" + + +class AutomationNodeMisconfiguredService(AutomationNodeError): + """When the node's service is misconfigured.""" + + class AutomationNodeNotDeletable(AutomationNodeError): """ Raised when an automation node is not deletable. This can happen if @@ -73,3 +81,9 @@ class AutomationNodeNotMovable(AutomationNodeError): Raised when an automation node is not movable. This can happen if the node's type dictates that it cannot be moved due to its state. """ + + +class AutomationNodeMissingOutput(AutomationNodeError): + """ + Raised when the target output is missing in the reference node. 
+ """ diff --git a/backend/src/baserow/contrib/automation/nodes/handler.py b/backend/src/baserow/contrib/automation/nodes/handler.py index 76cec124a1..bf0dd864e2 100644 --- a/backend/src/baserow/contrib/automation/nodes/handler.py +++ b/backend/src/baserow/contrib/automation/nodes/handler.py @@ -11,23 +11,16 @@ from baserow.contrib.automation.nodes.exceptions import ( AutomationNodeDoesNotExist, AutomationNodeMisconfiguredService, - AutomationNodeNotInWorkflow, ) -from baserow.contrib.automation.nodes.models import AutomationActionNode, AutomationNode +from baserow.contrib.automation.nodes.models import AutomationNode from baserow.contrib.automation.nodes.node_types import ( AutomationNodeActionNodeType, AutomationNodeType, ) from baserow.contrib.automation.nodes.registries import automation_node_type_registry -from baserow.contrib.automation.nodes.types import ( - AutomationNodeDict, - AutomationNodeDuplication, - AutomationNodeMove, - NextAutomationNodeValues, -) +from baserow.contrib.automation.nodes.types import AutomationNodeDict from baserow.core.cache import local_cache from baserow.core.db import specific_iterator -from baserow.core.exceptions import IdDoesNotExist from baserow.core.registries import ImportExportConfig from baserow.core.services.exceptions import ( ServiceImproperlyConfiguredDispatchException, @@ -41,7 +34,17 @@ class AutomationNodeHandler: - allowed_fields = ["label", "service", "previous_node_id", "previous_node_output"] + allowed_fields = [ + "label", + "service", + ] + allowed_update_fields = [ + "label", + "service", + ] + + def _get_node_cache_key(self, workflow, specific): + return f"wa_get_{workflow.id}_nodes_{specific}" def get_nodes( self, @@ -88,37 +91,30 @@ def _get_nodes(base_queryset=base_queryset): if with_cache and not base_queryset: return local_cache.get( - f"wa_get_{workflow.id}_nodes_{specific}", + self._get_node_cache_key(workflow, specific), _get_nodes, ) return _get_nodes() - def get_next_nodes( - self, - workflow, - 
node: None | AutomationNode, - output_uid: str | None = None, - specific: bool = False, - ) -> Iterable["AutomationNode"]: + def invalidate_node_cache(self, workflow): """ - Returns all nodes which follow the given node in the workflow. A list of nodes - is returned as there can be multiple nodes that follow this one, for example - when there are multiple branches in the workflow. - - :param workflow: filter nodes for this workflow. - :param node: this is the previous not. If null, first nodes are returned. - :param output_uid: filter nodes only for this output uid. - :param specific: If True, returns the specific node type. + Invalidates the node cache. To be used when we add or remove a node from the + graph. + + :param workflow: The target workflow cache. """ - queryset = AutomationNode.objects.filter( - previous_node_id=node.id if node else None - ) + local_cache.delete(self._get_node_cache_key(workflow, True)) + local_cache.delete(self._get_node_cache_key(workflow, False)) + + def get_children(self, node, specific=True): + """ + Returns the direct children of the given node. - if output_uid is not None: - queryset = queryset.filter(previous_node_output=output_uid) + :param specific: Whether to return specific node instances. + """ - return self.get_nodes(workflow, base_queryset=queryset, specific=specific) + return node.workflow.get_graph().get_children(node) def get_node( self, node_id: int, base_queryset: Optional[QuerySet] = None @@ -145,65 +141,10 @@ def get_node( except AutomationNode.DoesNotExist: raise AutomationNodeDoesNotExist(node_id) - def update_previous_node( - self, - new_previous_node: AutomationNode, - nodes: List[AutomationNode], - previous_node_output: Optional[str] = None, - ) -> List[AutomationActionNode]: - """ - Relink all nodes to the given new previous node and ensure that we set the - previous node output correctly. - - :param new_previous_node: The new previous node. - :param nodes: The nodes to relink. 
- :param previous_node_output: The output of the previous node, if any. - """ - - update_kwargs = {"previous_node": new_previous_node} - if previous_node_output is not None: - update_kwargs["previous_node_output"] = previous_node_output - - updates = [] - for node in nodes: - for key, value in update_kwargs.items(): - setattr(node, key, value) - updates.append(node) - AutomationNode.objects.bulk_update(updates, update_kwargs.keys()) - - return updates - - def update_next_nodes_values( - self, - next_node_values: List[NextAutomationNodeValues], - ) -> List[AutomationActionNode]: - """ - Update the next nodes values for a list of nodes. - - :param next_node_values: The new next node values. - :return: The updated nodes. - """ - - next_node_updates = [] - next_nodes = AutomationNode.objects.filter( - pk__in=[next_node_value["id"] for next_node_value in next_node_values] - ) - next_nodes_grouped = {node.id: node for node in next_nodes} - for next_node_value in next_node_values: - next_node = next_nodes_grouped.get(next_node_value["id"]) - next_node.previous_node_id = next_node_value["previous_node_id"] - next_node.previous_node_output = next_node_value["previous_node_output"] - next_node_updates.append(next_node) - AutomationNode.objects.bulk_update( - next_node_updates, ["previous_node_id", "previous_node_output"] - ) - return next_node_updates - def create_node( self, node_type: AutomationNodeType, workflow: AutomationWorkflow, - before: Optional[AutomationNode] = None, **kwargs, ) -> AutomationNode: """ @@ -211,8 +152,6 @@ def create_node( :param node_type: The automation node's type. :param workflow: The workflow the automation node is associated with. - :param before: If provided and no order is provided, will place the new node - before the given node. :return: The newly created automation node instance. """ @@ -220,65 +159,11 @@ def create_node( kwargs, self.allowed_fields + node_type.allowed_fields ) - # Are we creating a node as a child of another node? 
- parent_node_id = allowed_prepared_values.get("parent_node_id", None) - - node_previous_ids_to_update = [] - - # Are we creating a node before another? If we are, the - # `previous_node_id` and `previous_node_output` fields - # need to be adjusted. - if before: - # We're creating a node before another, and it has an - # output, so we need to re-use it for this new node. - if before.previous_node_output: - allowed_prepared_values[ - "previous_node_output" - ] = before.previous_node_output - - # Find the nodes that are using `before` as their previous node. - # If `before` has a `previous_node_id`, then we get `before.previous_node`'s - # next nodes. If there's no `previous_node_id`, then `before` is a trigger, - # so we want the nodes that come after this trigger. - node_previous_ids_to_update = list( - workflow.automation_workflow_nodes.filter( - previous_node_id=before.id - if before.previous_node_id is None - else before.previous_node_id, - previous_node_output=before.previous_node_output, - ) - ) - - # If we don't already have a `previous_node_id`... - if "previous_node_id" not in allowed_prepared_values: - # Figure out what the previous node ID should be. If we've been given a - # `before` node, then we'll use its previous node ID. If not, we'll use the - # last node ID of the workflow, which is the last node in the hierarchy. - allowed_prepared_values["previous_node_id"] = ( - before.previous_node_id - if before - else AutomationWorkflow.get_last_node_id(workflow, parent_node_id) - ) - - order = kwargs.pop("order", None) - if before: - order = AutomationNode.get_unique_order_before_node(before, parent_node_id) - elif not order: - order = AutomationNode.get_last_order(workflow) - - allowed_prepared_values["workflow"] = workflow - node = node_type.model_class(order=order, **allowed_prepared_values) - node.save() - - # If we have `previous_node_id` to update, we need to adjust them. 
- if node_previous_ids_to_update: - self.update_previous_node(node, node_previous_ids_to_update) + node = node_type.model_class.objects.create( + workflow=workflow, **allowed_prepared_values + ) - # If we have a `before` node, and it had an output, then - # we need to clear it as `node` has now claimed it as its output. - if before and before.previous_node_output: - before.previous_node_output = "" - before.save(update_fields=["previous_node_output"]) + self.invalidate_node_cache(workflow) return node @@ -292,7 +177,8 @@ def update_node(self, node: AutomationNode, **kwargs) -> AutomationNode: :return: The updated AutomationNode. """ - allowed_values = extract_allowed(kwargs, self.allowed_fields) + allowed_values = extract_allowed(kwargs, self.allowed_update_fields) + for key, value in allowed_values.items(): setattr(node, key, value) @@ -300,78 +186,18 @@ def update_node(self, node: AutomationNode, **kwargs) -> AutomationNode: return node - def get_nodes_order(self, workflow: AutomationWorkflow) -> List[int]: - """ - Returns the nodes in the workflow ordered by the order field. - - :param workflow: The workflow that the nodes belong to. - :return: A list containing the order of the nodes in the workflow. - """ - - return [ - node.id for node in workflow.automation_workflow_nodes.order_by("order") - ] - - def order_nodes( - self, - workflow: AutomationWorkflow, - order: List[int], - base_qs=None, - ) -> List[int]: - """ - Assigns a new order to the nodes in a workflow. - - A base_qs can be provided to pre-filter the nodes affected by this change. - - :param workflow: The workflow that the nodes belong to. - :param order: The new order of the nodes. - :param base_qs: A QS that can have filters already applied. - :raises AutomationNodeNotInWorkflow: If the node is not part of the - provided workflow. - :return: The new order of the nodes. 
- """ - - if base_qs is None: - base_qs = AutomationNode.objects.filter(workflow=workflow) - - try: - full_order = AutomationNode.order_objects(base_qs, order) - except IdDoesNotExist as error: - raise AutomationNodeNotInWorkflow(error.not_existing_id) - - return full_order - - def duplicate_node(self, source_node: AutomationNode) -> AutomationNodeDuplication: + def duplicate_node(self, source_node: AutomationNode) -> AutomationNode: """ Duplicates an existing AutomationNode instance. :param source_node: The AutomationNode that is being duplicated. :raises ValueError: When the provided node is not an instance of AutomationNode. - :return: The `AutomationNodeDuplication` dataclass containing the source - node, its next nodes values and the duplicated node. + :return: The duplicated node. """ exported_node = self.export_node(source_node) - # Does `node` have any next nodes with no output? If so, we need to ensure - # their `previous_node_id` are updated to the new duplicated node. - source_node_next_nodes = list(source_node.get_next_nodes(output_uid="")) - source_node_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in source_node_next_nodes - ] - - exported_node["order"] = AutomationNode.get_last_order(source_node.workflow) - # The duplicated node can't have the same output as the source node. - exported_node["previous_node_output"] = "" - # The duplicated node will follow `node`. - exported_node["previous_node_id"] = source_node.id - id_mapping = defaultdict(lambda: MirrorDict()) id_mapping["automation_workflow_nodes"] = MirrorDict() @@ -389,134 +215,9 @@ def duplicate_node(self, source_node: AutomationNode) -> AutomationNodeDuplicati import_export_config=import_export_config, ) - # Update the nodes that follow the original node to now follow the new clone. 
- self.update_previous_node(duplicated_node, source_node_next_nodes) - - # Get the next nodes without outputs of the duplicated node. - duplicated_node_next_nodes = list(duplicated_node.get_next_nodes(output_uid="")) - duplicated_node_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in duplicated_node_next_nodes - ] - - return AutomationNodeDuplication( - source_node=source_node, - source_node_next_nodes_values=source_node_next_nodes_values, - duplicated_node=duplicated_node, - duplicated_node_next_nodes_values=duplicated_node_next_nodes_values, - ) + self.invalidate_node_cache(duplicated_node.workflow) - def move_node( - self, - node: AutomationActionNode, - after_node: AutomationNode, - previous_node_output: Optional[str] = None, - order: Optional[float] = None, - ) -> AutomationNodeMove: - """ - Moves an action node to be after another node in the same workflow. - - :param node: The action node to move. - :param after_node: The node to move the action node after. - :param previous_node_output: If the destination is an output, the output uid. - :param order: The new order of the node. If not provided, it will be calculated - to be last of `after_node`. - :return: The `AutomationNodeMove` dataclass containing the moved node, - its original previous node values and its new previous node values. - """ - - # Does `node`, in its current position, have any next nodes? If so, - # we need to ensure their `previous_node_id` are updated to the new - # previous node of `node`. - origin_next_nodes = list(node.get_next_nodes()) - origin_old_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in origin_next_nodes - ] - - # Keep a list of "next nodes" at the origin and destination which - # we've updated. 
The node service will use this list to send a bulk - # 'automation nodes updated' signal. - next_node_updates: List[AutomationActionNode] = [] - - # Update the nodes that followed `node` to now follow `node`'s previous node. - # i.e. they all move "up" one step in the workflow. - updated_origin_next_nodes = self.update_previous_node( - node.previous_node, origin_next_nodes, node.previous_node_output - ) - next_node_updates.extend(updated_origin_next_nodes) - - origin_new_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in updated_origin_next_nodes - ] - - # Does `after_node`, the node that `node` is being moved after, - # have any next nodes? If so, we need to ensure their `previous_node_id` - # are updated to `node`. - destination_next_nodes = list(after_node.get_next_nodes(previous_node_output)) - destination_old_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in destination_next_nodes - ] - - # Store the original `previous_node_{id,output}` so we can revert. - origin_previous_node_id = node.previous_node_id - origin_previous_node_output = node.previous_node_output - - # Set the new position. - node.previous_node_id = after_node.id - node.previous_node_output = previous_node_output or "" - node.order = order or AutomationNode.get_unique_order_before_node( - after_node, after_node.parent_node - ) - node.save(update_fields=["previous_node_id", "previous_node_output", "order"]) - - # Update the nodes at the destination that their previous node is now `node`. 
- updated_destination_next_nodes = self.update_previous_node( - node, - destination_next_nodes, - previous_node_output="" if previous_node_output else None, - ) - next_node_updates.extend(updated_destination_next_nodes) - - destination_new_next_nodes_values = [ - NextAutomationNodeValues( - id=nn.id, - previous_node_id=nn.previous_node_id, - previous_node_output=nn.previous_node_output, - ) - for nn in updated_destination_next_nodes - ] - - return AutomationNodeMove( - node=node, - next_node_updates=next_node_updates, - origin_previous_node_id=origin_previous_node_id, - origin_previous_node_output=origin_previous_node_output, - origin_old_next_nodes_values=origin_old_next_nodes_values, - origin_new_next_nodes_values=origin_new_next_nodes_values, - destination_previous_node_id=node.previous_node_id, - destination_previous_node_output=node.previous_node_output, - destination_old_next_nodes_values=destination_old_next_nodes_values, - destination_new_next_nodes_values=destination_new_next_nodes_values, - ) + return duplicated_node def export_node( self, @@ -629,7 +330,10 @@ def import_node_only( return node_instance def dispatch_node( - self, node: "AutomationNode", dispatch_context: AutomationDispatchContext + self, + node: "AutomationNode", + dispatch_context: AutomationDispatchContext, + allowed_nodes=None, ): """ Dispatch one node and recursively dispatch the next nodes. @@ -637,8 +341,19 @@ def dispatch_node( :param node: The node to start with. :param dispatch_context: The context in which the workflow is being dispatched, which contains the event payload and other relevant data. + :param allowed_nodes: if set, only the nodes from the list will be dispatched. 
""" + if dispatch_context.simulate_until_node and allowed_nodes is None: + allowed_nodes = { + *dispatch_context.simulate_until_node.get_previous_nodes(), + dispatch_context.simulate_until_node, + } + + if allowed_nodes is not None and node not in allowed_nodes: + # Return early as the node is not on the path until the simulated node + return + node_type: Type[AutomationNodeActionNodeType] = node.get_type() try: dispatch_result = node_type.dispatch(node, dispatch_context) @@ -653,10 +368,34 @@ def dispatch_node( automation_node_updated.send(self, user=None, node=node) return + if children := node.get_children(): + node_data = ( + dispatch_result.data + if isinstance(dispatch_result.data, list) + else [dispatch_result.data] + ) + + if dispatch_context.simulate_until_node: + iterations = [0] + else: + iterations = range(len(node_data)) + + for index in iterations: + sub_dispatch_context = dispatch_context.clone() + sub_dispatch_context.set_current_iteration(node, index) + + # dispatch context build + for child in children: + self.dispatch_node( + child, sub_dispatch_context, allowed_nodes=allowed_nodes + ) + next_nodes = node.get_next_nodes(dispatch_result.output_uid) for next_node in next_nodes: - self.dispatch_node(next_node, dispatch_context) + self.dispatch_node( + next_node, dispatch_context, allowed_nodes=allowed_nodes + ) except ServiceImproperlyConfiguredDispatchException as e: raise AutomationNodeMisconfiguredService( f"The node {node.id} has a misconfigured service." 
diff --git a/backend/src/baserow/contrib/automation/nodes/models.py b/backend/src/baserow/contrib/automation/nodes/models.py index af3d35721c..5f2a4fd73f 100644 --- a/backend/src/baserow/contrib/automation/nodes/models.py +++ b/backend/src/baserow/contrib/automation/nodes/models.py @@ -1,15 +1,12 @@ -from decimal import Decimal -from typing import Iterable, List, Optional +from typing import Iterable from django.contrib.contenttypes.models import ContentType from django.db import models -from django.db.models import Manager, QuerySet +from django.db.models import Manager from baserow.contrib.automation.workflows.models import AutomationWorkflow -from baserow.core.db import get_unique_orders_before_item from baserow.core.mixins import ( CreatedAndUpdatedOnMixin, - FractionOrderableMixin, HierarchicalModelMixin, PolymorphicContentTypeMixin, TrashableModelMixin, @@ -47,7 +44,6 @@ class AutomationNode( PolymorphicContentTypeMixin, CreatedAndUpdatedOnMixin, HierarchicalModelMixin, - FractionOrderableMixin, WithRegistry, ): """ @@ -75,43 +71,18 @@ class AutomationNode( on_delete=models.CASCADE, related_name="automation_workflow_nodes", ) - parent_node = models.ForeignKey( - "self", - on_delete=models.CASCADE, - null=True, - blank=True, - help_text="The parent automation node.", - related_name="automation_workflow_child_nodes", - ) - previous_node = models.ForeignKey( - "self", - on_delete=models.CASCADE, - null=True, - blank=True, - help_text="The previous automation node.", - related_name="automation_workflow_previous_nodes", - ) service = models.OneToOneField( Service, help_text="The service which this node is associated with.", related_name="automation_workflow_node", on_delete=models.CASCADE, ) - order = models.DecimalField( - help_text="Lowest first.", - max_digits=40, - decimal_places=20, - editable=False, - default=1, - ) - - previous_node_output = models.CharField(default="") objects = AutomationNodeTrashManager() objects_and_trash = Manager() class Meta: - 
ordering = ("order", "id") + ordering = ("id",) @staticmethod def get_type_registry(): @@ -124,85 +95,53 @@ def get_type_registry(): def get_parent(self): return self.workflow - def get_previous_service_outputs(self): - return ( - ( - {self.previous_node.service.id: str(self.previous_node_output)} - | self.previous_node.get_previous_service_outputs() - ) - if self.previous_node - else {} - ) + def get_label(self): + if self.label: + return self.label + else: + return self.get_type().type - def get_next_nodes( - self, output_uid: str | None = None, specific: bool = False - ) -> Iterable["AutomationNode"]: + def get_previous_nodes(self): """ - Returns all nodes which follow this node in the workflow. A list of nodes - is returned as there can be multiple nodes that follow this one, for example - when there are multiple branches in the workflow. - - :param output_uid: filter nodes only for this output uid. - :param specific: If True, returns the specific node type. + Returns the nodes before the current node. A previous node can be a + `previous node` or a `parent node`. """ - from baserow.contrib.automation.nodes.handler import AutomationNodeHandler + return [ + position[0] + for position in self.workflow.get_graph().get_previous_positions(self) + ] - return AutomationNodeHandler().get_next_nodes( - self.workflow, self, output_uid=output_uid, specific=specific - ) + def get_previous_service_outputs(self): + """ + Returns the list of edge UIDs to choose to get to this node from the first node. 
+ """ - @classmethod - def get_last_order(cls, workflow: "AutomationWorkflow"): - queryset = AutomationNode.objects.filter(workflow=workflow) - return cls.get_highest_order_of_queryset(queryset)[0] + previous_positions = self.workflow.get_graph().get_previous_positions(self) - @classmethod - def get_unique_order_before_node( - cls, before: "AutomationNode", parent_node_id: Optional[int] - ) -> Decimal: - """ - Returns a safe order value before the given node in the given workflow. - - :param before: The node before which we want the safe order - :param parent_node_id: The id of the parent node. - :raises CannotCalculateIntermediateOrder: If it's not possible to find an - intermediate order. The full order of the items must be recalculated in this - case before calling this method again. - :return: The order value. + return {node.service_id: str(out) for [node, _, out] in previous_positions} + + def get_next_nodes( + self, output_uid: str | None = None + ) -> Iterable["AutomationNode"]: """ + Returns all nodes which directly follow this node in the workflow. + A list of nodes is returned as there can be multiple nodes that follow this one, + for example when there are multiple branches in the workflow. - queryset = AutomationNode.objects.filter(workflow=before.workflow).filter( - parent_node_id=parent_node_id - ) + :param output_uid: filter nodes only for this output uid. + """ - return cls.get_unique_orders_before_item(before, queryset)[0] + return self.workflow.get_graph().get_next_nodes(self, output_uid) - @classmethod - def get_unique_orders_before_item( - cls, - before: Optional[models.Model], - queryset: QuerySet, - amount: int = 1, - field: str = "order", - ) -> List[Decimal]: + def get_children(self, specific=True): """ - Calculates a list of unique decimal orders that can safely be used before the - provided `before` item. - - :param before: The model instance where the before orders must be - calculated for. 
- :param queryset: The base queryset used to compute the value. - :param amount: The number of orders that must be requested. Can be higher if - multiple items are inserted or moved. - :param field: The order field name. - :raises CannotCalculateIntermediateOrder: If it's not possible to find an - intermediate order. The full order of the items must be recalculated in this - case before calling this method again. - :return: A list of decimals containing safe to use orders in order. + Returns the direct children of this node if any. """ - return get_unique_orders_before_item(before, queryset, amount, field=field) + from baserow.contrib.automation.nodes.handler import AutomationNodeHandler + + return AutomationNodeHandler().get_children(self, specific=specific) class AutomationActionNode(AutomationNode): @@ -269,3 +208,7 @@ class CoreSMTPEmailActionNode(AutomationActionNode): class CoreRouterActionNode(AutomationActionNode): ... + + +class CoreIteratorActionNode(AutomationActionNode): + ... 
diff --git a/backend/src/baserow/contrib/automation/nodes/node_types.py b/backend/src/baserow/contrib/automation/nodes/node_types.py index 5ef3cc0d5d..ed81abc650 100644 --- a/backend/src/baserow/contrib/automation/nodes/node_types.py +++ b/backend/src/baserow/contrib/automation/nodes/node_types.py @@ -1,23 +1,26 @@ -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional from django.contrib.auth.models import AbstractUser from django.db import router -from django.db.models import CharField, Q, QuerySet -from django.db.models.functions import Cast +from django.db.models import Q, QuerySet from django.utils import timezone from django.utils.translation import gettext as _ from baserow.contrib.automation.nodes.exceptions import ( + AutomationNodeFirstNodeMustBeTrigger, AutomationNodeMisconfiguredService, AutomationNodeNotDeletable, + AutomationNodeNotMovable, AutomationNodeNotReplaceable, + AutomationNodeTriggerAlreadyExists, + AutomationNodeTriggerMustBeFirstNode, ) from baserow.contrib.automation.nodes.models import ( - AutomationActionNode, AutomationNode, AutomationTriggerNode, CoreHTTPRequestActionNode, CoreHTTPTriggerNode, + CoreIteratorActionNode, CorePeriodicTriggerNode, CoreRouterActionNode, CoreSMTPEmailActionNode, @@ -32,10 +35,13 @@ LocalBaserowUpdateRowActionNode, ) from baserow.contrib.automation.nodes.registries import AutomationNodeType +from baserow.contrib.automation.nodes.types import NodePositionType from baserow.contrib.automation.workflows.constants import WorkflowState +from baserow.contrib.automation.workflows.models import AutomationWorkflow from baserow.contrib.integrations.core.service_types import ( CoreHTTPRequestServiceType, CoreHTTPTriggerServiceType, + CoreIteratorServiceType, CorePeriodicServiceType, CoreRouterServiceType, CoreSMTPEmailServiceType, @@ -50,7 +56,6 @@ LocalBaserowRowsUpdatedServiceType, LocalBaserowUpsertRowServiceType, ) -from baserow.core.db import specific_iterator from 
baserow.core.registry import Instance from baserow.core.services.models import Service from baserow.core.services.registries import service_type_registry @@ -59,6 +64,52 @@ class AutomationNodeActionNodeType(AutomationNodeType): is_workflow_action = True + def before_create(self, workflow, reference_node, position, output): + if reference_node is None: + raise AutomationNodeFirstNodeMustBeTrigger() + + def before_move(self, node, reference_node, position, output): + if reference_node is None: + raise AutomationNodeFirstNodeMustBeTrigger() + + +class ContainerNodeTypeMixin: + is_container = True + + def before_delete(self, node: "ContainerNodeTypeMixin"): + if node.workflow.get_graph().get_children(node): + raise AutomationNodeNotDeletable( + "Container nodes cannot be deleted if they " + "have one or more children nodes associated with them." + ) + + def before_replace(self, node: "ContainerNodeTypeMixin", new_node_type: Instance): + if node.workflow.get_graph().get_children(node): + raise AutomationNodeNotReplaceable( + "Container nodes cannot be replaced if they " + "have one or more children nodes associated with them." + ) + + super().before_replace(node, new_node_type) + + def before_move( + self, + node: "ContainerNodeTypeMixin", + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """ + Check the container node is not moved inside it self. 
+ """ + + if node in reference_node.get_previous_nodes(): + raise AutomationNodeNotMovable( + "A container node cannot be moved inside itself" + ) + + super().before_move(node, reference_node, position, output) + class LocalBaserowUpsertRowNodeType(AutomationNodeActionNodeType): type = "upsert_row" @@ -109,6 +160,12 @@ class CoreHttpRequestNodeType(AutomationNodeActionNodeType): service_type = CoreHTTPRequestServiceType.type +class CoreIteratorNodeType(ContainerNodeTypeMixin, AutomationNodeActionNodeType): + type = "iterator" + model_class = CoreIteratorActionNode + service_type = CoreIteratorServiceType.type + + class CoreSMTPEmailNodeType(AutomationNodeActionNodeType): type = "smtp_email" model_class = CoreSMTPEmailActionNode @@ -120,51 +177,58 @@ class CoreRouterActionNodeType(AutomationNodeActionNodeType): model_class = CoreRouterActionNode service_type = CoreRouterServiceType.type - # Routers cannot be moved in the workflow to a new position. - is_fixed = True - - def get_output_nodes( - self, node: CoreRouterActionNode, specific: bool = False - ) -> Union[List[AutomationActionNode], QuerySet[AutomationActionNode]]: + def has_node_on_edge(self, node: CoreRouterActionNode) -> bool: """ - Given a router node, this method returns the output nodes that are - along the edges of the router node. + Given a router node, this method returns whether one of its edges has a node. + :param node: The router node instance. - :param specific: Whether to return the specific node instances. - :return: An iterable of output nodes that are connected to the - router node's edges. 
""" - queryset = ( - node.workflow.automation_workflow_nodes.select_related("content_type") - .filter(previous_node_id=node.id) - .filter( - Q(previous_node_output="") - | Q( - previous_node_output__in=node.service.specific.edges.values_list( - Cast("uid", output_field=CharField()), flat=True - ) - ), - ) - ) - return specific_iterator(queryset) if specific else queryset + for edge_uid in node.service.get_type().get_edges(node.service.specific).keys(): + if edge_uid != "" and node.workflow.get_graph().get_next_nodes( + node, edge_uid + ): + return True + + return False def before_delete(self, node: CoreRouterActionNode): - output_nodes_count = self.get_output_nodes(node).count() - if output_nodes_count != 0: + if self.has_node_on_edge(node): raise AutomationNodeNotDeletable( "Router nodes cannot be deleted if they " "have one or more output nodes associated with them." ) + super().before_delete(node) + def before_replace(self, node: CoreRouterActionNode, new_node_type: Instance): - output_nodes_count = self.get_output_nodes(node).count() - if output_nodes_count != 0: + if self.has_node_on_edge(node): raise AutomationNodeNotReplaceable( "Router nodes cannot be replaced if they " "have one or more output nodes associated with them." ) + super().before_replace(node, new_node_type) + + def before_move( + self, + node: AutomationTriggerNode, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """ + Check the container node is not moved inside it self. + """ + + if self.has_node_on_edge(node): + raise AutomationNodeNotMovable( + "Router nodes cannot be moved if they " + "have one or more output nodes associated with them." 
+ ) + + super().before_move(node, reference_node, position, output) + def after_create(self, node: CoreRouterActionNode): """ After a router node is created, this method will create @@ -195,25 +259,27 @@ def prepare_values( if instance: service = instance.service.specific - prepared_uids = [edge["uid"] for edge in values["service"].get("edges", [])] + + prepared_uids = [ + str(edge["uid"]) for edge in values["service"].get("edges", []) + ] persisted_uids = [str(edge.uid) for edge in service.edges.only("uid")] removed_uids = list(set(persisted_uids) - set(prepared_uids)) - output_nodes_with_removed_uids = AutomationNode.objects.filter( - previous_node_id=instance.id, previous_node_output__in=removed_uids - ).exists() - if output_nodes_with_removed_uids: - raise AutomationNodeMisconfiguredService( - "One or more branches have been removed from the router node, " - "but they still point to output nodes. These nodes must be " - "trashed before the router can be updated." - ) + + for removed_uid in removed_uids: + if instance.workflow.get_graph().get_node_at_position( + instance, "south", removed_uid + ): + raise AutomationNodeMisconfiguredService( + "One or more branches have been removed from the router node, " + "but they still point to output nodes. These nodes must be " + "trashed before the router can be updated." + ) + return super().prepare_values(values, user, instance) class AutomationNodeTriggerType(AutomationNodeType): - # Triggers cannot be moved in the workflow to a new position. - is_fixed = True - is_workflow_trigger = True def after_register(self): @@ -224,17 +290,33 @@ def before_unregister(self): service_type_registry.get(self.service_type).stop_listening() return super().before_unregister() - def before_delete(self, node: AutomationTriggerNode): - """ - Trigger nodes cannot be deleted. - :param node: The node instance to check. 
- :raises: AutomationNodeNotDeletable - """ + def before_create( + self, + workflow: AutomationWorkflow, + reference_node: AutomationNode, + position: str, + output: str, + ): + if workflow.get_graph().get_node_at_position(None, "south", ""): + raise AutomationNodeTriggerAlreadyExists() - raise AutomationNodeNotDeletable( - "Triggers can not be created, deleted or duplicated, " - "they can only be replaced with a different type." - ) + if reference_node is not None: + raise AutomationNodeTriggerMustBeFirstNode() + + def before_delete(self, node: AutomationNode): + if node.workflow.get_graph().get_next_nodes(node): + raise AutomationNodeNotDeletable( + "Trigger nodes cannot be deleted if they are followed nodes." + ) + + def before_move( + self, + node: AutomationTriggerNode, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + raise AutomationNodeNotMovable("Trigger nodes cannot be moved.") def on_event( self, diff --git a/backend/src/baserow/contrib/automation/nodes/registries.py b/backend/src/baserow/contrib/automation/nodes/registries.py index faecbd2c0b..8b72abd995 100644 --- a/backend/src/baserow/contrib/automation/nodes/registries.py +++ b/backend/src/baserow/contrib/automation/nodes/registries.py @@ -2,15 +2,14 @@ from django.contrib.auth.models import AbstractUser -from rest_framework import serializers - from baserow.contrib.automation.automation_dispatch_context import ( AutomationDispatchContext, ) from baserow.contrib.automation.formula_importer import import_formula from baserow.contrib.automation.nodes.exceptions import AutomationNodeNotReplaceable from baserow.contrib.automation.nodes.models import AutomationNode -from baserow.contrib.automation.nodes.types import AutomationNodeDict +from baserow.contrib.automation.nodes.types import AutomationNodeDict, NodePositionType +from baserow.contrib.automation.workflows.models import AutomationWorkflow from baserow.core.integrations.models import Integration from 
baserow.core.registry import ( CustomFieldsRegistryMixin, @@ -39,24 +38,6 @@ class AutomationNodeType( parent_property_name = "workflow" id_mapping_name = "automation_workflow_nodes" - request_serializer_field_names = ["previous_node_id", "previous_node_output"] - request_serializer_field_overrides = { - "previous_node_id": serializers.IntegerField( - required=False, - default=None, - allow_null=True, - ), - "previous_node_output": serializers.CharField( - required=False, - default="", - allow_blank=True, - help_text="The output of the previous node.", - ), - } - - # Whether this node type is allowed to be moved in a workflow. - is_fixed = False - # Whether this node type is a trigger. Triggers start workflows. is_workflow_trigger = False @@ -64,18 +45,15 @@ class AutomationNodeType( # Actions are executed as part of workflows. is_workflow_action = False + is_container = False + class SerializedDict(AutomationNodeDict): - label: str - service: Dict - parent_node_id: Optional[int] - previous_node_id: Optional[int] + ... @property def allowed_fields(self): return super().allowed_fields + [ "label", - "previous_node_id", - "previous_node_output", "service", ] @@ -87,8 +65,6 @@ def before_delete(self, node: AutomationNode) -> None: :param node: The node instance to about to be deleted. """ - ... - def before_replace(self, node: AutomationNode, new_node_type: Instance) -> None: """ A hook called just before a node is replaced. Can be @@ -105,6 +81,27 @@ def before_replace(self, node: AutomationNode, new_node_type: Instance) -> None: "category. Triggers cannot be updated with actions, and vice-versa." 
) + def before_move( + self, + node: AutomationNode, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """Called before the node is moved.""" + + def before_create( + self, + workflow: AutomationWorkflow, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """ + A hook called just before a node is created. Can be + overridden by subclasses to implement specific logic. + """ + def after_create(self, node: AutomationNode) -> None: """ A hook called just after a node is created. Can be @@ -113,8 +110,6 @@ def after_create(self, node: AutomationNode) -> None: :param node: The node instance that was just created. """ - ... - def get_service_type(self) -> Optional[ServiceTypeSubClass]: return ( service_type_registry.get(self.service_type) if self.service_type else None @@ -159,9 +154,6 @@ def serialize_property( storage=None, cache=None, ): - if prop_name == "order": - return str(node.order) - if prop_name == "service": service = node.service.specific return service.get_type().export_serialized( @@ -195,12 +187,6 @@ def deserialize_property( :return: the deserialized version for this property. """ - if prop_name in ["previous_node_id", "parent_node_id"] and value: - return id_mapping["automation_workflow_nodes"][value] - - if prop_name == "previous_node_output" and value: - return id_mapping["automation_edge_outputs"].get(value, value) - if prop_name == "service" and value: integration = None serialized_service = value @@ -264,6 +250,8 @@ def prepare_values( :return: The modified node values, prepared. 
""" + from baserow.contrib.automation.nodes.handler import AutomationNodeHandler + service_type = service_type_registry.get(self.service_type) if not instance: @@ -286,6 +274,12 @@ def prepare_values( ) values["service"] = service + + if (reference_node_id := values.get("reference_node_id", None)) is not None: + values["reference_node"] = AutomationNodeHandler().get_node( + reference_node_id + ) + return values def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: diff --git a/backend/src/baserow/contrib/automation/nodes/service.py b/backend/src/baserow/contrib/automation/nodes/service.py index be7af8dabe..074c24a297 100644 --- a/backend/src/baserow/contrib/automation/nodes/service.py +++ b/backend/src/baserow/contrib/automation/nodes/service.py @@ -1,22 +1,21 @@ -from typing import Iterable, List, Optional +from typing import Iterable, Optional from django.contrib.auth.models import AbstractUser from baserow.contrib.automation.models import AutomationWorkflow from baserow.contrib.automation.nodes.exceptions import ( - AutomationNodeBeforeInvalid, - AutomationNodeNotMovable, - AutomationTriggerModificationDisallowed, + AutomationNodeDoesNotExist, + AutomationNodeMissingOutput, + AutomationNodeReferenceNodeInvalid, ) from baserow.contrib.automation.nodes.handler import AutomationNodeHandler -from baserow.contrib.automation.nodes.models import AutomationActionNode, AutomationNode +from baserow.contrib.automation.nodes.models import AutomationNode from baserow.contrib.automation.nodes.node_types import AutomationNodeType from baserow.contrib.automation.nodes.operations import ( CreateAutomationNodeOperationType, DeleteAutomationNodeOperationType, DuplicateAutomationNodeOperationType, ListAutomationNodeOperationType, - OrderAutomationNodeOperationType, ReadAutomationNodeOperationType, UpdateAutomationNodeOperationType, ) @@ -27,18 +26,15 @@ from baserow.contrib.automation.nodes.signals import ( automation_node_created, automation_node_deleted, - 
automation_node_replaced, automation_node_updated, - automation_nodes_reordered, - automation_nodes_updated, ) from baserow.contrib.automation.nodes.types import ( - AutomationNodeDuplication, AutomationNodeMove, - NextAutomationNodeValues, + NodePositionType, ReplacedAutomationNode, UpdatedAutomationNode, ) +from baserow.contrib.automation.workflows.signals import automation_workflow_updated from baserow.core.handler import CoreHandler from baserow.core.trash.handler import TrashHandler @@ -101,13 +97,45 @@ def get_nodes( workflow, specific=specific, base_queryset=user_nodes ) + def _check_position( + self, + workflow: AutomationWorkflow, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """ + Validates the position. + """ + + if reference_node is None: + return + + if reference_node.workflow_id != workflow.id: + raise AutomationNodeReferenceNodeInvalid( + f"The reference node {reference_node.id} doesn't exist" + ) + + if output not in reference_node.service.get_type().get_edges( + reference_node.service.specific + ): + raise AutomationNodeMissingOutput( + f"Output {output} doesn't exist on node {reference_node.id}" + ) + + if position == "child" and not reference_node.get_type().is_container: + raise AutomationNodeReferenceNodeInvalid( + f"The reference node {reference_node.id} can't have child" + ) + def create_node( self, user: AbstractUser, node_type: AutomationNodeType, workflow: AutomationWorkflow, - before: Optional[AutomationNode] = None, - order: Optional[str] = None, + reference_node_id: int | None = None, + position: NodePositionType = "south", # south, child + output: str = "", **kwargs, ) -> AutomationNode: """ @@ -116,20 +144,13 @@ def create_node( :param user: The user trying to create the automation node. :param node_type: The type of the automation node. :param workflow: The workflow the automation node is associated with. - :param before: If set, the new node is inserted before this node. 
- :param order: The order of the new node. If not set, it will be determined - automatically based on the existing nodes in the workflow. + :param reference_node_id: The node reference node for the position. + :param position: The position relative to the reference node. + :param output: The output of the reference node. :param kwargs: Additional attributes of the automation node. - :raises AutomationTriggerModificationDisallowed: If the node_type is a trigger. :return: The created automation node. """ - # Triggers are not directly created by users. When a workflow is created, - # the trigger node is created automatically, so users are only able to change - # the trigger node type, not create a new one. - if node_type.is_workflow_trigger: - raise AutomationTriggerModificationDisallowed() - CoreHandler().check_permissions( user, CreateAutomationNodeOperationType.type, @@ -137,37 +158,46 @@ def create_node( context=workflow, ) - # If we've been given a `before` node, validate it. - if before: - if workflow.id != before.workflow_id: - raise AutomationNodeBeforeInvalid( - "The `before` node must belong to the same workflow " - "as the one supplied." - ) - if not before.previous_node_id: - # You can't create a node before a trigger node. Even if `node_type` is - # a trigger, API consumers must delete `before` and then try again. - raise AutomationNodeBeforeInvalid( - "You cannot create an automation node before a trigger." 
- ) + try: + reference_node = ( + self.handler.get_node(reference_node_id) if reference_node_id else None + ) + except AutomationNodeDoesNotExist as e: + raise AutomationNodeReferenceNodeInvalid( + f"The reference node {reference_node_id} doesn't exist" + ) from e + + self._check_position(workflow, reference_node, position, output) + + node_type.before_create(workflow, reference_node, position, output) prepared_values = node_type.prepare_values(kwargs, user) new_node = self.handler.create_node( - node_type, order=order, workflow=workflow, before=before, **prepared_values + node_type, + workflow=workflow, + **prepared_values, ) + node_type.after_create(new_node) + workflow.get_graph().insert(new_node, reference_node, position, output) + automation_node_created.send( self, node=new_node, user=user, ) + automation_workflow_updated.send(self, workflow=workflow, user=user) + return new_node def update_node( - self, user: AbstractUser, node_id: int, **kwargs + self, + user: AbstractUser, + node_id: int, + **kwargs, ) -> UpdatedAutomationNode: """ Updates fields of a node. @@ -210,47 +240,21 @@ def update_node( new_values=new_node_values, ) - def update_next_nodes_values( - self, - user: AbstractUser, - next_node_values: List[NextAutomationNodeValues], - workflow: AutomationWorkflow, - ) -> List[AutomationActionNode]: - """ - Update the next nodes values for a list of nodes. - - :param user: The user trying to update the next node values. - :param next_node_values: The new next node values. - :param workflow: The workflow the nodes belong to. - :return: The updated nodes. 
- """ - - updated_next_nodes = self.handler.update_next_nodes_values(next_node_values) - if updated_next_nodes: - automation_nodes_updated.send( - self, user=user, nodes=updated_next_nodes, workflow=workflow - ) - - return updated_next_nodes - def delete_node( self, user: AbstractUser, node_id: int, - trash_operation_type: Optional[str] = None, ) -> AutomationNode: """ Deletes the specified automation node. :param user: The user trying to delete the node. :param node_id: The ID of the node to delete. - :param trash_operation_type: The trash operation type to use when trashing - the node. :return: The deleted node. - :raises AutomationTriggerModificationDisallowed: If the node is a trigger. """ node = self.handler.get_node(node_id) + workflow = node.workflow CoreHandler().check_permissions( user, @@ -259,107 +263,72 @@ def delete_node( context=node, ) - automation = node.workflow.automation - trash_entry = TrashHandler.trash( - user, - automation.workspace, - automation, - node, - trash_operation_type=trash_operation_type, - ) - - if trash_entry.get_operation_type().send_post_trash_deleted_signal: - automation_node_deleted.send( - self, - workflow=node.workflow, - node_id=node.id, - user=user, - ) - - return node - - def order_nodes( - self, user: AbstractUser, workflow: AutomationWorkflow, order: List[int] - ) -> List[int]: - """ - Assigns a new order to the nodes in a workflow. - - :param user: The user trying to order the workflows. - :param workflow The workflow that the nodes belong to. - :param order: The new order of the nodes. - :return: The new order of the nodes. 
- """ - automation = workflow.automation - CoreHandler().check_permissions( - user, - OrderAutomationNodeOperationType.type, - workspace=automation.workspace, - context=workflow, - ) - all_nodes = self.handler.get_nodes( - workflow, specific=False, base_queryset=AutomationNode.objects - ) + node.get_type().before_delete(node.specific) - user_nodes = CoreHandler().filter_queryset( + TrashHandler.trash( user, - OrderAutomationNodeOperationType.type, - all_nodes, - workspace=automation.workspace, + automation.workspace, + automation, + node, ) - new_order = self.handler.order_nodes(workflow, order, user_nodes) - - automation_nodes_reordered.send( - self, workflow=workflow, order=new_order, user=user + automation_node_deleted.send( + self, + workflow=workflow, + node_id=node.id, + user=user, ) - return new_order + return node def duplicate_node( self, user: AbstractUser, - node: AutomationNode, - ) -> AutomationNodeDuplication: + source_node_id: AutomationNode, + ) -> AutomationNode: """ Duplicates an existing AutomationNode instance. :param user: The user initiating the duplication. - :param node: The node that is being duplicated. + :param source_node_id: The id of the node that is being duplicated. :raises ValueError: When the provided node is not an instance of AutomationNode. - :raises AutomationTriggerModificationDisallowed: If the node is a trigger. - :return: The `AutomationNodeDuplication` dataclass containing the source - node, its next nodes values and the duplicated node. + :return: The duplicated node. """ + source_node = AutomationNodeService().get_node(user, source_node_id) + workflow = source_node.workflow + CoreHandler().check_permissions( user, DuplicateAutomationNodeOperationType.type, - workspace=node.workflow.automation.workspace, - context=node, + workspace=workflow.automation.workspace, + context=source_node, ) - # If we received a trigger node, we cannot duplicate it. 
- if node.get_type().is_workflow_trigger: - raise AutomationTriggerModificationDisallowed() + source_node.get_type().before_create(workflow, source_node, "south", "") + + duplicated_node = self.handler.duplicate_node(source_node) - duplication = self.handler.duplicate_node(node) + workflow.get_graph().insert(duplicated_node, source_node, "south", "") automation_node_created.send( self, - node=duplication.duplicated_node, + node=duplicated_node, user=user, ) + automation_workflow_updated.send(self, workflow=workflow, user=user) - return duplication + return duplicated_node def replace_node( self, user: AbstractUser, node_id: int, new_node_type_str: str, + existing_node: AutomationNode | None = None, ) -> ReplacedAutomationNode: """ Replaces an existing automation node with a new one of a different type. @@ -367,110 +336,143 @@ def replace_node( :param user: The user trying to replace the node. :param node_id: The ID of the node to replace. :param new_node_type_str: The type of the new node to replace with. + :param existing_node: If provided, used to replace the node instead of creating + a new instance. Used during undo/redo. :return: The replaced automation node. 
""" - node = self.get_node(user, node_id) - node_type: AutomationNodeType = node.get_type() + node_to_replace = self.get_node(user, node_id) + workflow = node_to_replace.workflow + automation = workflow.automation + + node_type: AutomationNodeType = node_to_replace.get_type() CoreHandler().check_permissions( user, CreateAutomationNodeOperationType.type, - workspace=node.workflow.automation.workspace, - context=node.workflow, + workspace=node_to_replace.workflow.automation.workspace, + context=node_to_replace.workflow, ) - new_node_type = automation_node_type_registry.get(new_node_type_str) - node_type.before_replace(node, new_node_type) + if not existing_node: + new_node_type = automation_node_type_registry.get(new_node_type_str) + node_type.before_replace(node_to_replace, new_node_type) - prepared_values = new_node_type.prepare_values( - {}, - user, - ) + prepared_values = new_node_type.prepare_values({}, user) - new_node = self.handler.create_node( - new_node_type, - workflow=node.workflow, - before=node, - order=node.order, - **prepared_values, - ) + new_node = self.handler.create_node( + new_node_type, + workflow=workflow, + **prepared_values, + ) + + new_node_type.after_create(new_node) - new_node_type.after_create(new_node) + else: + new_node = existing_node - # After the node creation, the replaced node has changed - node.refresh_from_db() + automation_node_created.send( + self, + node=new_node, + user=user, + ) - # Trash the old node, assigning it a specific trash operation - # type so that we know it was replaced when restoring it. - automation = node.workflow.automation + # When we use a replace operation type, we make sure no graph modification is + # made so that we can do it here. 
TrashHandler.trash( user, automation.workspace, automation, - node, + node_to_replace, trash_operation_type=ReplaceAutomationNodeTrashOperationType.type, ) - automation_node_replaced.send( + workflow.get_graph().replace(node_to_replace, new_node) + + automation_node_deleted.send( self, - workflow=new_node.workflow, - restored_node=new_node, - deleted_node=node, + workflow=workflow, + node_id=node_to_replace.id, user=user, ) + automation_workflow_updated.send(self, workflow=workflow, user=user) + return ReplacedAutomationNode( node=new_node, - original_node_id=node.id, + original_node_id=node_to_replace.id, original_node_type=node_type.type, ) def move_node( self, user: AbstractUser, - node_id: int, - new_previous_node_id: int, - new_previous_output: Optional[str] = None, - new_order: Optional[float] = None, + node_id_to_move: int, + reference_node_id: int | None, + position: NodePositionType, + output: str, ) -> AutomationNodeMove: """ Moves an existing automation node to a new position in the workflow. :param user: The user trying to move the node. - :param node_id: The ID of the node to move. - :param new_previous_node_id: The ID of the node that - will be the new previous node. - :param new_previous_output: If the destination is an output, the output uid. - :param new_order: The new order of the node. If not provided, it will - be calculated to be last of `new_previous_node_id`. + :param node_id_to_move: The ID of the node to move. + :param reference_node_id: The node the new position is relative to. + :param position: The new position relative to the reference node. + :param output: The new output of the reference node. :raises AutomationNodeNotMovable: If the node cannot be moved. :return: The move operation details. 
""" - node = self.get_node(user, node_id) - node_type: AutomationNodeType = node.get_type() + node_to_move = self.get_node(user, node_id_to_move) + node_type: AutomationNodeType = node_to_move.get_type() + + workflow = node_to_move.workflow CoreHandler().check_permissions( user, UpdateAutomationNodeOperationType.type, - workspace=node.workflow.automation.workspace, - context=node, + workspace=node_to_move.workflow.automation.workspace, + context=node_to_move, ) + try: + reference_node = ( + self.handler.get_node(reference_node_id) if reference_node_id else None + ) + except AutomationNodeDoesNotExist as e: + raise AutomationNodeReferenceNodeInvalid( + f"The reference node {reference_node_id} doesn't exist" + ) from e - # If a node type cannot move, raise an exception. - if node_type.is_fixed: - raise AutomationNodeNotMovable("This automation node cannot be moved.") + self._check_position(workflow, reference_node, position, output) - after_node = self.get_node(user, new_previous_node_id) - move = self.handler.move_node(node, after_node, new_previous_output, new_order) + if reference_node.id == node_to_move.id: + raise AutomationNodeReferenceNodeInvalid( + "The reference node and the moved node must be different" + ) - updated_nodes = [move.node] + move.next_node_updates - automation_nodes_updated.send( - self, - user=user, - nodes=updated_nodes, - workflow=node.workflow, + node_type.before_move(node_to_move, reference_node, position, output) + + # We extract the current node position to restore it if we undo the operation. 
+ [ + previous_reference_node_id, + previous_position, + previous_output, + ] = workflow.get_graph().get_position(node_to_move) + + previous_reference_node = ( + self.get_node(user, previous_reference_node_id) + if previous_reference_node_id + else None ) - return move + workflow.get_graph().move(node_to_move, reference_node, position, output) + + automation_workflow_updated.send(self, workflow=workflow, user=user) + + return AutomationNodeMove( + node=node_to_move, + previous_reference_node=previous_reference_node, + previous_position=previous_position, + previous_output=previous_output, + ) diff --git a/backend/src/baserow/contrib/automation/nodes/signals.py b/backend/src/baserow/contrib/automation/nodes/signals.py index 0fd8feb0e3..f6c01caa6e 100644 --- a/backend/src/baserow/contrib/automation/nodes/signals.py +++ b/backend/src/baserow/contrib/automation/nodes/signals.py @@ -2,7 +2,4 @@ automation_node_created = Signal() automation_node_updated = Signal() -automation_nodes_updated = Signal() automation_node_deleted = Signal() -automation_node_replaced = Signal() -automation_nodes_reordered = Signal() diff --git a/backend/src/baserow/contrib/automation/nodes/trash_types.py b/backend/src/baserow/contrib/automation/nodes/trash_types.py index 70f5f97fa7..fc079f57dd 100644 --- a/backend/src/baserow/contrib/automation/nodes/trash_types.py +++ b/backend/src/baserow/contrib/automation/nodes/trash_types.py @@ -10,10 +10,13 @@ ) from baserow.contrib.automation.nodes.signals import automation_node_created from baserow.contrib.automation.workflows.models import AutomationWorkflow +from baserow.contrib.automation.workflows.signals import automation_workflow_updated from baserow.core.models import TrashEntry from baserow.core.trash.exceptions import TrashItemRestorationDisallowed from baserow.core.trash.registries import TrashableItemType +from .exceptions import AutomationNodeDoesNotExist + class AutomationNodeTrashableItemType(TrashableItemType): type = "automation_node" @@ 
-25,12 +28,9 @@ def get_parent(self, trashed_item: AutomationActionNode) -> AutomationWorkflow: def get_name(self, trashed_item: AutomationActionNode) -> str: return f"{trashed_item.get_type().type} ({trashed_item.id})" - def get_additional_restoration_data(self, trash_item: AutomationActionNode): - return { - node.id: {"previous_node_output": node.previous_node_output} - for node in trash_item.get_next_nodes() - if node.previous_node_output - } + def get_additional_restoration_data(self, trashed_item: AutomationActionNode): + # We save the previous position for the restoration + return trashed_item.workflow.get_graph().get_position(trashed_item) def trash( self, @@ -38,18 +38,20 @@ def trash( requesting_user: AbstractUser, trash_entry: TrashEntry, ): - # Determine if this node has a node after it. If it does, we'll - # need to update its previous_node_id after `item_to_trash` is trashed. - next_nodes = list(item_to_trash.get_next_nodes()) - super().trash(item_to_trash, requesting_user, trash_entry) - # As `item_to_trash` is trashed, we need to update the nodes that immediately - # follow this node, to point to the node before `item_to_trash`, and ensure - # that the previous_node_output is set to the output of the node before. 
- AutomationNodeHandler().update_previous_node( - item_to_trash.previous_node, next_nodes, item_to_trash.previous_node_output - ) + if ( + trash_entry.trash_operation_type + != ReplaceAutomationNodeTrashOperationType.type + ): + item_to_trash.workflow.get_graph().remove(item_to_trash) + item_to_trash.workflow.refresh_from_db() + + automation_workflow_updated.send( + self, workflow=item_to_trash.workflow, user=requesting_user + ) + + AutomationNodeHandler().invalidate_node_cache(item_to_trash.workflow) def restore( self, @@ -57,63 +59,50 @@ def restore( trash_entry: TrashEntry, ): workflow = trashed_item.workflow - next_nodes = list( - AutomationNodeHandler().get_next_nodes( - workflow, - trashed_item.previous_node, - trashed_item.previous_node_output, - ) - ) - # If we have we have a trash operation type, and it's not a replace operation... + super().restore(trashed_item, trash_entry) + + AutomationNodeHandler().invalidate_node_cache(trashed_item.workflow) + if ( trash_entry.trash_operation_type != ReplaceAutomationNodeTrashOperationType.type ): - # If we're restoring a node, and it has a previous node output, ensure that - # the output UUID matches one of the `uid` in the previous node's edges. If - # the output isn't found, it means that the edge was deleted whilst the node - # was trashed, and we cannot restore the node because it would create a - # broken workflow. - if trashed_item.previous_node_output and trashed_item.previous_node_id: - previous_node = trashed_item.previous_node.specific - if not previous_node.service.specific.edges.filter( - uid=trashed_item.previous_node_output - ).exists(): - raise TrashItemRestorationDisallowed( - "This automation node cannot be " - "restored as its branch has been deleted." 
- ) - - super().restore(trashed_item, trash_entry) + ( + reference_node_id, + position, + output, + ) = trash_entry.additional_restoration_data + + try: + reference_node = ( + AutomationNodeHandler().get_node(reference_node_id) + if reference_node_id + else None + ) + except AutomationNodeDoesNotExist as exc: + raise TrashItemRestorationDisallowed( + "This automation node cannot be " + "restored as its reference node has been deleted." + ) from exc + + # Does the output still exists? + if ( + reference_node is not None + and output + not in reference_node.service.get_type().get_edges( + reference_node.service.specific + ) + ): + raise TrashItemRestorationDisallowed( + "This automation node cannot be " + "restored as its branch has been deleted." + ) + + workflow.get_graph().insert(trashed_item, reference_node, position, output) - # Determine if this restored node has one or more nodes after it. If it does, - # we'll need to update their previous_node_id to point to `trashed_item.id` - AutomationNodeHandler().update_previous_node( - trashed_item, - next_nodes, - ) - - # If the trashed item had any restoration data, then that means that - # we have `previous_node_output` from next nodes to update. - restoration_data = trash_entry.additional_restoration_data or {} - if restoration_data: - updates = [] - for next_node in next_nodes: - # Do we have anything to restore for this next node? For defensive - # programming purposes we double-check that the next node is present - # in the old state's restoration data. 
- node_restoration_data = restoration_data.get(str(next_node.id)) - if node_restoration_data is None: - continue - next_node.previous_node_output = node_restoration_data[ - "previous_node_output" - ] - updates.append(next_node) - AutomationNode.objects.bulk_update(updates, ["previous_node_output"]) - - if trash_entry.get_operation_type().send_post_restore_created_signal: automation_node_created.send(self, node=trashed_item, user=None) + automation_workflow_updated.send(self, workflow=workflow, user=None) def permanently_delete_item( self, trashed_item: AutomationNode, trash_item_lookup_cache=None diff --git a/backend/src/baserow/contrib/automation/nodes/types.py b/backend/src/baserow/contrib/automation/nodes/types.py index 35ec4cba85..9a3ea481a7 100644 --- a/backend/src/baserow/contrib/automation/nodes/types.py +++ b/backend/src/baserow/contrib/automation/nodes/types.py @@ -1,11 +1,23 @@ from dataclasses import dataclass -from typing import Any, List, NewType, TypedDict +from typing import Any, Literal, NewType, TypeAlias, TypedDict + +from django.db import models from baserow.contrib.automation.nodes.models import AutomationActionNode, AutomationNode AutomationNodeForUpdate = NewType("AutomationNodeForUpdate", AutomationNode) +class NodePosition(models.TextChoices): + SOUTH = "south", "South" + CHILD = "child", "Child" + + +NodePositionType = Literal["south", "child"] + +NodePositionTriplet: TypeAlias = tuple[AutomationNode | None, NodePositionType, str] + + @dataclass class UpdatedAutomationNode: node: AutomationNode @@ -20,50 +32,18 @@ class ReplacedAutomationNode: original_node_type: str -@dataclass -class NextAutomationNodeValues: - id: int - previous_node_id: int - previous_node_output: str - - -@dataclass -class AutomationNodeDuplication: - source_node: AutomationNode - source_node_next_nodes_values: List[NextAutomationNodeValues] - duplicated_node: AutomationNode - duplicated_node_next_nodes_values: List[NextAutomationNodeValues] - - @dataclass class 
AutomationNodeMove: # The node we're trying to move. node: AutomationActionNode - # A list of origin *and* destination next nodes - next_node_updates: List[AutomationActionNode] - # The original position & output of the node before the move. - origin_previous_node_id: int - origin_previous_node_output: str - # The pre-move values of the next nodes after `node`, at the original position. - origin_old_next_nodes_values: List[NextAutomationNodeValues] - # The post-move values of the next nodes after `node`, at the original position. - origin_new_next_nodes_values: List[NextAutomationNodeValues] - # The destination position & output of the node after the move. - destination_previous_node_id: int - destination_previous_node_output: str - # The pre-move values of the next nodes after - # `destination_previous_node_id`, at the new position. - destination_old_next_nodes_values: List[NextAutomationNodeValues] - # The post-move values of the next nodes after - # `destination_previous_node_id`, at the new position. 
- destination_new_next_nodes_values: List[NextAutomationNodeValues] + previous_reference_node: AutomationActionNode | None + previous_position: NodePositionType + previous_output: str class AutomationNodeDict(TypedDict): id: int type: str - order: float + label: str + service: dict workflow_id: int - parent_node_id: int - previous_node_id: int - previous_node_output: str diff --git a/backend/src/baserow/contrib/automation/nodes/ws/signals.py b/backend/src/baserow/contrib/automation/nodes/ws/signals.py index bfc1aeaf94..632b5eb192 100644 --- a/backend/src/baserow/contrib/automation/nodes/ws/signals.py +++ b/backend/src/baserow/contrib/automation/nodes/ws/signals.py @@ -1,5 +1,3 @@ -from typing import List - from django.contrib.auth.models import AbstractUser from django.db import transaction from django.dispatch import receiver @@ -11,22 +9,16 @@ from baserow.contrib.automation.nodes.operations import ( ListAutomationNodeOperationType, ReadAutomationNodeOperationType, - UpdateAutomationNodeOperationType, ) -from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.nodes.signals import ( automation_node_created, automation_node_deleted, - automation_node_replaced, automation_node_updated, - automation_nodes_reordered, - automation_nodes_updated, ) from baserow.contrib.automation.workflows.object_scopes import ( AutomationWorkflowObjectScopeType, ) -from baserow.core.utils import generate_hash -from baserow.ws.tasks import broadcast_to_group, broadcast_to_permitted_users +from baserow.ws.tasks import broadcast_to_permitted_users @receiver(automation_node_created) @@ -81,79 +73,3 @@ def node_updated(sender, node: AutomationNode, user: AbstractUser, **kwargs): getattr(user, "web_socket_id", None), ) ) - - -@receiver(automation_nodes_updated) -def nodes_updated( - sender, - nodes: List[AutomationNode], - workflow: AutomationWorkflow, - user: AbstractUser, - **kwargs, -): - transaction.on_commit( - lambda: 
broadcast_to_permitted_users.delay( - workflow.automation.workspace_id, - ListAutomationNodeOperationType.type, - AutomationWorkflowObjectScopeType.type, - workflow.id, - { - "workflow_id": workflow.id, - "type": "automation_nodes_updated", - "nodes": [ - automation_node_type_registry.get_serializer( - node, AutomationNodeSerializer - ).data - for node in nodes - ], - }, - getattr(user, "web_socket_id", None), - ) - ) - - -@receiver(automation_nodes_reordered) -def nodes_reordered( - sender, workflow: AutomationWorkflow, order: List[int], user: AbstractUser, **kwargs -): - # Hashing all values here to not expose real ids of workflows a user - # might not have access to - order = [generate_hash(o) for o in order] - transaction.on_commit( - lambda: broadcast_to_group.delay( - workflow.automation.workspace_id, - { - "type": "automation_nodes_reordered", - # A user might also not have access to the automation itself - "workflow_id": generate_hash(workflow.id), - "order": order, - }, - getattr(user, "web_socket_id", None), - ) - ) - - -@receiver(automation_node_replaced) -def node_replaced( - sender, - workflow: AutomationWorkflow, - deleted_node: AutomationNode, - restored_node: AutomationNode, - user: AbstractUser, - **kwargs, -): - transaction.on_commit( - lambda: broadcast_to_permitted_users.delay( - workflow.automation.workspace_id, - UpdateAutomationNodeOperationType.type, - AutomationWorkflowObjectScopeType.type, - workflow.id, - { - "type": "automation_node_replaced", - "workflow_id": workflow.id, - "deleted_node": AutomationNodeSerializer(deleted_node).data, - "restored_node": AutomationNodeSerializer(restored_node).data, - }, - getattr(user, "web_socket_id", None), - ) - ) diff --git a/backend/src/baserow/contrib/automation/types.py b/backend/src/baserow/contrib/automation/types.py index 06e1670f5d..5cc234d647 100644 --- a/backend/src/baserow/contrib/automation/types.py +++ b/backend/src/baserow/contrib/automation/types.py @@ -11,6 +11,7 @@ class 
AutomationWorkflowDict(TypedDict): order: int nodes: List[AutomationNodeDict] state: WorkflowState + graph: dict class AutomationDict(TypedDict): diff --git a/backend/src/baserow/contrib/automation/workflows/graph_handler.py b/backend/src/baserow/contrib/automation/workflows/graph_handler.py new file mode 100644 index 0000000000..f9f51153c9 --- /dev/null +++ b/backend/src/baserow/contrib/automation/workflows/graph_handler.py @@ -0,0 +1,482 @@ +from typing import Any, Dict, List + +from baserow.contrib.automation.nodes.exceptions import ( + AutomationNodeDoesNotExist, + AutomationNodeNotFoundInGraph, +) +from baserow.contrib.automation.nodes.models import AutomationNode +from baserow.contrib.automation.nodes.types import NodePositionTriplet, NodePositionType + + +def _replace(list_, item_to_replace, replacement): + index = list_.index(item_to_replace) + + return ( + list_[:index] + + (replacement if isinstance(replacement, list) else [replacement]) + + list_[index + 1 :] + ) + + +class NodeGraphHandler: + """ + Handler to support all workflow node graph operation. Most operation over the graph + structure should happen here. + + The structure looks like: + + ``` + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": { + "next": { + "uuid1": [3], + "uui2": [5], + "": [4], + } + }, + "3": {}, + "5": {}, + "4": {"next": {"": [6]}}, + "6": {"children": [7]} + "7": {} + } + ``` + + The key is the ID of a node except for the key '0' that indicates the ID of the + first node of the graph. + + For each node, `next` is the dict keyed by edge UUIDs and valued by the list of + node ID on this edge. For now only one node is possible per output. + + `children` is an array of children for the container node. + + This graph structure use triplet of position to identify the position of a node. + A triplet looks like [reference_node, position, output]. + + For instance: + - [, 'south', ''] refers to the node placed at the + south of the node 42 at default output "". 
+ - [, 'south', 'uuid45'] refers to the node placed at the + south of the node 42 at the edge with uid `uuid45`. + - [, 'child', ''] refers to the node placed as child of the + node 42. + """ + + def __init__(self, workflow): + self.workflow = workflow + + @property + def graph(self): + return self.workflow.graph + + def _update_graph(self, graph=None): + """ + Save the workflow graph. + """ + + if graph is not None: + self.workflow.graph = graph + + self.workflow.save(update_fields=["graph"]) + + def get_info(self, node: AutomationNode | str | int | None) -> Dict[str, Any]: + """ + Returns the info dict for the given node. + """ + + if node is None: + node_id = self.graph["0"] + + elif hasattr(node, "id"): + node_id = node.id + else: + node_id = node + + return self.graph[str(node_id)] + + def _get_node_map(self) -> Dict[int, AutomationNode]: + from baserow.contrib.automation.nodes.handler import AutomationNodeHandler + + return {n.id: n for n in AutomationNodeHandler().get_nodes(self.workflow)} + + def get_node(self, node_id: str | int) -> AutomationNode: + """ + Return the node instance for the given node ID. + """ + + if int(node_id) not in self._get_node_map(): + raise AutomationNodeDoesNotExist(node_id) + + return self._get_node_map()[int(node_id)] + + def get_node_at_position( + self, reference_node: AutomationNode, position: NodePositionType, output: str + ) -> AutomationNode: + """ + Returns the node at the given position in the graph. + + :param reference_node: The node used as reference for the position. + :param position: The direction relative to the reference node. + :param output: The output of the reference node to use. 
+ """ + + output = str(output) + + if position == "south": + # First node + if reference_node is None: + if "0" in self.graph: + return self.get_node(self.graph["0"]) + else: + return None + + next_nodes = self.get_info(reference_node).get("next", {}).get(output, []) + if next_nodes: + return self.get_node(next_nodes[0]) + + elif position == "child": + children = self.get_info(reference_node).get("children", []) + if children: + return self.get_node(children[0]) + + return None + + def get_last_position(self) -> NodePositionTriplet: + """ + Return the last position of the graph if we follow the default edge ("") of + each node. Mostly used to place nodes in tests. + """ + + if self.graph.get("0") is None: + return (None, "south", "") + + def search_last(node_id): + next_nodes = self.get_info(node_id).get("next", {}).get("", []) + if not next_nodes: + return (self.get_node(node_id), "south", "") + else: + return search_last(next_nodes[0]) + + return search_last(self.graph["0"]) + + def get_position(self, node: AutomationNode) -> NodePositionTriplet: + """ + Returns the position of the given node. + """ + + if node.id == self.graph.get("0", None): + # it's the trigger + return (None, "south", "") + + for node_id, node_info in self.graph.items(): + if node_id == "0" or node_id == str(node.id): + continue + + for output_uid, next_nodes in node_info.get("next", {}).items(): + if node.id in next_nodes: + return (node_id, "south", output_uid) + + if node.id in node_info.get("children", []): + return (node_id, "child", "") + + raise AutomationNodeNotFoundInGraph(f"Node {node.id} not found in the graph") + + def get_previous_positions( + self, target_node: AutomationNode + ) -> NodePositionTriplet: + """ + Generates the list of all positions to get to the target node. 
+ """ + + def explore(current_position, path): + node = self.get_node_at_position(*current_position) + + node_id = str(node.id) + + if node_id == str(target_node.id): + return path + + node_info = self.get_info(node_id) + + next_positions = [] + # Collect all possible positions + next_positions.extend( + [ + (node_id, "south", uid) + for uid, nodes in node_info.get("next", {}).items() + if nodes + ] + ) + if node_info.get("children"): + next_positions.append((node_id, "child", "")) + + for next_position in next_positions: + found = explore(next_position, path + [next_position]) + if found is not None: + return found + + return None + + full_path = explore((None, "south", ""), []) + if full_path is not None: + return [(self.get_node(nid), p, o) for [nid, p, o] in full_path] + + return None + + def _get_all_next_nodes(self, node: AutomationNode): + """ + Collects all next node of the give node regardless of their output. + """ + + node_info = self.get_info(node) + + return [ + x for sublist in node_info.get("next", {}).values() for x in sublist + ] + node_info.get("children", []) + + def get_next_nodes( + self, node: AutomationNode, output: str | None = None + ) -> List[AutomationNode]: + """ + Get next nodes on the given output if output is set or all outputs if not.. + """ + + node_info = self.get_info(node) + + return [ + self.get_node(x) + for uid, sublist in node_info.get("next", {}).items() + for x in sublist + if output is None or uid == output + ] + + def get_children(self, node) -> List[AutomationNode]: + """ + Returns the node children. + """ + + return [self.get_node(cid) for cid in self.get_info(node).get("children", [])] + + def insert( + self, + node: AutomationNode, + reference_node: AutomationNode, + position: NodePositionType, + output: str, + ): + """ + Insert a node at the given position. Rewire all necessary nodes. 
+ """ + + output = str(output) # When it's an UUID + + graph = self.graph + + node_info = graph.setdefault(str(node.id), {}) + + new_next = None + + if reference_node is None: + if "0" in graph: + new_next = [graph["0"]] + + # This is the first node of the graph + graph["0"] = node.id + + if new_next: + node_info["next"] = {"": new_next} + + self._update_graph() + return + + if position == "south": + if output in self.get_info(reference_node).get("next", {}): + new_next = self.get_info(reference_node)["next"][output] + + self.get_info(reference_node).setdefault("next", {})[output] = [node.id] + + elif position == "child": + if "children" in self.get_info(reference_node): + new_next = self.get_info(reference_node)["children"] + + self.get_info(reference_node)["children"] = [node.id] + + if new_next: + node_info["next"] = {"": new_next} + else: + if "next" in node_info: + del node_info["next"] + + self._update_graph() + + def remove(self, node_to_delete: AutomationNode, keep_info=False): + """ + Remove the given node. + + :param node_to_delete: The node to delete. + :param keep_info: doesn't delete the info dict from the graph yet if True. + """ + + graph = self.workflow.graph + + if str(node_to_delete.id) not in graph: + # The node is already removed. Could be by a replace. 
+ return + + next_node_ids = self._get_all_next_nodes(node_to_delete) + + node_position_id, position, output = self.get_position(node_to_delete) + + if node_position_id is None: + next_nodes = self._get_all_next_nodes(node_to_delete) + if next_nodes: + graph["0"] = next_nodes[0] + else: + del graph["0"] + + elif position == "south": + graph[node_position_id]["next"][output] = _replace( + graph[node_position_id]["next"][output], + node_to_delete.id, + next_node_ids, + ) + elif position == "child": + next_nodes = self._get_all_next_nodes(node_to_delete) + graph[node_position_id]["children"] = _replace( + graph[node_position_id]["children"], + node_to_delete.id, + next_nodes, + ) + + if not keep_info: + del graph[str(node_to_delete.id)] + + self._update_graph() + + def replace(self, node_to_replace: AutomationNode, new_node: AutomationNode): + """ + Replace a node with another at the same position. + """ + + reference_node_id, position, output = self.get_position(node_to_replace) + + node_to_replace_id = str(node_to_replace.id) + new_node_id = str(new_node.id) + + self.graph[new_node_id] = self.graph[node_to_replace_id] + + if position == "south": + if reference_node_id is None: + self.graph["0"] = new_node.id + else: + self.graph[reference_node_id]["next"][output] = _replace( + self.graph[reference_node_id]["next"][output], + node_to_replace.id, + new_node.id, + ) + elif position == "child": + self.graph[reference_node_id]["children"] = _replace( + self.graph[reference_node_id]["children"], + node_to_replace.id, + new_node.id, + ) + + del self.graph[node_to_replace_id] + + self._update_graph() + + def move( + self, + node_to_move: AutomationNode, + reference_node: AutomationNode | None, + position: NodePositionType, + output: str, + ): + """ + Move a node at another given position. 
+ """ + + output = str(output) # When it's an UUID + + self.remove(node_to_move, keep_info=True) + self.insert(node_to_move, reference_node, position, output) + + def migrate_graph(self, id_mapping): + """ + Updates the node IDs and edge UIDs in the graph from the id_mapping. + """ + + migrated = {} + + def map_node(nid): + return id_mapping["automation_workflow_nodes"][int(nid)] + + def map_output(uid): + if uid == "": + return "" + return id_mapping["automation_edge_outputs"][uid] + + for key, info in self.graph.items(): + if key == "0": + migrated["0"] = id_mapping["automation_workflow_nodes"][info] + + else: + migrated[str(map_node(key))] = {} + if "next" in info: + migrated[str(map_node(key))]["next"] = { + map_output(uid): [map_node(nid) for nid in nids] + for uid, nids in info["next"].items() + } + if "children" in info: + migrated[str(map_node(key))]["children"] = [ + map_node(nid) for nid in info["children"] + ] + + self._update_graph(migrated) + + def _get_edge_label(self, node, uid): + """ + Returns the label of the given edge uid for the given node. + """ + + edges = node.service.get_type().get_edges(node.service.specific) + return edges[uid]["label"] + + def labeled_graph(self): + """ + Generate a graph representation that doesn't depends on the node IDs and that is + reliable between test executions. 
+ """ + + used_label = {} + + def label(node_id): + node_id = str(node_id) + label = self.get_node(node_id).get_label() + + while used_label.setdefault(label, node_id) != node_id: + label += "-" + + return label + + result = {} + for key, node_info in self.graph.items(): + if key == "0": + result[key] = label(node_info) + else: + result[label(key)] = {} + if "children" in node_info: + result[label(key)]["children"] = [ + label(id) for id in node_info["children"] + ] + if "next" in node_info: + result[label(key)]["next"] = { + self._get_edge_label(self.get_node(key), o): [ + label(id) for id in n + ] + for o, n in node_info["next"].items() + } + + return result diff --git a/backend/src/baserow/contrib/automation/workflows/handler.py b/backend/src/baserow/contrib/automation/workflows/handler.py index 026814257c..2bef23a43a 100644 --- a/backend/src/baserow/contrib/automation/workflows/handler.py +++ b/backend/src/baserow/contrib/automation/workflows/handler.py @@ -396,6 +396,7 @@ def export_workflow( order=workflow.order, nodes=serialized_nodes, state=workflow.state, + graph=workflow.graph, ) def _ops_count_for_import_workflow( @@ -409,19 +410,6 @@ def _ops_count_for_import_workflow( # Return zero for now, since we don't have Triggers and Actions yet. return 0 - def _sort_serialized_nodes_by_priority( - self, serialized_nodes: List[AutomationNodeDict] - ) -> List[AutomationNodeDict]: - """ - Sorts the serialized nodes so that root-level nodes (those without a parent) - are first, and then sorts by their `order` ASC. 
- """ - - def _node_priority_sort(n): - return n.get("parent_node_id") is not None, n.get("order", 0) - - return sorted(serialized_nodes, key=_node_priority_sort) - def import_nodes( self, workflow: AutomationWorkflow, @@ -451,36 +439,24 @@ def import_nodes( from baserow.contrib.automation.nodes.handler import AutomationNodeHandler imported_nodes = [] - prioritized_nodes = self._sort_serialized_nodes_by_priority(serialized_nodes) - - # True if we have imported at least one node on last iteration - was_imported = True - while was_imported: - was_imported = False - workflow_node_mapping = id_mapping.get("automation_workflow_nodes", {}) - - for serialized_node in prioritized_nodes: - parent_node_id = serialized_node["parent_node_id"] - # check that the node has not already been imported in a - # previous pass or if the parent doesn't exist yet. - if serialized_node["id"] not in workflow_node_mapping and ( - parent_node_id is None or parent_node_id in workflow_node_mapping - ): - imported_node = AutomationNodeHandler().import_node( - workflow, - serialized_node, - id_mapping, - import_export_config=import_export_config, - files_zip=files_zip, - storage=storage, - cache=cache, - ) - - imported_nodes.append(imported_node) - - was_imported = True - if progress: - progress.increment(state=IMPORT_SERIALIZED_IMPORTING) + + for serialized_node in serialized_nodes: + # check that the node has not already been imported in a + # previous pass or if the parent doesn't exist yet. 
+ imported_node = AutomationNodeHandler().import_node( + workflow, + serialized_node, + id_mapping, + import_export_config=import_export_config, + files_zip=files_zip, + storage=storage, + cache=cache, + ) + + imported_nodes.append(imported_node) + + if progress: + progress.increment(state=IMPORT_SERIALIZED_IMPORTING) return imported_nodes @@ -543,6 +519,8 @@ def import_workflows( cache=cache, ) + workflow_instance.get_graph().migrate_graph(id_mapping) + return [i[0] for i in imported_workflows] def import_workflow( @@ -601,6 +579,7 @@ def import_workflow_only( name=serialized_workflow["name"], order=serialized_workflow["order"], state=serialized_workflow["state"] or WorkflowState.DRAFT, + graph=serialized_workflow.get("graph", {}), ) id_mapping["automation_workflows"][ diff --git a/backend/src/baserow/contrib/automation/workflows/models.py b/backend/src/baserow/contrib/automation/workflows/models.py index c05358b6a8..908a67e46d 100644 --- a/backend/src/baserow/contrib/automation/workflows/models.py +++ b/backend/src/baserow/contrib/automation/workflows/models.py @@ -1,9 +1,10 @@ -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from django.db import models from baserow.contrib.automation.constants import WORKFLOW_NAME_MAX_LEN from baserow.contrib.automation.workflows.constants import WorkflowState +from baserow.core.cache import local_cache from baserow.core.jobs.mixins import ( JobWithUndoRedoIds, JobWithUserIpAddress, @@ -76,6 +77,8 @@ class AutomationWorkflow( allow_test_run_until = models.DateTimeField(null=True, blank=True) + graph = models.JSONField(default=dict, help_text="Contains the node graph.") + objects = AutomationWorkflowTrashManager() objects_and_trash = models.Manager() @@ -91,30 +94,37 @@ def get_last_order(cls, automation: "Automation"): queryset = AutomationWorkflow.objects.filter(automation=automation) return cls.get_highest_order_of_queryset(queryset) + 1 - @classmethod - def get_last_node_id( - cls, workflow: 
"AutomationWorkflow", parent_node_id: Optional[int] = None - ) -> Optional[int]: - from baserow.contrib.automation.nodes.models import AutomationNode - - last_node = ( - AutomationNode.objects.filter( - workflow=workflow, parent_node_id=parent_node_id - ) - .order_by("order") - .only("id") - .last() - ) - return last_node.id if last_node else None + def get_trigger(self) -> "AutomationTriggerNode": + """ + Returns the first node of the workflow A.K.A the trigger. + """ - def get_trigger(self, specific: bool = True) -> "AutomationTriggerNode": - node = self.automation_workflow_nodes.get(previous_node_id=None) - return node.specific if specific else node + return self.get_graph().get_node_at_position(None, "south", "") def can_immediately_be_tested(self): + """ + True of the workflow trigger can immediately be dispatched in test mode. + """ + service = self.get_trigger().service.specific return service.get_type().can_immediately_be_tested(service) + def get_graph(self): + """ + Returns the workflow graph. Use the same graph instance related to the workflow + ID regardless of the workflow instance. + """ + + from .graph_handler import NodeGraphHandler + + # always return the same instance to avoid using different graphs from different + # instances of the same workflow + + return local_cache.get( + f"automation_workflow__{self.id}", + lambda: NodeGraphHandler(self), + ) + @property def is_published(self) -> bool: from baserow.contrib.automation.workflows.handler import ( @@ -129,6 +139,39 @@ def is_published(self) -> bool: return workflow.state == WorkflowState.LIVE + def print(self, message=None, original=False): + """ + Prints the graph in a pretty way. Useful for debug. 
+ """ + + import pprint + + if message: + print(message) + + if original: + pprint.pprint(self.get_graph().graph, indent=2) + else: + pprint.pprint(self.get_graph().labeled_graph(), indent=2) + + def assert_reference(self, reference): + """ + Used in test, compare the current workflow graph with the given reference and + raise an error if the graph doesn't match. + """ + + import pprint + + try: + assert ( + self.get_graph().labeled_graph() == reference # nosec B101 + ), "Failed to match the reference." + except AssertionError: + print("Failed to match the reference:") + pprint.pprint(reference, indent=2) + self.print("Current graph:") + raise + class DuplicateAutomationWorkflowJob( JobWithUserIpAddress, JobWithWebsocketId, JobWithUndoRedoIds, Job diff --git a/backend/src/baserow/contrib/automation/workflows/service.py b/backend/src/baserow/contrib/automation/workflows/service.py index 471034e3ec..f1987694d9 100644 --- a/backend/src/baserow/contrib/automation/workflows/service.py +++ b/backend/src/baserow/contrib/automation/workflows/service.py @@ -5,8 +5,6 @@ from baserow.contrib.automation.handler import AutomationHandler from baserow.contrib.automation.models import Automation, AutomationWorkflow from baserow.contrib.automation.nodes.handler import AutomationNodeHandler -from baserow.contrib.automation.nodes.node_types import CorePeriodicTriggerNodeType -from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.operations import OrderAutomationWorkflowsOperationType from baserow.contrib.automation.workflows.handler import AutomationWorkflowHandler from baserow.contrib.automation.workflows.operations import ( @@ -61,7 +59,6 @@ def create_workflow( user: AbstractUser, automation_id: int, name: str, - auto_create_trigger: bool = True, ) -> AutomationWorkflow: """ Returns a new instance of AutomationWorkflow. @@ -69,8 +66,6 @@ def create_workflow( :param user: The user trying to create the workflow. 
:param automation_id: The automation workflow belongs to. :param name: The name of the workflow. - :param auto_create_trigger: Whether to automatically create a - trigger for the workflow. :return: The newly created AutomationWorkflow instance. """ @@ -85,17 +80,6 @@ def create_workflow( workflow = self.handler.create_workflow(automation, name) - if auto_create_trigger: - from baserow.contrib.automation.nodes.handler import AutomationNodeHandler - - trigger_type = automation_node_type_registry.get( - CorePeriodicTriggerNodeType.type - ) - prepared_values = trigger_type.prepare_values({}, user) - AutomationNodeHandler().create_node( - trigger_type, workflow, **prepared_values - ) - automation_workflow_created.send(self, workflow=workflow, user=user) return workflow diff --git a/backend/src/baserow/contrib/integrations/apps.py b/backend/src/baserow/contrib/integrations/apps.py index 0e8346ce88..8c9bd4da88 100644 --- a/backend/src/baserow/contrib/integrations/apps.py +++ b/backend/src/baserow/contrib/integrations/apps.py @@ -40,6 +40,8 @@ def ready(self): from baserow.contrib.integrations.core.service_types import ( CoreHTTPRequestServiceType, CoreHTTPTriggerServiceType, + CoreIteratorServiceType, + CorePeriodicServiceType, CoreRouterServiceType, CoreSMTPEmailServiceType, ) @@ -48,5 +50,7 @@ def ready(self): service_type_registry.register(CoreSMTPEmailServiceType()) service_type_registry.register(CoreRouterServiceType()) service_type_registry.register(CoreHTTPTriggerServiceType()) + service_type_registry.register(CoreIteratorServiceType()) + service_type_registry.register(CorePeriodicServiceType()) import baserow.contrib.integrations.signals # noqa: F403, F401 diff --git a/backend/src/baserow/contrib/integrations/core/models.py b/backend/src/baserow/contrib/integrations/core/models.py index b33f988241..7287b9e9d2 100644 --- a/backend/src/baserow/contrib/integrations/core/models.py +++ b/backend/src/baserow/contrib/integrations/core/models.py @@ -47,6 +47,16 @@ class 
SMTPIntegration(Integration): ) +class CoreIteratorService(Service): + """ + A service to iterate over an array of data. + """ + + source = FormulaField( + help_text="The path of the array.", + ) + + class CoreHTTPRequestService(Service): """ A service for handling HTTP requests. diff --git a/backend/src/baserow/contrib/integrations/core/service_types.py b/backend/src/baserow/contrib/integrations/core/service_types.py index 473fcf9dcd..d4eca7e76c 100644 --- a/backend/src/baserow/contrib/integrations/core/service_types.py +++ b/backend/src/baserow/contrib/integrations/core/service_types.py @@ -40,6 +40,7 @@ from baserow.contrib.integrations.core.models import ( CoreHTTPRequestService, CoreHTTPTriggerService, + CoreIteratorService, CorePeriodicService, CoreRouterService, CoreRouterServiceEdge, @@ -908,9 +909,7 @@ def import_serialized( Responsible for importing the router service and its edges. For each edge that we find, generate a new unique ID and store it in the - `id_mapping` dictionary under the key "automation_edge_outputs". Any nodes - with a `previous_node_output` that matches the edge's UID will be updated to - use the new unique ID in their own deserialization. + `id_mapping` dictionary under the key "automation_edge_outputs". 
""" for edge in serialized_values["edges"]: @@ -1159,6 +1158,11 @@ def get_sample_data(self, service, dispatch_context): return super().get_sample_data(service, dispatch_context) + def get_edges(self, service): + return {str(e.uid): {"label": e.label} for e in service.edges.all()} | { + "": {"label": service.default_edge_label} + } + class CorePeriodicServiceType(TriggerServiceTypeMixin, CoreServiceType): type = "periodic" @@ -1560,3 +1564,89 @@ def export_prepared_values( values["uid"] = str(values["uid"]) return values + + +class CoreIteratorServiceType(ServiceType): + type = "iterator" + model_class = CoreIteratorService + dispatch_types = DispatchTypes.ACTION + + allowed_fields = [ + "source", + ] + + serializer_field_names = [ + "source", + ] + + class SerializedDict(ServiceDict): + source: str + + simple_formula_fields = [ + "source", + ] + + @property + def serializer_field_overrides(self): + from baserow.core.formula.serializers import FormulaSerializerField + + return { + "source": FormulaSerializerField( + help_text=CoreIteratorService._meta.get_field("source").help_text, + required=False, + ), + } + + def get_schema_name(self, service: CoreSMTPEmailService) -> str: + return f"Iterator{service.id}Schema" + + def generate_schema( + self, + service: CoreIteratorService, + allowed_fields: Optional[List[str]] = None, + ) -> Optional[Dict[str, Any]]: + if service.sample_data and ( + allowed_fields is None or "items" in allowed_fields + ): + schema_builder = SchemaBuilder() + schema_builder.add_object(service.sample_data["data"]) + schema = schema_builder.to_schema() + + # Sometimes there is no items if the array is empty + if "items" in schema: + return { + **schema, + "title": self.get_schema_name(service), + } + else: + return None + else: + return None + + def formulas_to_resolve(self, service: CoreRouterService) -> list[FormulaToResolve]: + """ + Returns the formula to resolve for this service. 
+ """ + + return [ + FormulaToResolve( + "source", + service.source, + ensure_array, + "'source' property", + ) + ] + + def dispatch_data( + self, + service: CoreSMTPEmailService, + resolved_values: Dict[str, Any], + dispatch_context: DispatchContext, + ) -> Any: + return resolved_values["source"] + + def dispatch_transform( + self, + data: Any, + ) -> DispatchResult: + return DispatchResult(data=data) diff --git a/backend/src/baserow/contrib/integrations/local_baserow/mixins.py b/backend/src/baserow/contrib/integrations/local_baserow/mixins.py index 4c4de6f253..4e79bbf68e 100644 --- a/backend/src/baserow/contrib/integrations/local_baserow/mixins.py +++ b/backend/src/baserow/contrib/integrations/local_baserow/mixins.py @@ -150,7 +150,9 @@ def deserialize_filters(self, value, id_mapping): id_mapping["database_field_select_options"].get( int(formula["formula"]), formula["formula"] ) - ) + ), + mode=formula["mode"], + version=formula["version"], ) result.append({**f, "field_id": field_id, "value": val}) diff --git a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py index 84f6a250cd..c6317e8f21 100644 --- a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py +++ b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py @@ -2247,9 +2247,15 @@ def _handle_signal( RowSerializer, is_response=True, ) + + data_to_process = { + "results": serializer(rows, many=True).data, + "has_next_page": False, + } + self._process_event( self.model_class.objects.filter(table=table), - serializer(rows, many=True).data, + data_to_process, user=user, ) diff --git a/backend/src/baserow/contrib/integrations/migrations/0022_coreiteratorservice.py b/backend/src/baserow/contrib/integrations/migrations/0022_coreiteratorservice.py new file mode 100644 index 0000000000..b234b55407 --- /dev/null +++ 
b/backend/src/baserow/contrib/integrations/migrations/0022_coreiteratorservice.py @@ -0,0 +1,42 @@ +# Generated by Django 5.0.13 on 2025-09-10 12:05 + +import django.db.models.deletion +from django.db import migrations, models + +import baserow.core.formula.field + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0102_service_sample_data"), + ("integrations", "0021_migrate_to_formula_field_objects"), + ] + + operations = [ + migrations.CreateModel( + name="CoreIteratorService", + fields=[ + ( + "service_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="core.service", + ), + ), + ( + "source", + baserow.core.formula.field.FormulaField( + blank=True, help_text="The path of the array." + ), + ), + ], + options={ + "abstract": False, + }, + bases=("core.service",), + ), + ] diff --git a/backend/src/baserow/core/services/registries.py b/backend/src/baserow/core/services/registries.py index bccf4ac661..b4d848c4ef 100644 --- a/backend/src/baserow/core/services/registries.py +++ b/backend/src/baserow/core/services/registries.py @@ -81,9 +81,6 @@ class ServiceType( # Does this service return a list of record? returns_list = False - # Is this a service that triggers events? - is_trigger: bool = False - # What parent object is responsible for dispatching this `ServiceType`? # It could be via a `DataSource`, in which case `DATA` should be # chosen, or via a `WorkflowAction`, in which case `ACTION` @@ -482,6 +479,9 @@ def import_property_name( return property_name + def get_edges(self, service): + return {"": {"label": ""}} + ServiceTypeSubClass = TypeVar("ServiceTypeSubClass", bound=ServiceType) @@ -525,9 +525,6 @@ def get_default_result_limit(self, service: Service): class TriggerServiceTypeMixin(ABC): - # Is this a service that triggers events? - is_trigger: bool = True - # The callable function which should be called when the event occurs. 
on_event: Callable = lambda *args: None diff --git a/backend/src/baserow/test_utils/fixtures/automation_history.py b/backend/src/baserow/test_utils/fixtures/automation_history.py index 7ab7c48c2b..894bdd83cd 100644 --- a/backend/src/baserow/test_utils/fixtures/automation_history.py +++ b/backend/src/baserow/test_utils/fixtures/automation_history.py @@ -37,9 +37,6 @@ def create_workflow_history(self, user=None, **kwargs): is_test_run = kwargs.pop("status", False) - self.create_local_baserow_rows_created_trigger_node( - user=user, workflow=original_workflow - ) self.create_local_baserow_create_row_action_node( user=user, workflow=original_workflow ) diff --git a/backend/src/baserow/test_utils/fixtures/automation_node.py b/backend/src/baserow/test_utils/fixtures/automation_node.py index 80a9f56a0b..92c9053d19 100644 --- a/backend/src/baserow/test_utils/fixtures/automation_node.py +++ b/backend/src/baserow/test_utils/fixtures/automation_node.py @@ -4,11 +4,13 @@ from baserow.contrib.automation.nodes.models import ( AutomationActionNode, AutomationNode, + CoreIteratorActionNode, CoreRouterActionNode, LocalBaserowCreateRowActionNode, ) from baserow.contrib.automation.nodes.node_types import ( CoreHTTPTriggerNodeType, + CoreIteratorNodeType, CorePeriodicTriggerNodeType, CoreRouterActionNodeType, LocalBaserowCreateRowNodeType, @@ -18,6 +20,7 @@ ) from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.integrations.core.models import CoreRouterServiceEdge +from baserow.core.cache import local_cache from baserow.core.services.registries import service_type_registry @@ -33,12 +36,6 @@ class CoreRouterWithEdges: class AutomationNodeFixtures: def create_automation_node(self, user=None, **kwargs): - workflow = kwargs.pop("workflow", None) - if not workflow: - if user is None: - user = self.create_user() - workflow = self.create_automation_workflow(user) - _node_type = kwargs.pop("type", None) if _node_type is None: node_type = 
automation_node_type_registry.get("create_row") @@ -47,6 +44,14 @@ def create_automation_node(self, user=None, **kwargs): else: node_type = _node_type + workflow = kwargs.pop("workflow", None) + if not workflow: + if user is None: + user = self.create_user() + workflow = self.create_automation_workflow( + user, create_trigger=not node_type.is_workflow_trigger + ) + if "service" not in kwargs: service_kwargs = kwargs.pop("service_kwargs", {}) service_type = service_type_registry.get(node_type.service_type) @@ -54,12 +59,26 @@ def create_automation_node(self, user=None, **kwargs): service_type.model_class, **service_kwargs ) - if "order" not in kwargs: - kwargs["order"] = AutomationNode.get_last_order(workflow) + [ + last_reference_node, + last_position, + last_output, + ] = workflow.get_graph().get_last_position() + + # By default the node is placed at the end of the graph if not position is + # provided + reference_node = kwargs.pop("reference_node", last_reference_node) + position = kwargs.pop("position", last_position) + output = kwargs.pop("output", last_output) + + with local_cache.context(): # We make sure the cache is empty + created_node = AutomationNodeHandler().create_node( + node_type, workflow=workflow, **kwargs + ) + # insert the node in the graph + workflow.get_graph().insert(created_node, reference_node, position, output) - return AutomationNodeHandler().create_node( - node_type, workflow=workflow, **kwargs - ) + return created_node def create_local_baserow_rows_created_trigger_node(self, user=None, **kwargs): return self.create_automation_node( @@ -91,6 +110,15 @@ def create_local_baserow_delete_row_action_node(self, user=None, **kwargs): **kwargs, ) + def create_core_iterator_action_node( + self, user=None, **kwargs + ) -> CoreIteratorActionNode: + return self.create_automation_node( + user=user, + type=CoreIteratorNodeType.type, + **kwargs, + ) + def create_core_router_action_node( self, user=None, **kwargs ) -> CoreRouterActionNode: @@ -106,20 
+134,29 @@ def create_core_router_action_node_with_edges(self, user=None, **kwargs): user=user, service=service, **kwargs ) workflow = router.workflow + edge1 = self.create_core_router_service_edge( - service=service, label="Do this", condition="'true'" + service=service, + label="Do this", + condition="'true'", + output_label="output edge 1", ) - edge1_output = workflow.automation_workflow_nodes.get( - previous_node_output=edge1.uid - ).specific edge2 = self.create_core_router_service_edge( - service=service, label="Do that", condition="'true'" + service=service, + label="Do that", + condition="'true'", + output_label="output edge 2", + ) + + edge1_output = workflow.get_graph().get_node_at_position( + reference_node=router, position="south", output=edge1.uid + ) + edge2_output = workflow.get_graph().get_node_at_position( + reference_node=router, position="south", output=edge2.uid ) - edge2_output = workflow.automation_workflow_nodes.get( - previous_node_output=edge2.uid - ).specific + fallback_output_node = self.create_local_baserow_create_row_action_node( - workflow=workflow, previous_node_id=router.id, previous_node_output="" + workflow=workflow, reference_node=router, label="fallback node" ) return CoreRouterWithEdges( diff --git a/backend/src/baserow/test_utils/fixtures/automation_workflow.py b/backend/src/baserow/test_utils/fixtures/automation_workflow.py index e7aa0b74c2..de05c41cc8 100644 --- a/backend/src/baserow/test_utils/fixtures/automation_workflow.py +++ b/backend/src/baserow/test_utils/fixtures/automation_workflow.py @@ -47,7 +47,12 @@ def create_automation_workflow(self, user=None, **kwargs): service_type.model_class, **trigger_service_kwargs ) self.create_automation_node( - workflow=workflow, type=trigger_type.type, service=service + workflow=workflow, + type=trigger_type.type, + service=service, + reference_node_id=None, + position="south", + output="", ) return workflow diff --git a/backend/src/baserow/test_utils/fixtures/service.py 
b/backend/src/baserow/test_utils/fixtures/service.py index f60645eea1..7114086231 100644 --- a/backend/src/baserow/test_utils/fixtures/service.py +++ b/backend/src/baserow/test_utils/fixtures/service.py @@ -3,6 +3,7 @@ from baserow.contrib.integrations.core.models import ( CoreHTTPRequestService, CoreHTTPTriggerService, + CoreIteratorService, CoreRouterService, CoreSMTPEmailService, ) @@ -99,20 +100,30 @@ def create_core_smtp_email_service(self, **kwargs) -> CoreSMTPEmailService: service = self.create_service(CoreSMTPEmailService, **kwargs) return service + def create_core_iterator_service(self, **kwargs): + return self.create_service(CoreIteratorService, **kwargs) + def create_core_router_service(self, **kwargs): return self.create_service(CoreRouterService, **kwargs) def create_core_router_service_edge(self, service: CoreRouterService, **kwargs): output_node = kwargs.pop("output_node", None) skip_output_node = kwargs.pop("skip_output_node", False) + edge_label = kwargs.get("label", "Edge") + output_label = kwargs.pop("output_label", f"{edge_label} output node") + edge = service.edges.create(**kwargs) + if output_node is None and not skip_output_node: router_node = service.automation_workflow_node self.create_local_baserow_create_row_action_node( - previous_node_output=edge.uid, - previous_node_id=router_node.id, + reference_node=router_node, + output=edge.uid, + position="south", workflow=router_node.workflow, + label=output_label, ) + return edge def create_core_http_trigger_service(self, **kwargs) -> CoreSMTPEmailService: diff --git a/backend/tests/baserow/contrib/automation/api/nodes/test_nodes_views.py b/backend/tests/baserow/contrib/automation/api/nodes/test_nodes_views.py index 028985a19b..c789f620fb 100644 --- a/backend/tests/baserow/contrib/automation/api/nodes/test_nodes_views.py +++ b/backend/tests/baserow/contrib/automation/api/nodes/test_nodes_views.py @@ -16,8 +16,6 @@ CorePeriodicTriggerNodeType, LocalBaserowRowsCreatedNodeTriggerType, ) -from 
baserow.contrib.automation.nodes.registries import automation_node_type_registry -from baserow.contrib.automation.workflows.models import AutomationWorkflow from baserow.test_utils.helpers import AnyDict, AnyInt, AnyStr from tests.baserow.contrib.automation.api.utils import get_api_kwargs @@ -37,32 +35,7 @@ def test_create_node(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) - response = api_client.post( - url, - {"type": "create_row"}, - **get_api_kwargs(token), - ) - assert response.status_code == HTTP_200_OK - assert response.json() == { - "id": AnyInt(), - "label": "", - "order": AnyStr(), - "previous_node_id": trigger.id, - "previous_node_output": "", - "service": AnyDict(), - "type": "create_row", - "workflow": AnyInt(), - "simulate_until_node": False, - } - - -@pytest.mark.django_db -def test_create_node_before(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node_before = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow ) @@ -70,7 +43,12 @@ def test_create_node_before(api_client, data_fixture): url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) response = api_client.post( url, - {"type": "create_row", "before_id": node_before.id}, + { + "type": "update_row", + "reference_node_id": trigger.id, + "position": "south", + "output": "", + }, **get_api_kwargs(token), ) @@ -78,71 +56,29 @@ def test_create_node_before(api_client, data_fixture): assert response.json() == { "id": AnyInt(), "label": "", - "order": AnyStr(), - "previous_node_id": trigger.id, - "previous_node_output": "", "service": AnyDict(), - "type": "create_row", + "type": "update_row", 
"workflow": workflow.id, - "simulate_until_node": False, } - new_node = AutomationNode.objects.get(id=response.json()["id"]) - nodes = AutomationNode.objects.all() - - assert nodes[0].id == trigger.id - assert nodes[1].id == new_node.id - assert nodes[2].id == node_before.id - + AutomationNode.objects.get(id=response.json()["id"]) -@pytest.mark.django_db -def test_create_node_before_router_edge_output(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - service = data_fixture.create_core_router_service() - router = data_fixture.create_core_router_action_node( - service=service, workflow=workflow - ) - edge1 = data_fixture.create_core_router_service_edge( - service=service, label="Edge 1", condition="'true'" - ) - edge1_output = AutomationNode.objects.get( - previous_node_id=router.id, previous_node_output=edge1.uid - ) - edge2 = data_fixture.create_core_router_service_edge( - service=service, label="Edge 2", condition="'true'" - ) - edge2_output = AutomationNode.objects.get( - previous_node_id=router.id, previous_node_output=edge2.uid - ) - - url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) - response = api_client.post( - url, - {"type": "router", "before_id": edge2_output.id}, - **get_api_kwargs(token), + workflow.refresh_from_db() + workflow.assert_reference( + { + "0": "rows_created", + "create_row": {}, + "rows_created": {"next": {"": ["update_row"]}}, + "update_row": {"next": {"": ["create_row"]}}, + } ) - assert response.status_code == HTTP_200_OK - response_json = response.json() - assert response_json["previous_node_id"] == router.id - assert response_json["previous_node_output"] == str(edge2.uid) - - # edge1's output should be *unaffected*. - edge1_output.refresh_from_db() - assert edge1_output.previous_node_id == router.id - assert edge1_output.previous_node_output == str(edge1.uid) - - # edge2's output is now after the node we just created. 
- edge2_output.refresh_from_db() - assert edge2_output.previous_node_id == response_json["id"] - assert edge2_output.previous_node_output == "" @pytest.mark.django_db -def test_create_node_before_invalid(api_client, data_fixture): +def test_create_node_reference_node_invalid(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow_a = data_fixture.create_automation_workflow(user) - trigger_a = workflow_a.get_trigger(specific=False) + trigger_a = workflow_a.get_trigger() workflow_b = data_fixture.create_automation_workflow(user) node2_b = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow_b @@ -152,39 +88,33 @@ def test_create_node_before_invalid(api_client, data_fixture): response = api_client.post( url, - {"type": "create_row", "before_id": trigger_a.id}, + { + "type": "create_row", + "reference_node_id": 99999999999, + "position": "south", + "output": "", + }, **get_api_kwargs(token), ) assert response.status_code == HTTP_400_BAD_REQUEST assert response.json() == { - "error": "ERROR_AUTOMATION_NODE_BEFORE_INVALID", - "detail": "You cannot create an automation node before a trigger.", + "error": "ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID", + "detail": "The reference node 99999999999 doesn't exist", } response = api_client.post( url, - {"type": "create_row", "before_id": node2_b.id}, - **get_api_kwargs(token), - ) - assert response.json() == { - "error": "ERROR_AUTOMATION_NODE_BEFORE_INVALID", - "detail": "The `before` node must belong to the same workflow " - "as the one supplied.", - } - - -@pytest.mark.django_db -def test_create_node_before_does_not_exist(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - response = api_client.post( - reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}), - {"type": "create_row", "before_id": 9999999999}, + { + "type": "create_row", + "reference_node_id": node2_b.id, + 
"position": "south", + "output": "", + }, **get_api_kwargs(token), ) assert response.json() == { - "error": "ERROR_AUTOMATION_NODE_DOES_NOT_EXIST", - "detail": "The requested node does not exist.", + "error": "ERROR_AUTOMATION_NODE_REFERENCE_NODE_INVALID", + "detail": f"The reference node {node2_b.id} doesn't exist", } @@ -218,11 +148,15 @@ def test_create_node_invalid_body(api_client, data_fixture): def test_create_node_invalid_workflow(api_client, data_fixture): user, token = data_fixture.create_user_and_token() - assert AutomationWorkflow.objects.filter(pk=999).count() == 0 - url = reverse(API_URL_LIST, kwargs={"workflow_id": 999}) + url = reverse(API_URL_LIST, kwargs={"workflow_id": 0}) response = api_client.post( url, - {"type": "create_row"}, + { + "type": "create_row", + "reference_node_id": 0, + "position": "south", + "output": "", + }, **get_api_kwargs(token), ) @@ -233,26 +167,6 @@ def test_create_node_invalid_workflow(api_client, data_fixture): } -@pytest.mark.django_db -def test_create_trigger_node_disallowed(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user, name="test") - - url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) - response = api_client.post( - url, - {"type": "rows_created"}, - **get_api_kwargs(token), - ) - - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json() == { - "error": "ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED", - "detail": "Triggers can not be created, deleted or duplicated, " - "they can only be replaced with a different type.", - } - - @pytest.mark.django_db def test_create_node_undo_redo(api_client, data_fixture): user, token = data_fixture.create_user_and_token() @@ -261,7 +175,17 @@ def test_create_node_undo_redo(api_client, data_fixture): url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) api_kwargs = get_api_kwargs(token) - response = api_client.post(url, {"type": 
"create_row"}, **api_kwargs) + + response = api_client.post( + url, + { + "type": "create_row", + "reference_node_id": workflow.get_trigger().id, + "position": "south", + "output": "", + }, + **api_kwargs, + ) assert response.status_code == HTTP_200_OK assert workflow.automation_workflow_nodes.count() == 2 @@ -274,6 +198,7 @@ def test_create_node_undo_redo(api_client, data_fixture): "workflow": workflow.id, }, } + response = api_client.patch(reverse(API_URL_UNDO), payload, **api_kwargs) assert response.status_code == HTTP_200_OK assert workflow.automation_workflow_nodes.count() == 1 @@ -287,13 +212,9 @@ def test_create_node_undo_redo(api_client, data_fixture): def test_get_nodes(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node(workflow=workflow) - # Simulate one node - workflow.simulate_until_node = node - workflow.save() - url = reverse(API_URL_LIST, kwargs={"workflow_id": node.workflow.id}) response = api_client.get(url, **get_api_kwargs(token)) @@ -302,24 +223,16 @@ def test_get_nodes(api_client, data_fixture): { "id": trigger.id, "label": trigger.label, - "order": AnyStr(), - "previous_node_id": None, - "previous_node_output": "", "service": AnyDict(), "type": "rows_created", "workflow": workflow.id, - "simulate_until_node": False, }, { "id": node.id, "label": node.label, - "order": AnyStr(), - "previous_node_id": trigger.id, - "previous_node_output": "", "service": AnyDict(), "type": "create_row", "workflow": node.workflow.id, - "simulate_until_node": True, }, ] @@ -334,92 +247,6 @@ def test_get_node_invalid_workflow(api_client, data_fixture): assert response.status_code == HTTP_404_NOT_FOUND -@pytest.mark.django_db -def test_order_nodes(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = 
data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - node_1 = data_fixture.create_automation_node(user=user, workflow=workflow) - node_2 = data_fixture.create_automation_node(user=user, workflow=workflow) - - list_url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) - api_kwargs = get_api_kwargs(token) - response = api_client.get(list_url, **api_kwargs) - assert [n["id"] for n in response.json()] == [trigger.id, node_1.id, node_2.id] - - order_url = reverse(API_URL_ORDER, kwargs={"workflow_id": workflow.id}) - payload = {"node_ids": [trigger.id, node_2.id, node_1.id]} - response = api_client.post(order_url, payload, **api_kwargs) - assert response.status_code == HTTP_204_NO_CONTENT - - response = api_client.get(list_url, **api_kwargs) - assert [n["id"] for n in response.json()] == [trigger.id, node_2.id, node_1.id] - - -@pytest.mark.django_db -def test_order_nodes_invalid_node(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow_1 = data_fixture.create_automation_workflow(user) - - # Create a node that belongs to another workflow - workflow_2 = data_fixture.create_automation_workflow(user) - node = data_fixture.create_automation_node(user=user, workflow=workflow_2) - - order_url = reverse(API_URL_ORDER, kwargs={"workflow_id": workflow_1.id}) - payload = {"node_ids": [node.id]} - response = api_client.post( - order_url, - payload, - **get_api_kwargs(token), - ) - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json() == { - "detail": f"The node id {node.id} does not belong to the workflow.", - "error": "ERROR_AUTOMATION_NODE_NOT_IN_WORKFLOW", - } - - -@pytest.mark.django_db -def test_order_nodes_undo_redo(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - automation = data_fixture.create_automation_application(user=user) - workflow = data_fixture.create_automation_workflow(user, automation=automation) - trigger = 
workflow.get_trigger(specific=False) - node_1 = data_fixture.create_automation_node(user=user, workflow=workflow) - node_2 = data_fixture.create_automation_node(user=user, workflow=workflow) - - api_kwargs = get_api_kwargs(token) - - order_url = reverse(API_URL_ORDER, kwargs={"workflow_id": workflow.id}) - payload = {"node_ids": [node_2.id, node_1.id]} - response = api_client.post(order_url, payload, **api_kwargs) - assert response.status_code == HTTP_204_NO_CONTENT - - list_url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) - response = api_client.get(list_url, **api_kwargs) - assert [n["id"] for n in response.json()] == [trigger.id, node_2.id, node_1.id] - - payload = { - "scopes": { - "workspace": workflow.automation.workspace.id, - "application": workflow.automation.id, - "root": True, - "workflow": workflow.id, - }, - } - response = api_client.patch(reverse(API_URL_UNDO), payload, **api_kwargs) - assert response.status_code == HTTP_200_OK - - response = api_client.get(list_url, **api_kwargs) - assert [n["id"] for n in response.json()] == [trigger.id, node_1.id, node_2.id] - - response = api_client.patch(reverse(API_URL_REDO), payload, **api_kwargs) - assert response.status_code == HTTP_200_OK - - response = api_client.get(list_url, **api_kwargs) - assert [n["id"] for n in response.json()] == [trigger.id, node_2.id, node_1.id] - - @pytest.mark.django_db def test_delete_node(api_client, data_fixture): user, token = data_fixture.create_user_and_token() @@ -435,15 +262,15 @@ def test_delete_node(api_client, data_fixture): def test_delete_trigger_node_disallowed(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() + data_fixture.create_local_baserow_create_row_action_node(workflow=workflow) delete_url = reverse(API_URL_ITEM, kwargs={"node_id": trigger.id}) response = 
api_client.delete(delete_url, **get_api_kwargs(token)) assert response.status_code == HTTP_400_BAD_REQUEST assert response.json() == { "error": "ERROR_AUTOMATION_NODE_NOT_DELETABLE", - "detail": "Triggers can not be created, deleted or duplicated, " - "they can only be replaced with a different type.", + "detail": "Trigger nodes cannot be deleted if they are followed nodes.", } @@ -489,31 +316,43 @@ def test_delete_node_undo_redo(api_client, data_fixture): def test_duplicate_node(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - action = data_fixture.create_local_baserow_create_row_action_node(workflow=workflow) + action = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="To duplicate" + ) duplicate_url = reverse(API_URL_DUPLICATE, kwargs={"node_id": action.id}) + response = api_client.post(duplicate_url, **get_api_kwargs(token)) + assert response.status_code == HTTP_200_OK response_json = response.json() assert response_json["id"] != action.id - assert response_json["previous_node_output"] == "" - assert response_json["previous_node_id"] == action.id + + workflow.refresh_from_db() + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["To duplicate"]}}, + "To duplicate": {"next": {"": ["To duplicate-"]}}, + "To duplicate-": {}, + } + ) @pytest.mark.django_db def test_duplicate_trigger_node_disallowed(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() api_kwargs = get_api_kwargs(token) duplicate_url = reverse(API_URL_DUPLICATE, kwargs={"node_id": trigger.id}) response = api_client.post(duplicate_url, **api_kwargs) assert response.status_code == HTTP_400_BAD_REQUEST assert response.json() == { - "error": 
"ERROR_AUTOMATION_TRIGGER_NODE_MODIFICATION_DISALLOWED", - "detail": "Triggers can not be created, deleted or duplicated, " - "they can only be replaced with a different type.", + "error": "ERROR_AUTOMATION_TRIGGER_ALREADY_EXISTS", + "detail": "This workflow already has a trigger", } @@ -536,26 +375,22 @@ def test_duplicate_node_invalid_node(api_client, data_fixture): def test_update_node(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node(user=user, workflow=workflow) - assert node.previous_node_output == "" + assert node.label == "" api_kwargs = get_api_kwargs(token) update_url = reverse(API_URL_ITEM, kwargs={"node_id": node.id}) - payload = {"previous_node_output": "foo", "type": "create_row"} + payload = {"label": "foo"} response = api_client.patch(update_url, payload, **api_kwargs) assert response.status_code == HTTP_200_OK assert response.json() == { "id": node.id, - "label": "", - "order": AnyStr(), + "label": "foo", "service": AnyDict(), - "previous_node_id": trigger.id, - "previous_node_output": "foo", - "type": "create_row", + "type": node.get_type().type, "workflow": workflow.id, - "simulate_until_node": False, } @@ -565,7 +400,7 @@ def test_update_node_invalid_node(api_client, data_fixture): api_kwargs = get_api_kwargs(token) update_url = reverse(API_URL_ITEM, kwargs={"node_id": 100}) - payload = {"previous_node_output": "foo", "type": "update_row"} + payload = {"type": "update_row"} response = api_client.patch(update_url, payload, **api_kwargs) assert response.status_code == HTTP_404_NOT_FOUND @@ -583,10 +418,12 @@ def test_update_node_undo_redo(api_client, data_fixture): api_kwargs = get_api_kwargs(token) update_url = reverse(API_URL_ITEM, kwargs={"node_id": node.id}) - payload = {"previous_node_output": "foo", "type": "update_row"} + payload = 
{"label": "foo"} + response = api_client.patch(update_url, payload, **api_kwargs) + assert response.status_code == HTTP_200_OK - assert response.json()["previous_node_output"] == "foo" + assert response.json()["label"] == "foo" payload = { "scopes": { @@ -598,23 +435,20 @@ def test_update_node_undo_redo(api_client, data_fixture): } response = api_client.patch(reverse(API_URL_UNDO), payload, **api_kwargs) assert response.status_code == HTTP_200_OK - assert node.previous_node_output == "" + assert node.label == "" response = api_client.patch(reverse(API_URL_REDO), payload, **api_kwargs) assert response.status_code == HTTP_200_OK node.refresh_from_db() - assert node.previous_node_output == "foo" + assert node.label == "foo" @pytest.mark.django_db -@pytest.mark.parametrize( - "irreplaceable_types", - (["create_row", "rows_created"], ["rows_created", "create_row"]), -) def test_replace_node_type_with_irreplaceable_type( - api_client, data_fixture, irreplaceable_types + api_client, + data_fixture, ): - original_type, irreplaceable_type = irreplaceable_types + original_type, irreplaceable_type = ["create_row", "rows_created"] user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) node = data_fixture.create_automation_node( @@ -634,36 +468,57 @@ def test_replace_node_type_with_irreplaceable_type( @pytest.mark.django_db -@pytest.mark.parametrize( - "replaceable_types", - (["update_row", "delete_row"], ["rows_created", "rows_updated"]), -) +def test_replace_node_type_with_replaceable_type_trigger( + api_client, + data_fixture, +): + original_type, replaceable_type = ["rows_created", "rows_updated"] + user, token = data_fixture.create_user_and_token() + workflow = data_fixture.create_automation_workflow(user, trigger_type=original_type) + trigger = workflow.get_trigger() + + response = api_client.post( + reverse(API_URL_REPLACE, kwargs={"node_id": trigger.id}), + {"new_type": replaceable_type}, + **get_api_kwargs(token), + 
) + + assert response.status_code == HTTP_200_OK + assert response.json() == { + "id": AnyInt(), + "label": "", + "type": replaceable_type, + "workflow": workflow.id, + "service": AnyDict(), + } + + +@pytest.mark.django_db def test_replace_node_type_with_replaceable_type( - api_client, data_fixture, replaceable_types + api_client, + data_fixture, ): - original_type, replaceable_type = replaceable_types + original_type, replaceable_type = ["update_row", "delete_row"] user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node( user=user, type=original_type, workflow=workflow ) + response = api_client.post( reverse(API_URL_REPLACE, kwargs={"node_id": node.id}), {"new_type": replaceable_type}, **get_api_kwargs(token), ) + assert response.status_code == HTTP_200_OK assert response.json() == { "id": AnyInt(), "label": "", "type": replaceable_type, "workflow": workflow.id, - "previous_node_id": trigger.id, - "order": AnyStr(), "service": AnyDict(), - "previous_node_output": "", - "simulate_until_node": False, } @@ -671,21 +526,25 @@ def test_replace_node_type_with_replaceable_type( def test_create_router_node(api_client, data_fixture): user, token = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() url = reverse(API_URL_LIST, kwargs={"workflow_id": workflow.id}) + response = api_client.post( url, - {"type": "router"}, + { + "type": "router", + "reference_node_id": trigger.id, + "position": "south", + "output": "", + }, **get_api_kwargs(token), ) + assert response.status_code == HTTP_200_OK assert response.json() == { "id": AnyInt(), - "order": AnyStr(), "label": "", - "previous_node_id": trigger.id, - "previous_node_output": "", "service": { "sample_data": None, 
"context_data": None, @@ -721,7 +580,6 @@ def test_create_router_node(api_client, data_fixture): "type": "router", }, "type": "router", - "simulate_until_node": False, "workflow": workflow.id, } @@ -738,12 +596,13 @@ def test_updating_router_node_removing_edge_without_output_allowed( workflow=workflow, service=service ) first_edge = data_fixture.create_core_router_service_edge( - service=service, label="Do this", condition="'true'" + service=service, label="Do this", condition="'true'", skip_output_node=True ) - AutomationNode.objects.filter(previous_node_output=first_edge.uid).delete() + second_edge = data_fixture.create_core_router_service_edge( service=service, label="Do that", condition="'true'" ) + response = api_client.patch( reverse(API_URL_ITEM, kwargs={"node_id": router.id}), { @@ -761,6 +620,7 @@ def test_updating_router_node_removing_edge_without_output_allowed( }, **get_api_kwargs(token), ) + assert response.status_code == HTTP_200_OK response_json = response.json() assert response_json["service"]["edges"] == [ @@ -787,7 +647,12 @@ def test_updating_router_node_with_edge_removals_when_they_have_output_nodes_dis edge = data_fixture.create_core_router_service_edge( service=service, label="Do this", condition="'true'" ) - assert AutomationNode.objects.filter(previous_node_output=edge.uid).exists() + + assert ( + workflow.get_graph().get_node_at_position(router, "south", str(edge.uid)) + is not None + ) + response = api_client.patch( reverse(API_URL_ITEM, kwargs={"node_id": router.id}), {"service": {"edges": [], "type": "router"}, "type": "router"}, @@ -813,7 +678,12 @@ def test_deleting_router_node_with_output_nodes_disallowed(api_client, data_fixt edge = data_fixture.create_core_router_service_edge( service=service, label="Do this", condition="'true'" ) - assert AutomationNode.objects.filter(previous_node_output=edge.uid).exists() + + assert ( + workflow.get_graph().get_node_at_position(router, "south", str(edge.uid)) + is not None + ) + response = 
api_client.delete( reverse(API_URL_ITEM, kwargs={"node_id": router.id}), **get_api_kwargs(token), @@ -837,7 +707,12 @@ def test_replacing_router_node_with_output_nodes_disallowed(api_client, data_fix edge = data_fixture.create_core_router_service_edge( service=service, label="Do this", condition="'true'" ) - assert AutomationNode.objects.filter(previous_node_output=edge.uid).exists() + + assert ( + workflow.get_graph().get_node_at_position(router, "south", str(edge.uid)) + is not None + ) + response = api_client.post( reverse(API_URL_REPLACE, kwargs={"node_id": router.id}), {"new_type": "create_row"}, @@ -948,10 +823,6 @@ def test_simulate_dispatch_trigger_node_with_sample_data( mock_async_start_workflow, api_client, data_fixture ): user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow( - user=user, - trigger_type=LocalBaserowRowsCreatedNodeTriggerType.type, - ) # Create a trigger node with service table, fields, _ = data_fixture.build_table( @@ -960,13 +831,15 @@ def test_simulate_dispatch_trigger_node_with_sample_data( rows=[["Blueberry Muffin"]], ) - trigger_service = data_fixture.create_local_baserow_rows_created_service( - table=table, - integration=data_fixture.create_local_baserow_integration(user=user), - ) - trigger_node = data_fixture.create_automation_node( - user=user, workflow=workflow, type="rows_created", service=trigger_service + workflow = data_fixture.create_automation_workflow( + user=user, + trigger_type=LocalBaserowRowsCreatedNodeTriggerType.type, + trigger_service_kwargs={ + "table": table, + "integration": data_fixture.create_local_baserow_integration(user=user), + }, ) + trigger_node = workflow.get_trigger() # Initially, the sample_data should be empty assert trigger_node.workflow.simulate_until_node is None @@ -1102,79 +975,3 @@ def test_simulate_dispatch_action_node_with_sample_data( workflow.refresh_from_db() assert workflow.simulate_until_node_id == action_node.id - - 
-@pytest.mark.django_db -@pytest.mark.parametrize( - "node_type", - [ - node_type - for node_type in automation_node_type_registry.get_all() - if not node_type.is_fixed - ], -) -def test_move_movable_node(node_type, api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - before_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow - ) - node = data_fixture.create_automation_node( - workflow=workflow, - type=node_type.type, - ) - response = api_client.post( - reverse(API_URL_MOVE, kwargs={"node_id": node.id}), - {"previous_node_id": trigger.id, "previous_node_output": ""}, - **get_api_kwargs(token), - ) - assert response.status_code == HTTP_200_OK - assert response.json() == { - "id": node.id, - "label": node.label, - "order": AnyStr(), - "previous_node_id": trigger.id, - "previous_node_output": node.previous_node_output, - "service": AnyDict(), - "type": node_type.type, - "workflow": workflow.id, - "simulate_until_node": False, - } - before_node.refresh_from_db() - assert before_node.previous_node_id == node.id - - -@pytest.mark.django_db -@pytest.mark.parametrize( - "node_type", - [ - node_type - for node_type in automation_node_type_registry.get_all() - if node_type.is_fixed - ], -) -def test_move_fixed_node(node_type, api_client, data_fixture): - user, token = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - before_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow - ) - node = data_fixture.create_automation_node( - workflow=workflow, - type=node_type.type, - ) - response = api_client.post( - reverse(API_URL_MOVE, kwargs={"node_id": node.id}), - {"previous_node_id": trigger.id, "previous_node_output": ""}, - **get_api_kwargs(token), - ) - assert response.status_code == 
HTTP_400_BAD_REQUEST - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json() == { - "error": "ERROR_AUTOMATION_NODE_NOT_MOVABLE", - "detail": "This automation node cannot be moved.", - } - before_node.refresh_from_db() - assert before_node.previous_node_id == trigger.id diff --git a/backend/tests/baserow/contrib/automation/api/test_automation_application_views.py b/backend/tests/baserow/contrib/automation/api/test_automation_application_views.py index 9306c1afe4..db6e4b44b8 100644 --- a/backend/tests/baserow/contrib/automation/api/test_automation_application_views.py +++ b/backend/tests/baserow/contrib/automation/api/test_automation_application_views.py @@ -17,6 +17,7 @@ def test_get_automation_application(api_client, data_fixture): workflow = data_fixture.create_automation_workflow( automation=automation, name="test" ) + trigger = workflow.get_trigger() url = reverse("api:applications:item", kwargs={"application_id": automation.id}) @@ -50,6 +51,7 @@ def test_get_automation_application(api_client, data_fixture): "simulate_until_node_id": None, "state": "draft", "published_on": None, + "graph": {"0": trigger.id, str(trigger.id): {}}, } ], } @@ -68,6 +70,7 @@ def test_list_automation_applications(api_client, data_fixture): workflow = data_fixture.create_automation_workflow( automation=automation, name="test" ) + trigger = workflow.get_trigger() url = reverse("api:applications:list", kwargs={"workspace_id": workspace.id}) @@ -102,6 +105,7 @@ def test_list_automation_applications(api_client, data_fixture): "simulate_until_node_id": None, "state": "draft", "published_on": None, + "graph": {"0": trigger.id, str(trigger.id): {}}, } ], } diff --git a/backend/tests/baserow/contrib/automation/api/test_automation_serializer.py b/backend/tests/baserow/contrib/automation/api/test_automation_serializer.py index feae215132..d968b57f46 100644 --- a/backend/tests/baserow/contrib/automation/api/test_automation_serializer.py +++ 
b/backend/tests/baserow/contrib/automation/api/test_automation_serializer.py @@ -41,6 +41,7 @@ def test_serializer_has_expected_fields(automation_fixture): def test_serializer_get_workflows(automation_fixture): automation = automation_fixture["automation"] workflow = automation_fixture["workflow"] + trigger = workflow.get_trigger() serializer = AutomationSerializer(instance=automation) @@ -55,5 +56,6 @@ def test_serializer_get_workflows(automation_fixture): "state": "draft", "simulate_until_node_id": None, "published_on": None, + "graph": {"0": trigger.id, str(trigger.id): {}}, } ] diff --git a/backend/tests/baserow/contrib/automation/api/workflows/test_automation_workflow_serializer.py b/backend/tests/baserow/contrib/automation/api/workflows/test_automation_workflow_serializer.py index 2f3529e98d..25f4479fd5 100644 --- a/backend/tests/baserow/contrib/automation/api/workflows/test_automation_workflow_serializer.py +++ b/backend/tests/baserow/contrib/automation/api/workflows/test_automation_workflow_serializer.py @@ -33,6 +33,7 @@ def test_automation_workflow_serializer_fields(workflow_fixture): assert sorted(serializer.data.keys()) == [ "allow_test_run_until", "automation_id", + "graph", "id", "name", "order", diff --git a/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py b/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py index df3c5d47ac..f06fcb43e9 100644 --- a/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py +++ b/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py @@ -55,12 +55,11 @@ def test_create_workflow(api_client, data_fixture): "state": "draft", "published_on": None, "simulate_until_node_id": None, + "graph": {}, } workflow = automation.workflows.get(id=response_json["id"]) - assert workflow.automation_workflow_nodes.count() == 1 - node = workflow.automation_workflow_nodes.get().specific - assert node.get_type().is_workflow_trigger + assert 
workflow.automation_workflow_nodes.count() == 0 @pytest.mark.django_db @@ -124,6 +123,7 @@ def test_read_workflow(api_client, data_fixture): user, token = data_fixture.create_user_and_token() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow(automation=automation) + trigger = workflow.get_trigger() url = reverse(API_URL_WORKFLOW_ITEM, kwargs={"workflow_id": workflow.id}) response = api_client.get( @@ -142,6 +142,7 @@ def test_read_workflow(api_client, data_fixture): "simulate_until_node_id": None, "state": "draft", "published_on": None, + "graph": {"0": trigger.id, str(trigger.id): {}}, } @@ -341,11 +342,13 @@ def test_duplicate_workflow(api_client, data_fixture): workflow = data_fixture.create_automation_workflow( user, automation=automation, name="test" ) + trigger = workflow.get_trigger() url = reverse(API_URL_WORKFLOW_DUPLICATE, kwargs={"workflow_id": workflow.id}) response = api_client.post(url, format="json", HTTP_AUTHORIZATION=f"JWT {token}") assert response.status_code == HTTP_202_ACCEPTED + assert response.json() == { "duplicated_automation_workflow": None, "human_readable_error": "", @@ -359,6 +362,7 @@ def test_duplicate_workflow(api_client, data_fixture): "state": "draft", "published_on": None, "simulate_until_node_id": None, + "graph": {"0": trigger.id, str(trigger.id): {}}, }, "progress_percentage": 0, "state": "pending", diff --git a/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py b/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py index ff5443ca85..05a483851f 100644 --- a/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py +++ b/backend/tests/baserow/contrib/automation/data_providers/test_data_provider_types.py @@ -4,6 +4,7 @@ AutomationDispatchContext, ) from baserow.contrib.automation.data_providers.data_provider_types import ( + CurrentIterationDataProviderType, 
PreviousNodeProviderType, ) from baserow.core.formula.exceptions import InvalidFormulaContext @@ -14,7 +15,7 @@ def test_previous_node_data_provider_get_data_chunk(data_fixture): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user=user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() first_action = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow, ) @@ -25,11 +26,7 @@ def test_previous_node_data_provider_get_data_chunk(data_fixture): dispatch_context = AutomationDispatchContext(workflow) dispatch_context.after_dispatch( - trigger, DispatchResult(data=[{"field_1": "Horse"}]) - ) - - dispatch_context.after_dispatch( - trigger, DispatchResult(data=[{"field_1": "Horse"}]) + trigger, DispatchResult(data={"results": [{"field_1": "Horse"}]}) ) dispatch_context.after_dispatch( first_action, DispatchResult(data={"field_2": "Badger"}) @@ -54,6 +51,18 @@ def test_previous_node_data_provider_get_data_chunk(data_fixture): # If a formula path references a non-existent node, it should raise an exception. 
with pytest.raises(InvalidFormulaContext) as exc: PreviousNodeProviderType().get_data_chunk(dispatch_context, ["999", "field_3"]) + assert exc.value.args[0] == "The previous node doesn't exist" + + dispatch_context = AutomationDispatchContext(workflow) + + dispatch_context.after_dispatch( + trigger, DispatchResult(data={"results": [{"field_1": "Horse"}]}) + ) + # Existing node but after + with pytest.raises(InvalidFormulaContext) as exc: + PreviousNodeProviderType().get_data_chunk( + dispatch_context, [str(first_action.id), "field_2"] + ) assert ( exc.value.args[0] == "The previous node id is not present in the dispatch context results" @@ -78,3 +87,68 @@ def test_previous_node_data_provider_import_path(data_fixture): "0", "field_1", ] + + +@pytest.mark.django_db +def test_current_iteration_data_provider_get_data_chunk(data_fixture): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user=user) + trigger = workflow.get_trigger() + iterator = data_fixture.create_core_iterator_action_node( + workflow=workflow, + ) + data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + ) + + dispatch_context = AutomationDispatchContext(workflow) + + dispatch_context.after_dispatch( + trigger, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}]) + ) + + dispatch_context.after_dispatch( + iterator, DispatchResult(data=[{"field_1": "Horse"}, {"field_1": "Duck"}]) + ) + + dispatch_context.set_current_iteration(iterator, 0) + + assert ( + CurrentIterationDataProviderType().get_data_chunk( + dispatch_context, [str(iterator.id), "item", "field_1"] + ) + == "Horse" + ) + + dispatch_context.set_current_iteration(iterator, 1) + + assert ( + CurrentIterationDataProviderType().get_data_chunk( + dispatch_context, [str(iterator.id), "item", "field_1"] + ) + == "Duck" + ) + + +@pytest.mark.django_db +def test_current_iteration_data_provider_import_path(data_fixture): + data_provider = CurrentIterationDataProviderType() + + 
node = data_fixture.create_core_iterator_action_node() + + valid_id_mapping = {"automation_workflow_nodes": {1: node.id}} + invalid_id_mapping = {"automation_workflow_nodes": {3: 4}} + + path = ["1", "item", "field_1"] + + assert data_provider.import_path(path, {}) == ["1", "item", "field_1"] + assert data_provider.import_path(path, invalid_id_mapping) == [ + "1", + "item", + "field_1", + ] + assert data_provider.import_path(path, valid_id_mapping) == [ + str(node.id), + "item", + "field_1", + ] diff --git a/backend/tests/baserow/contrib/automation/history/test_history_service.py b/backend/tests/baserow/contrib/automation/history/test_history_service.py index c188e9b334..f6ea29284f 100644 --- a/backend/tests/baserow/contrib/automation/history/test_history_service.py +++ b/backend/tests/baserow/contrib/automation/history/test_history_service.py @@ -25,6 +25,7 @@ def test_get_workflow_history_permission_error(data_fixture): def test_get_workflow_history_returns_ordered_histories(data_fixture): user = data_fixture.create_user() original_workflow = data_fixture.create_automation_workflow(user=user) + history_1 = data_fixture.create_workflow_history( original_workflow=original_workflow ) diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_actions.py b/backend/tests/baserow/contrib/automation/nodes/test_node_actions.py index 55dba47913..fa7020d169 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_actions.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_actions.py @@ -18,7 +18,6 @@ from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.nodes.trash_types import AutomationNodeTrashableItemType from baserow.core.action.handler import ActionHandler -from baserow.core.cache import local_cache from baserow.core.trash.handler import TrashHandler @@ -31,50 +30,72 @@ def test_create_node_action(data_fixture): automation = 
data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) node_before = data_fixture.create_automation_node( - workflow=workflow, - type=LocalBaserowCreateRowNodeType.type, + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="Node before" ) node_after = data_fixture.create_automation_node( workflow=workflow, type=LocalBaserowCreateRowNodeType.type, previous_node=node_before, + label="Node after", ) node_type = automation_node_type_registry.get(LocalBaserowCreateRowNodeType.type) - with local_cache.context(): - node = CreateAutomationNodeActionType.do( - user, - node_type, - workflow, - data={"before_id": node_after.id}, - ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Node before"]}}, + "Node before": {"next": {"": ["Node after"]}}, + "Node after": {}, + } + ) - # The node is created - node_after.refresh_from_db() - assert node.previous_node_id == node_before.id - assert node_after.previous_node_id == node.id + node = CreateAutomationNodeActionType.do( + user, + node_type, + workflow, + dict(reference_node_id=node_before.id, position="south", output=""), + ) - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Node before"]}}, + "Node before": {"next": {"": ["create_row"]}}, + "create_row": {"next": {"": ["Node after"]}}, + "Node after": {}, + } + ) + + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "Node after": {}, + "Node before": {"next": {"": ["Node after"]}}, + "rows_created": {"next": {"": ["Node before"]}}, + } + ) # The node is trashed node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert node.trashed - assert node_after.previous_node_id 
== node_before.id - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) # The node is restored node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert not node.trashed - assert node_after.previous_node_id == node.id + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Node before"]}}, + "Node before": {"next": {"": ["create_row"]}}, + "create_row": {"next": {"": ["Node after"]}}, + "Node after": {}, + } + ) @pytest.mark.django_db @@ -86,24 +107,38 @@ def test_replace_automation_action_node_type(data_fixture): automation = data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) node = data_fixture.create_automation_node( - workflow=workflow, - type=LocalBaserowCreateRowNodeType.type, + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="To replace" ) node_after = data_fixture.create_automation_node( - workflow=workflow, type=LocalBaserowCreateRowNodeType.type + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="After" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "To replace": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["To replace"]}}, + } ) - with local_cache.context(): - replaced_node = ReplaceAutomationNodeActionType.do( - user, node.id, LocalBaserowUpdateRowNodeType.type - ) + replaced_node = ReplaceAutomationNodeActionType.do( + user, node.id, LocalBaserowUpdateRowNodeType.type + ) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "rows_created": {"next": {"": ["update_row"]}}, + "update_row": {"next": {"": ["After"]}}, + } + ) # The original node is trashed, we have a new node of the new type. 
node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert node.trashed assert isinstance(replaced_node, LocalBaserowUpdateRowNodeType.model_class) - assert node_after.previous_node_id == replaced_node.id # Confirm that the `node` trash entry exists, and it is # `managed` to prevent users from restoring it manually. @@ -113,18 +148,23 @@ def test_replace_automation_action_node_type(data_fixture): ) assert original_trash_entry.managed - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "To replace": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["To replace"]}}, + } + ) # The original node is restored, the new node is trashed. node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert not node.trashed + replaced_node.refresh_from_db(fields=["trashed"]) assert replaced_node.trashed - assert node_after.previous_node_id == node.id # Confirm that the `replaced_node` trash entry exists, and it # is `managed` to prevent users from restoring it manually. @@ -134,18 +174,22 @@ def test_replace_automation_action_node_type(data_fixture): ) assert replaced_trash_entry.managed - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "rows_created": {"next": {"": ["update_row"]}}, + "update_row": {"next": {"": ["After"]}}, + } + ) # The original node is trashed again, the new node is restored. 
node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert node.trashed + replaced_node.refresh_from_db(fields=["trashed"]) assert not replaced_node.trashed - assert node_after.previous_node_id == replaced_node.id # Confirm that the `node` trash entry still exists, and it # is `managed` to prevent users from restoring it manually. @@ -170,19 +214,33 @@ def test_replace_automation_trigger_node_type(data_fixture): type=LocalBaserowCreateRowNodeType.type, ) - with local_cache.context(): - replaced_trigger = ReplaceAutomationNodeActionType.do( - user, original_trigger.id, LocalBaserowRowsUpdatedNodeTriggerType.type - ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["create_row"]}}, + "create_row": {}, + } + ) + + replaced_trigger = ReplaceAutomationNodeActionType.do( + user, original_trigger.id, LocalBaserowRowsUpdatedNodeTriggerType.type + ) + + workflow.assert_reference( + { + "0": "rows_updated", + "rows_updated": {"next": {"": ["create_row"]}}, + "create_row": {}, + } + ) # The original trigger is trashed, we have a new trigger of the new type. original_trigger.refresh_from_db(fields=["trashed"]) - action_node.refresh_from_db() + assert original_trigger.trashed assert isinstance( replaced_trigger, LocalBaserowRowsUpdatedNodeTriggerType.model_class ) - assert action_node.previous_node_id == replaced_trigger.id # Confirm that the `original_trigger` trash entry exists, and # it is `managed` to prevent users from restoring it manually. 
@@ -192,18 +250,22 @@ def test_replace_automation_trigger_node_type(data_fixture): ) assert original_trash_entry.managed - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["create_row"]}}, + "create_row": {}, + } + ) # The original trigger is restored, the new trigger is trashed. original_trigger.refresh_from_db(fields=["trashed"]) - action_node.refresh_from_db() assert not original_trigger.trashed + replaced_trigger.refresh_from_db(fields=["trashed"]) assert replaced_trigger.trashed - assert action_node.previous_node_id == original_trigger.id # Confirm that the `replaced_trigger` trash entry exists, and # it is `managed` to prevent users from restoring it manually. @@ -213,18 +275,22 @@ def test_replace_automation_trigger_node_type(data_fixture): ) assert replaced_trash_entry.managed - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_updated", + "rows_updated": {"next": {"": ["create_row"]}}, + "create_row": {}, + } + ) # The original trigger is trashed again, the new trigger is restored. original_trigger.refresh_from_db(fields=["trashed"]) - action_node.refresh_from_db() assert original_trigger.trashed + replaced_trigger.refresh_from_db(fields=["trashed"]) assert not replaced_trigger.trashed - assert action_node.previous_node_id == replaced_trigger.id # Confirm that the `original_trigger` trash entry still exists, # and it is `managed` to prevent users from restoring it manually. 
@@ -244,48 +310,69 @@ def test_delete_node_action(data_fixture): automation = data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) node_before = data_fixture.create_automation_node( - workflow=workflow, - type=LocalBaserowCreateRowNodeType.type, + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="Before" ) node = data_fixture.create_automation_node( - workflow=workflow, - type=LocalBaserowCreateRowNodeType.type, - previous_node=node_before, + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="To delete" ) node_after = data_fixture.create_automation_node( - workflow=workflow, type=LocalBaserowCreateRowNodeType.type, previous_node=node + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="After" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "Before": {"next": {"": ["To delete"]}}, + "To delete": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["Before"]}}, + } ) - with local_cache.context(): - DeleteAutomationNodeActionType.do(user, node.id) + DeleteAutomationNodeActionType.do(user, node.id) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "Before": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The node is trashed node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert node.trashed - assert node_after.previous_node_id == node_before.id - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "Before": {"next": {"": ["To delete"]}}, + "To delete": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The original node is restored 
node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert not node.trashed - assert node_after.previous_node_id == node.id - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + workflow.assert_reference( + { + "0": "rows_created", + "After": {}, + "Before": {"next": {"": ["After"]}}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The node is trashed again node.refresh_from_db(fields=["trashed"]) - node_after.refresh_from_db() assert node.trashed - assert node_after.previous_node_id == node_before.id @pytest.mark.django_db @@ -297,35 +384,61 @@ def test_delete_node_action_after_nothing(data_fixture): automation = data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) node_before = data_fixture.create_automation_node( - workflow=workflow, - type=LocalBaserowCreateRowNodeType.type, + workflow=workflow, type=LocalBaserowCreateRowNodeType.type, label="Before" ) node = data_fixture.create_automation_node( workflow=workflow, type=LocalBaserowCreateRowNodeType.type, previous_node=node_before, + label="To delete", ) + workflow.assert_reference( + { + "0": "rows_created", + "Before": {"next": {"": ["To delete"]}}, + "To delete": {}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) + + DeleteAutomationNodeActionType.do(user, node.id) - with local_cache.context(): - DeleteAutomationNodeActionType.do(user, node.id) + workflow.assert_reference( + { + "0": "rows_created", + "Before": {"next": {"": []}}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The node is trashed node.refresh_from_db(fields=["trashed"]) assert node.trashed - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.undo(user, 
[WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "Before": {"next": {"": ["To delete"]}}, + "To delete": {}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The original node is restored node.refresh_from_db(fields=["trashed"]) assert not node.trashed - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) + + workflow.assert_reference( + { + "0": "rows_created", + "Before": {"next": {"": []}}, + "rows_created": {"next": {"": ["Before"]}}, + } + ) # The node is trashed again node.refresh_from_db(fields=["trashed"]) @@ -341,44 +454,60 @@ def test_duplicate_node_action(data_fixture): automation = data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) source_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + workflow=workflow, label="Source" ) after_source_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + workflow=workflow, label="After" + ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Source"]}}, + "Source": {"next": {"": ["After"]}}, + "After": {}, + } ) - with local_cache.context(): - duplicated_node = DuplicateAutomationNodeActionType.do(user, source_node.id) + duplicated_node = DuplicateAutomationNodeActionType.do(user, source_node.id) - # The node is duplicated - assert duplicated_node.previous_node_id == source_node.id - after_source_node.refresh_from_db() - assert after_source_node.previous_node_id == duplicated_node.id - assert after_source_node.previous_node_output == "" + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Source"]}}, + "Source": {"next": {"": ["Source-"]}}, + "Source-": 
{"next": {"": ["After"]}}, + "After": {}, + } + ) - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - # The duplicated node is trashed - duplicated_node.refresh_from_db(fields=["trashed"]) + duplicated_node.refresh_from_db() assert duplicated_node.trashed - after_source_node.refresh_from_db() - assert after_source_node.previous_node_id == source_node.id - assert after_source_node.previous_node_output == "" - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Source"]}}, + "Source": {"next": {"": ["After"]}}, + "After": {}, + } + ) + + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - # The duplicated node is restored - duplicated_node.refresh_from_db(fields=["trashed"]) + duplicated_node.refresh_from_db() assert not duplicated_node.trashed - after_source_node.refresh_from_db() - assert after_source_node.previous_node_id == duplicated_node.id - assert after_source_node.previous_node_output == "" + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Source"]}}, + "Source": {"next": {"": ["Source-"]}}, + "Source-": {"next": {"": ["After"]}}, + "After": {}, + } + ) @pytest.mark.django_db @@ -399,50 +528,85 @@ def test_duplicate_node_action_with_multiple_outputs(data_fixture): edge2_output = core_router_with_edges.edge2_output fallback_output_node = core_router_with_edges.fallback_output_node - with local_cache.context(): - duplicated_node = DuplicateAutomationNodeActionType.do(user, source_node.id) + workflow.assert_reference( + { + "0": "rows_created", + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + 
"rows_created": {"next": {"": ["router"]}}, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) - # The node is duplicated - assert duplicated_node.previous_node_id == source_node.id + duplicated_node = DuplicateAutomationNodeActionType.do(user, source_node.id) + duplicated_node.label = "Duplicated router" + duplicated_node.save() + + assert duplicated_node.id != source_node.id + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["Duplicated router"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "Duplicated router": {"next": {"Default": ["fallback node"]}}, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) - # The edge1/edge2 outputs are intact. - edge1_output.refresh_from_db() - assert edge1_output.previous_node_id == source_node.id - assert edge1_output.previous_node_output == str(edge1.uid) - edge2_output.refresh_from_db() - assert edge2_output.previous_node_id == source_node.id - assert edge2_output.previous_node_output == str(edge2.uid) + ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - # The original fallback output node is now after our duplicated router. 
- fallback_output_node.refresh_from_db() - assert fallback_output_node.previous_node_id == duplicated_node.id - assert fallback_output_node.previous_node_output == "" + workflow.assert_reference( + { + "0": "rows_created", + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "rows_created": {"next": {"": ["router"]}}, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) - with local_cache.context(): - ActionHandler.undo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) + ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - # The duplicated node is trashed - duplicated_node.refresh_from_db(fields=["trashed"]) - assert duplicated_node.trashed - fallback_output_node.refresh_from_db() - # The original fallback output node is now after our source router, again. - assert fallback_output_node.previous_node_id == source_node.id - assert fallback_output_node.previous_node_output == "" - - with local_cache.context(): - ActionHandler.redo( - user, [WorkflowActionScopeType.value(workflow.id)], session_id - ) - - # The duplicated node is restored - duplicated_node.refresh_from_db(fields=["trashed"]) - assert not duplicated_node.trashed - fallback_output_node.refresh_from_db() - # The original fallback output node is now after our duplicated router, again. 
- assert fallback_output_node.previous_node_id == duplicated_node.id - assert fallback_output_node.previous_node_output == "" + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["Duplicated router"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "Duplicated router": {"next": {"Default": ["fallback node"]}}, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) @pytest.mark.django_db @@ -453,35 +617,64 @@ def test_move_node_action(data_fixture): workspace = data_fixture.create_workspace(user=user) automation = data_fixture.create_automation_application(workspace=workspace) workflow = data_fixture.create_automation_workflow(user, automation=automation) - after_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + first_action = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="first action" ) - previous_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + second_action = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="second action" ) node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + workflow=workflow, label="moved node" ) - moved_node = MoveAutomationNodeActionType.do(user, node.id, after_node.id) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["first action"]}}, + "first action": {"next": {"": ["second action"]}}, + "second action": {"next": {"": ["moved node"]}}, + "moved node": {}, + } + ) + + moved_node = MoveAutomationNodeActionType.do( + user, node.id, first_action.id, "south", "" + ) - assert moved_node.previous_node_id == after_node.id - previous_node.refresh_from_db() - assert previous_node.previous_node_id == moved_node.id + assert moved_node == node + workflow.assert_reference( + { + "0": 
"rows_created", + "rows_created": {"next": {"": ["first action"]}}, + "first action": {"next": {"": ["moved node"]}}, + "moved node": {"next": {"": ["second action"]}}, + "second action": {"next": {"": []}}, + } + ) ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - moved_node.refresh_from_db() - assert moved_node.previous_node_id == previous_node.id - previous_node.refresh_from_db() - assert previous_node.previous_node_id == after_node.id + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["first action"]}}, + "first action": {"next": {"": ["second action"]}}, + "second action": {"next": {"": ["moved node"]}}, + "moved node": {}, + } + ) ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - moved_node.refresh_from_db() - assert moved_node.previous_node_id == after_node.id - previous_node.refresh_from_db() - assert previous_node.previous_node_id == moved_node.id + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["first action"]}}, + "first action": {"next": {"": ["moved node"]}}, + "moved node": {"next": {"": ["second action"]}}, + "second action": {"next": {"": []}}, + } + ) @pytest.mark.django_db @@ -502,23 +695,81 @@ def test_move_node_action_to_output(data_fixture): edge2 = core_router_with_edges.edge2 edge2_output = core_router_with_edges.edge2_output # <- from here + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "fallback node": {}, + "output edge 2": {}, + "output edge 1": {}, + } + ) + moved_node = MoveAutomationNodeActionType.do( - user, edge2_output.id, router.id, edge1_output.previous_node_output + user, edge2_output.id, router.id, "south", str(edge1.uid) ) # The node we're trying to move is `edge2_output` - assert moved_node == 
edge2_output - assert moved_node.previous_node_id == router.id - assert moved_node.previous_node_output == str(edge1.uid) + assert moved_node.id == edge2_output.id + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Do this": ["output edge 2"], + "Do that": [], + "Default": ["fallback node"], + } + }, + "output edge 2": {"next": {"": ["output edge 1"]}}, + "output edge 1": {}, + "fallback node": {}, + } + ) ActionHandler.undo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - moved_node.refresh_from_db() - assert moved_node.previous_node_id == router.id - assert moved_node.previous_node_output == str(edge2.uid) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) ActionHandler.redo(user, [WorkflowActionScopeType.value(workflow.id)], session_id) - moved_node.refresh_from_db() - assert moved_node.previous_node_id == router.id - assert moved_node.previous_node_output == str(edge1.uid) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Do this": ["output edge 2"], + "Do that": [], + "Default": ["fallback node"], + } + }, + "output edge 2": {"next": {"": ["output edge 1"]}}, + "output edge 1": {}, + "fallback node": {}, + } + ) diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_dispatch.py b/backend/tests/baserow/contrib/automation/nodes/test_node_dispatch.py new file mode 100644 index 0000000000..1f37100dde --- /dev/null +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_dispatch.py @@ -0,0 +1,408 @@ +import pytest + +from baserow.contrib.automation.automation_dispatch_context import ( + AutomationDispatchContext, 
+) +from baserow.contrib.automation.nodes.handler import AutomationNodeHandler +from baserow.contrib.automation.workflows.constants import WorkflowState + + +@pytest.mark.django_db +def test_run_workflow_with_create_row_action(data_fixture): + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + integration = data_fixture.create_local_baserow_integration(user=user) + database = data_fixture.create_database_application(workspace=workspace) + trigger_table = data_fixture.create_database_table(database=database) + action_table = data_fixture.create_database_table(database=database) + action_table_field = data_fixture.create_text_field(table=action_table) + workflow = data_fixture.create_automation_workflow(user) + trigger = workflow.get_trigger() + trigger_service = trigger.service.specific + trigger_service.table = trigger_table + trigger_service.integration = integration + trigger_service.save() + action_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + service=data_fixture.create_local_baserow_upsert_row_service( + table=action_table, + integration=integration, + ), + ) + action_node.service.field_mappings.create(field=action_table_field, value="'Horse'") + + action_table_model = action_table.get_model() + assert action_table_model.objects.count() == 0 + + dispatch_context = AutomationDispatchContext(workflow, {}) + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + row = action_table_model.objects.first() + assert getattr(row, action_table_field.db_column) == "Horse" + assert dispatch_context.dispatch_history == [trigger.id, action_node.id] + + +@pytest.mark.django_db +def test_run_workflow_with_update_row_action(data_fixture): + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + integration = data_fixture.create_local_baserow_integration(user=user) + database = 
data_fixture.create_database_application(workspace=workspace) + trigger_table = data_fixture.create_database_table(database=database) + action_table = data_fixture.create_database_table(database=database) + action_table_field = data_fixture.create_text_field(table=action_table) + action_table_row = action_table.get_model().objects.create( + **{f"field_{action_table_field.id}": "Horse"} + ) + workflow = data_fixture.create_automation_workflow(user) + trigger = workflow.get_trigger() + trigger_service = trigger.service.specific + trigger_service.table = trigger_table + trigger_service.integration = integration + trigger_service.save() + action_node = data_fixture.create_local_baserow_update_row_action_node( + workflow=workflow, + service=data_fixture.create_local_baserow_upsert_row_service( + table=action_table, + integration=integration, + row_id=action_table_row.id, + ), + ) + action_node.service.field_mappings.create( + field=action_table_field, value="'Badger'" + ) + + dispatch_context = AutomationDispatchContext(workflow, {}) + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + action_table_row.refresh_from_db() + assert getattr(action_table_row, action_table_field.db_column) == "Badger" + assert dispatch_context.dispatch_history == [trigger.id, action_node.id] + + +@pytest.mark.django_db +def test_run_workflow_with_delete_row_action(data_fixture): + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + integration = data_fixture.create_local_baserow_integration(user=user) + database = data_fixture.create_database_application(workspace=workspace) + trigger_table = data_fixture.create_database_table(database=database) + action_table = data_fixture.create_database_table(database=database) + action_table_field = data_fixture.create_text_field(table=action_table) + action_table_row = action_table.get_model().objects.create( + **{f"field_{action_table_field.id}": "Mouse"} + ) + workflow = 
data_fixture.create_automation_workflow( + user=user, state=WorkflowState.LIVE + ) + trigger = workflow.get_trigger() + trigger_service = trigger.service.specific + trigger_service.table = trigger_table + trigger_service.integration = integration + trigger_service.save() + action_node = data_fixture.create_local_baserow_delete_row_action_node( + workflow=workflow, + service=data_fixture.create_local_baserow_delete_row_service( + table=action_table, + integration=integration, + row_id=action_table_row.id, + ), + ) + + assert action_table.get_model().objects.all().count() == 1 + + dispatch_context = AutomationDispatchContext(workflow, {}) + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + assert action_table.get_model().objects.all().count() == 0 + assert dispatch_context.dispatch_history == [trigger.id, action_node.id] + + +@pytest.mark.django_db +def test_run_workflow_with_router_action(data_fixture): + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + integration = data_fixture.create_local_baserow_integration(user=user) + database = data_fixture.create_database_application(workspace=workspace) + trigger_table = data_fixture.create_database_table(database=database) + workflow = data_fixture.create_automation_workflow( + user=user, + state=WorkflowState.LIVE, + trigger_service_kwargs={"table": trigger_table, "integration": integration}, + ) + + trigger = workflow.get_trigger() + + router_node = data_fixture.create_core_router_action_node( + workflow=workflow, + ) + + data_fixture.create_core_router_service_edge( + service=router_node.service, label="Edge 1", condition="'false'" + ) + + action_table = data_fixture.create_database_table(database=database) + action_table_field = data_fixture.create_text_field(table=action_table) + action_table_row = action_table.get_model().objects.create( + **{f"field_{action_table_field.id}": "Horse"} + ) + edge2 = 
data_fixture.create_core_router_service_edge( + service=router_node.service, + label="Edge 2", + condition="'true'", + skip_output_node=True, + ) + edge2_output_node = data_fixture.create_local_baserow_update_row_action_node( + workflow=workflow, + reference_node=router_node, + position="south", + output=edge2.uid, + service_kwargs={ + "table": action_table, + "integration": integration, + "row_id": action_table_row.id, + }, + ) + edge2_output_node.service.field_mappings.create( + field=action_table_field, value="'Badger'" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": {"Edge 1": ["Edge 1 output node"], "Edge 2": ["update_row"]} + }, + "Edge 1 output node": {}, + "update_row": {}, + } + ) + + dispatch_context = AutomationDispatchContext(workflow, {}) + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + action_table_row.refresh_from_db() + assert getattr(action_table_row, action_table_field.db_column) == "Badger" + assert dispatch_context.dispatch_history == [ + trigger.id, + router_node.id, + edge2_output_node.id, + ] + + +@pytest.fixture +def iterator_graph_fixture(data_fixture): + """ + Fixture that creates the following graph: + rows_created -> iterator [ -> create_row -> create_row3 ] -> create_row2 + + trigger sample data are + [ + {"field_1": "value 1", "field_2": "other 1"}, + {"field_1": "value 2", "field_2": "other 2"}, + ] + """ + + user = data_fixture.create_user() + + trigger_table, trigger_table_fields, _ = data_fixture.build_table( + user=user, + columns=[("Name", "text")], + rows=[], + ) + + action_table, action_table_fields, _ = data_fixture.build_table( + user=user, + columns=[("Name", "text")], + rows=[], + ) + action2_table, action2_table_fields, _ = data_fixture.build_table( + user=user, + columns=[("Name", "text")], + rows=[], + ) + action3_table, action3_table_fields, _ = data_fixture.build_table( + user=user, + 
columns=[("Name", "text")], + rows=[], + ) + + integration = data_fixture.create_local_baserow_integration(user=user) + + workflow = data_fixture.create_automation_workflow( + user=user, + state=WorkflowState.LIVE, + trigger_type="rows_created", + trigger_service_kwargs={ + "table": trigger_table, + "integration": integration, + "sample_data": { + "data": { + "results": [ + {"field_1": "value 1", "field_2": "other 1"}, + {"field_1": "value 2", "field_2": "other 2"}, + ] + } + }, + }, + ) + + trigger = workflow.get_trigger() + + iterator_node = data_fixture.create_core_iterator_action_node( + workflow=workflow, + reference_node=trigger, + position="south", + output="", + service_kwargs={ + "source": f'get("previous_node.{trigger.id}")', + "integration": integration, + }, + ) + + action_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + reference_node=iterator_node, + position="child", + output="", + label="First action", + service_kwargs={"table": action_table, "integration": integration}, + ) + action_node.service.specific.field_mappings.create( + field=action_table_fields[0], + value=f'get("current_iteration.{iterator_node.id}.item.field_1")', + ) + + action2_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + reference_node=iterator_node, + position="south", + output="", + label="After iterator", + service_kwargs={"table": action2_table, "integration": integration}, + ) + action2_node.service.specific.field_mappings.create( + field=action2_table_fields[0], + value=f'get("previous_node.{iterator_node.id}.*.field_1")', + ) + + action3_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + reference_node=action_node, + position="south", + output="", + label="Second action", + service_kwargs={"table": action3_table, "integration": integration}, + ) + action3_node.service.specific.field_mappings.create( + field=action3_table_fields[0], + 
value=f'get("current_iteration.{iterator_node.id}.item.field_2")', + ) + + return { + "workflow": workflow, + "action_node": action_node, + "action_table": action_table, + "action_table_fields": action_table_fields, + "action2_table": action2_table, + "action2_table_fields": action2_table_fields, + "action3_table": action3_table, + "action3_table_fields": action3_table_fields, + } + + +@pytest.mark.django_db +def test_run_workflow_with_iterator_action(iterator_graph_fixture): + workflow = iterator_graph_fixture["workflow"] + action_table = iterator_graph_fixture["action_table"] + action_table_fields = iterator_graph_fixture["action_table_fields"] + action2_table = iterator_graph_fixture["action2_table"] + action2_table_fields = iterator_graph_fixture["action2_table_fields"] + action3_table = iterator_graph_fixture["action3_table"] + action3_table_fields = iterator_graph_fixture["action3_table_fields"] + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["iterator"]}}, + "iterator": { + "children": ["First action"], + "next": {"": ["After iterator"]}, + }, + "First action": {"next": {"": ["Second action"]}}, + "Second action": {}, + "After iterator": {}, + } + ) + + dispatch_context = AutomationDispatchContext( + workflow, + { + "results": [ + {"field_1": "value 1", "field_2": "other 1"}, + {"field_1": "value 2", "field_2": "other 2"}, + ] + }, + ) + + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + # At this point all node should have been executed + rows = list(action_table.get_model().objects.all()) + assert len(rows) == 2 + + assert getattr(rows[0], action_table_fields[0].db_column) == "value 1" + assert getattr(rows[1], action_table_fields[0].db_column) == "value 2" + + rows2 = list(action2_table.get_model().objects.all()) + assert len(rows2) == 1 + assert getattr(rows2[0], action2_table_fields[0].db_column) == "value 1,value 2" + + rows3 = list(action3_table.get_model().objects.all()) + 
assert len(rows3) == 2 + + assert getattr(rows3[0], action3_table_fields[0].db_column) == "other 1" + assert getattr(rows3[1], action3_table_fields[0].db_column) == "other 2" + + +@pytest.mark.django_db +def test_run_workflow_with_iterator_action_simulate(iterator_graph_fixture): + workflow = iterator_graph_fixture["workflow"] + action_node = iterator_graph_fixture["action_node"] + action_table = iterator_graph_fixture["action_table"] + action_table_fields = iterator_graph_fixture["action_table_fields"] + action2_table = iterator_graph_fixture["action2_table"] + action3_table = iterator_graph_fixture["action3_table"] + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["iterator"]}}, + "iterator": { + "children": ["First action"], + "next": {"": ["After iterator"]}, + }, + "First action": {"next": {"": ["Second action"]}}, + "Second action": {}, + "After iterator": {}, + } + ) + + dispatch_context = AutomationDispatchContext( + workflow, + simulate_until_node=action_node, + ) + AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) + + # At this point only nodes until the action_node should have been executed + rows = list(action_table.get_model().objects.all()) + assert len(rows) == 1 + + assert getattr(rows[0], action_table_fields[0].db_column) == "value 1" + + rows2 = list(action2_table.get_model().objects.all()) + assert len(rows2) == 0 + + rows3 = list(action3_table.get_model().objects.all()) + assert len(rows3) == 0 diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py b/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py index 068adeb797..dfa62b010b 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_handler.py @@ -5,14 +5,12 @@ from baserow.contrib.automation.automation_dispatch_context import ( AutomationDispatchContext, ) -from 
baserow.contrib.automation.nodes.exceptions import ( - AutomationNodeDoesNotExist, - AutomationNodeNotInWorkflow, -) +from baserow.contrib.automation.nodes.exceptions import AutomationNodeDoesNotExist from baserow.contrib.automation.nodes.handler import AutomationNodeHandler -from baserow.contrib.automation.nodes.models import LocalBaserowCreateRowActionNode +from baserow.contrib.automation.nodes.models import LocalBaserowRowsCreatedTriggerNode from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.integrations.local_baserow.models import LocalBaserowRowsCreated +from baserow.core.cache import local_cache from baserow.core.trash.handler import TrashHandler from baserow.core.utils import MirrorDict from baserow.test_utils.helpers import AnyDict, AnyInt, AnyStr @@ -21,64 +19,22 @@ @pytest.mark.django_db def test_create_node(data_fixture): user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow() + workflow = data_fixture.create_automation_workflow(create_trigger=False) - node_type = automation_node_type_registry.get("create_row") - prepared_values = node_type.prepare_values({}, user) + node_type = automation_node_type_registry.get("rows_created") + prepared_values = node_type.prepare_values({"workflow": workflow}, user) - node = AutomationNodeHandler().create_node( - node_type, workflow=workflow, **prepared_values - ) + node = AutomationNodeHandler().create_node(node_type, **prepared_values) - assert isinstance(node, LocalBaserowCreateRowActionNode) + assert isinstance(node, LocalBaserowRowsCreatedTriggerNode) @pytest.mark.django_db -def test_create_node_at_the_end(data_fixture): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) - node_type = automation_node_type_registry.get("create_row") - - prepared_values = node_type.prepare_values({}, user) - - node = AutomationNodeHandler().create_node( - 
node_type, workflow=workflow, **prepared_values - ) - - assert node.previous_node.id == trigger.id - - -@pytest.mark.django_db -def test_create_node_applies_previous_node_id(data_fixture): +def test_get_nodes(data_fixture, django_assert_num_queries): workflow = data_fixture.create_automation_workflow() trigger = workflow.get_trigger() - first = data_fixture.create_local_baserow_create_row_action_node(workflow=workflow) - second = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, - ) - - assert trigger.previous_node_id is None - assert first.previous_node_id == trigger.id - assert second.previous_node_id == first.id - - before_second = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, before=second - ) - trigger.refresh_from_db() - first.refresh_from_db() - second.refresh_from_db() - - assert trigger.previous_node_id is None - assert first.previous_node_id == trigger.id - assert before_second.previous_node_id == first.id - assert second.previous_node_id == before_second.id - -@pytest.mark.django_db -def test_get_nodes(data_fixture, django_assert_num_queries): - workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) + local_cache.clear() with django_assert_num_queries(1): nodes_qs = AutomationNodeHandler().get_nodes(workflow, specific=False) @@ -93,7 +49,7 @@ def test_get_nodes(data_fixture, django_assert_num_queries): @pytest.mark.django_db def test_get_nodes_excludes_trashed_application(data_fixture): user = data_fixture.create_user() - node = data_fixture.create_local_baserow_rows_created_trigger_node() + node = data_fixture.create_automation_node() workflow = node.workflow automation = workflow.automation @@ -133,13 +89,11 @@ def test_update_node(data_fixture): user = data_fixture.create_user() node = data_fixture.create_automation_node(user=user) - assert node.previous_node_output == "" + assert node.label == "" - updated_node = 
AutomationNodeHandler().update_node( - node, previous_node_output="foo result" - ) + updated_node = AutomationNodeHandler().update_node(node, label="foo result") - assert updated_node.previous_node_output == "foo result" + assert updated_node.label == "foo result" @pytest.mark.django_db @@ -152,90 +106,23 @@ def test_export_prepared_values(data_fixture): "label": "My node", "service": AnyDict(), "workflow": node.workflow_id, - "previous_node_id": node.previous_node_id, - "previous_node_output": node.previous_node_output, } -@pytest.mark.django_db -def test_get_nodes_order(data_fixture): - workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) - node_1 = data_fixture.create_automation_node(workflow=workflow) - node_2 = data_fixture.create_automation_node(workflow=workflow) - - order = AutomationNodeHandler().get_nodes_order(workflow) - - assert order == [trigger.id, node_1.id, node_2.id] - - -@pytest.mark.django_db -def test_order_nodes(data_fixture): - workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) - node_1 = data_fixture.create_automation_node(workflow=workflow) - node_2 = data_fixture.create_automation_node(workflow=workflow) - - order = AutomationNodeHandler().get_nodes_order(workflow) - assert order == [trigger.id, node_1.id, node_2.id] - - new_order = AutomationNodeHandler().order_nodes( - workflow, [trigger.id, node_2.id, node_1.id] - ) - assert new_order == [trigger.id, node_2.id, node_1.id] - - order = AutomationNodeHandler().get_nodes_order(workflow) - assert order == [trigger.id, node_2.id, node_1.id] - - -@pytest.mark.django_db -def test_order_nodes_excludes_trashed_application(data_fixture): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow() - node_1 = data_fixture.create_automation_node(workflow=workflow) - node_2 = data_fixture.create_automation_node(workflow=workflow) - automation = workflow.automation - - 
TrashHandler.trash(user, automation.workspace, automation, automation) - - with pytest.raises(AutomationNodeNotInWorkflow) as e: - AutomationNodeHandler().order_nodes(workflow, [node_2.id, node_1.id]) - - assert str(e.value) == f"The node {node_2.id} does not belong to the workflow." - - -@pytest.mark.django_db -def test_order_nodes_invalid_node(data_fixture): - workflow_1 = data_fixture.create_automation_workflow() - node_1 = data_fixture.create_automation_node(workflow=workflow_1) - workflow_2 = data_fixture.create_automation_workflow() - node_2 = data_fixture.create_automation_node(workflow=workflow_2) - - with pytest.raises(AutomationNodeNotInWorkflow) as e: - AutomationNodeHandler().order_nodes(workflow_1, [node_2.id, node_1.id]) - - assert str(e.value) == f"The node {node_2.id} does not belong to the workflow." - - @pytest.mark.django_db def test_duplicate_node(data_fixture): workflow = data_fixture.create_automation_workflow() action1 = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + workflow=workflow, label="test" ) - action2 = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, - ) - duplication = AutomationNodeHandler().duplicate_node(action1) - action2.refresh_from_db() - assert duplication.duplicated_node.previous_node_id == action1.id - assert action2.previous_node_id == duplication.duplicated_node.id + duplicated_node = AutomationNodeHandler().duplicate_node(action1) + + assert duplicated_node.label == "test" @pytest.mark.django_db def test_export_node(data_fixture): workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) node = data_fixture.create_automation_node( workflow=workflow, ) @@ -245,10 +132,6 @@ def test_export_node(data_fixture): assert result == { "id": node.id, "label": node.label, - "order": str(node.order), - "parent_node_id": None, - "previous_node_id": trigger.id, - "previous_node_output": "", "service": AnyDict(), "type": 
"create_row", "workflow_id": node.workflow.id, @@ -258,19 +141,21 @@ def test_export_node(data_fixture): @pytest.mark.django_db def test_import_node(data_fixture): workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node(workflow=workflow) - assert workflow.automation_workflow_nodes.contains(trigger) + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) exported_node = AutomationNodeHandler().export_node(node) + exported_node["label"] = "Imported" id_mapping = { "integrations": MirrorDict(), "automation_workflow_nodes": MirrorDict(), } result = AutomationNodeHandler().import_node(workflow, exported_node, id_mapping) - assert workflow.automation_workflow_nodes.contains(trigger) + + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(result.automationnode_ptr) @@ -278,19 +163,20 @@ def test_import_node(data_fixture): @pytest.mark.django_db def test_import_nodes(data_fixture): workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node(workflow=workflow) - assert workflow.automation_workflow_nodes.contains(trigger) + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) exported_node = AutomationNodeHandler().export_node(node) + exported_node["label"] = "Imported" id_mapping = { "integrations": MirrorDict(), "automation_workflow_nodes": MirrorDict(), } result = AutomationNodeHandler().import_nodes(workflow, [exported_node], id_mapping) - assert 
workflow.automation_workflow_nodes.contains(trigger) + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(result[0].automationnode_ptr) @@ -298,12 +184,14 @@ def test_import_nodes(data_fixture): @pytest.mark.django_db def test_import_node_only(data_fixture): workflow = data_fixture.create_automation_workflow() - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() node = data_fixture.create_automation_node(workflow=workflow) - assert workflow.automation_workflow_nodes.contains(trigger) + + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) exported_node = AutomationNodeHandler().export_node(node) + exported_node["label"] = "Imported" id_mapping = { "integrations": MirrorDict(), "automation_workflow_nodes": MirrorDict(), @@ -311,7 +199,7 @@ def test_import_node_only(data_fixture): new_node = AutomationNodeHandler().import_node_only( workflow, exported_node, id_mapping ) - assert workflow.automation_workflow_nodes.contains(trigger) + assert workflow.automation_workflow_nodes.contains(trigger.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(node.automationnode_ptr) assert workflow.automation_workflow_nodes.contains(new_node.automationnode_ptr) @@ -326,21 +214,21 @@ def test_import_node_only(data_fixture): @pytest.mark.django_db def test_simulate_dispatch_node_trigger(data_fixture): user, _ = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user=user) - database = data_fixture.create_database_application( - user=user, workspace=workflow.automation.workspace - ) + database = data_fixture.create_database_application(user=user) table, fields, _ = data_fixture.build_table( user=user, columns=[("Name", "text")], rows=[], 
database=database ) - trigger_node = data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=workflow, service_kwargs={"table": table} + workflow = data_fixture.create_automation_workflow( + user=user, trigger_service_kwargs={"table": table} ) + + trigger_node = workflow.get_trigger() + assert workflow.simulate_until_node is None + action_node = data_fixture.create_automation_node( workflow=workflow, type="create_row", - previous_node_id=trigger_node.id, ) # Set initial fake data for the action_node, since we want to test @@ -553,7 +441,7 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): ) router_a = data_fixture.create_core_router_action_node( - workflow=workflow, previous_node_id=trigger_node.id + workflow=workflow, label="Router A" ) router_a_edge_1 = data_fixture.create_core_router_service_edge( service=router_a.service, @@ -570,8 +458,10 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): router_b = data_fixture.create_core_router_action_node( workflow=workflow, - previous_node_id=router_a.id, - previous_node_output=router_a_edge_1.uid, + reference_node=router_a, + position="south", + output=router_a_edge_1.uid, + label="Router B", ) router_b_edge_1 = data_fixture.create_core_router_service_edge( service=router_b.service, @@ -590,14 +480,20 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): data_fixture, user, workflow.automation, "apple" ) node_b = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, service=node_b_service, previous_node_id=router_a.id + workflow=workflow, + service=node_b_service, + reference_node=router_a, + label="Create row A", ) node_c_1_service = create_action_node_service( data_fixture, user, workflow.automation, "banana" ) node_c_1 = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, service=node_c_1_service, previous_node_id=router_b.id + workflow=workflow, + 
service=node_c_1_service, + reference_node=router_b, + label="Create row B", ) node_c_2_service = create_action_node_service( data_fixture, user, workflow.automation, "cherry" @@ -605,8 +501,10 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): node_c_2 = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow, service=node_c_2_service, - previous_node_id=router_b.id, - previous_node_output=router_b_edge_2.uid, + reference_node=router_b, + position="south", + output=str(router_b_edge_2.uid), + label="Create row B, on edge", ) nodes = [trigger_node, router_a, router_b, node_b, node_c_1, node_c_2] @@ -617,6 +515,25 @@ def test_simulate_dispatch_node_dispatches_correct_edge_node(data_fixture): workflow, None, simulate_until_node=node_c_2 ) + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["Router A"]}}, + "Router A": { + "next": {"": ["Create row A"], "Router A, Edge 1": ["Router B"]} + }, + "Create row A": {}, + "Create row B": {}, + "Create row B, on edge": {}, + "Router B": { + "next": { + "": ["Create row B"], + "Router B, Edge 2": ["Create row B, on edge"], + } + }, + } + ) + AutomationNodeHandler().dispatch_node(node_c_2, dispatch_context) # node_c_2 is intentionally excluded. 
here diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_models.py b/backend/tests/baserow/contrib/automation/nodes/test_node_models.py index ba9ca468ae..f0929e10df 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_models.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_models.py @@ -30,7 +30,7 @@ def test_get_previous_service_outputs(data_fixture): trigger = workflow.get_trigger() router_a = data_fixture.create_core_router_action_node( - workflow=workflow, previous_node_id=trigger.id + workflow=workflow, label="router a" ) router_a_edge_1 = data_fixture.create_core_router_service_edge( service=router_a.service, @@ -47,9 +47,12 @@ def test_get_previous_service_outputs(data_fixture): router_b = data_fixture.create_core_router_action_node( workflow=workflow, - previous_node_id=router_a.id, - previous_node_output=router_a_edge_1.uid, + reference_node=router_a, + position="south", + output=router_a_edge_1.uid, + label="router b", ) + data_fixture.create_core_router_service_edge( service=router_b.service, label="Router B, Edge 1", @@ -64,16 +67,42 @@ def test_get_previous_service_outputs(data_fixture): ) data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, previous_node_id=router_a.id + workflow=workflow, + reference_node=router_a, + position="south", + output="", + label="action a", ) data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, previous_node_id=router_b.id + workflow=workflow, + reference_node=router_b, + position="south", + output="", + label="action b", ) node_c_2 = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow, - previous_node_id=router_b.id, - previous_node_output=router_b_edge_2.uid, + reference_node=router_b, + position="south", + output=router_b_edge_2.uid, + label="action b on edge", + ) + + # TODO add a container + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router a"]}}, 
+ "router a": {"next": {"": ["action a"], "Router A, Edge 1": ["router b"]}}, + "action a": {}, + "router b": { + "next": {"": ["action b"], "Router B, Edge 2": ["action b on edge"]} + }, + "action b": {}, + "action b on edge": {}, + } ) result = node_c_2.get_previous_service_outputs() diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_service.py b/backend/tests/baserow/contrib/automation/nodes/test_node_service.py index bdbf88087a..32c2bf32a8 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_service.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_service.py @@ -3,69 +3,132 @@ import pytest from baserow.contrib.automation.nodes.exceptions import ( - AutomationNodeBeforeInvalid, AutomationNodeDoesNotExist, AutomationNodeNotMovable, + AutomationNodeReferenceNodeInvalid, ) -from baserow.contrib.automation.nodes.handler import AutomationNodeHandler from baserow.contrib.automation.nodes.models import LocalBaserowCreateRowActionNode -from baserow.contrib.automation.nodes.registries import ( - ReplaceAutomationNodeTrashOperationType, - automation_node_type_registry, -) +from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.nodes.service import AutomationNodeService from baserow.contrib.automation.nodes.trash_types import AutomationNodeTrashableItemType -from baserow.contrib.automation.nodes.types import NextAutomationNodeValues from baserow.core.exceptions import UserNotInWorkspace from baserow.core.trash.handler import TrashHandler +from baserow.test_utils.fixtures import Fixtures SERVICE_PATH = "baserow.contrib.automation.nodes.service" @patch(f"{SERVICE_PATH}.automation_node_created") @pytest.mark.django_db -def test_create_node(mocked_signal, data_fixture): +def test_create_node(mocked_signal, data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) node_type = 
automation_node_type_registry.get("create_row") service = AutomationNodeService() - node = service.create_node(user, node_type, workflow) + node = service.create_node( + user, + node_type, + workflow, + reference_node_id=workflow.get_trigger().id, + position="south", + output="", + ) + + workflow.assert_reference( + { + "0": "rows_created", + "create_row": {}, + "rows_created": {"next": {"": ["create_row"]}}, + } + ) assert isinstance(node, LocalBaserowCreateRowActionNode) + mocked_signal.send.assert_called_once_with(service, node=node, user=user) +@patch(f"{SERVICE_PATH}.automation_node_created") @pytest.mark.django_db -def test_create_node_before_invalid(data_fixture): +def test_create_node_as_child(mocked_signal, data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - workflow_b = data_fixture.create_automation_workflow(user) - node1_b = workflow_b.get_trigger(specific=False) - node2_b = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow_b + iterator = data_fixture.create_core_iterator_action_node(workflow=workflow) + node_type = automation_node_type_registry.get("create_row") + + service = AutomationNodeService() + node = service.create_node( + user, + node_type, + workflow, + reference_node_id=iterator.id, + position="child", + output="", + ) + + workflow.assert_reference( + { + "0": "rows_created", + "create_row": {}, + "iterator": {"children": ["create_row"]}, + "rows_created": {"next": {"": ["iterator"]}}, + } + ) + + assert isinstance(node, LocalBaserowCreateRowActionNode) + mocked_signal.send.assert_called_once_with(service, node=node, user=user) + + +@pytest.mark.django_db +def test_create_node_as_child_not_in_container(data_fixture: Fixtures): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user) + create_row = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow ) node_type = 
automation_node_type_registry.get("create_row") - with pytest.raises(AutomationNodeBeforeInvalid) as exc: - AutomationNodeService().create_node( - user, node_type, workflow=workflow, before=node2_b + service = AutomationNodeService() + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: + service.create_node( + user, + node_type, + workflow, + reference_node_id=create_row.id, + position="child", + output="", ) - assert ( - exc.value.args[0] - == "The `before` node must belong to the same workflow as the one supplied." - ) - with pytest.raises(AutomationNodeBeforeInvalid) as exc: + assert exc.value.args[0] == f"The reference node {create_row.id} can't have child" + + +@pytest.mark.django_db +def test_create_node_reference_node_invalid(data_fixture: Fixtures): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user) + workflow_b = data_fixture.create_automation_workflow(user) + node1_b = workflow_b.get_trigger() + node2_b = data_fixture.create_automation_node(workflow=workflow_b) + + node_type = automation_node_type_registry.get("create_row") + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: AutomationNodeService().create_node( - user, node_type, workflow=workflow_b, before=node1_b + user, + node_type, + workflow=workflow, + reference_node_id=node2_b.id, + position="south", + output="", ) - assert exc.value.args[0] == "You cannot create an automation node before a trigger." 
+ + assert exc.value.args[0] == f"The reference node {node2_b.id} doesn't exist" @pytest.mark.django_db -def test_create_node_permission_error(data_fixture): +def test_create_node_permission_error(data_fixture: Fixtures): workflow = data_fixture.create_automation_workflow() node_type = automation_node_type_registry.get("create_row") another_user = data_fixture.create_user() @@ -80,7 +143,7 @@ def test_create_node_permission_error(data_fixture): @pytest.mark.django_db -def test_get_node(data_fixture): +def test_get_node(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) node = data_fixture.create_automation_node(user=user, workflow=workflow) @@ -91,7 +154,7 @@ def test_get_node(data_fixture): @pytest.mark.django_db -def test_get_node_invalid_node_id(data_fixture): +def test_get_node_invalid_node_id(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() with pytest.raises(AutomationNodeDoesNotExist) as e: @@ -101,7 +164,7 @@ def test_get_node_invalid_node_id(data_fixture): @pytest.mark.django_db -def test_get_node_permission_error(data_fixture): +def test_get_node_permission_error(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() another_user, _ = data_fixture.create_user_and_token() node = data_fixture.create_automation_node(user=user) @@ -116,7 +179,7 @@ def test_get_node_permission_error(data_fixture): @pytest.mark.django_db -def test_get_nodes(data_fixture): +def test_get_nodes(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) trigger = workflow.get_trigger() @@ -125,7 +188,7 @@ def test_get_nodes(data_fixture): @pytest.mark.django_db -def test_get_nodes_permission_error(data_fixture): +def test_get_nodes_permission_error(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() another_user, _ = data_fixture.create_user_and_token() workflow = 
data_fixture.create_automation_workflow(user) @@ -141,17 +204,16 @@ def test_get_nodes_permission_error(data_fixture): @patch(f"{SERVICE_PATH}.automation_node_updated") @pytest.mark.django_db -def test_update_node(mocked_signal, data_fixture): +def test_update_node(mocked_signal, data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) node = data_fixture.create_automation_node(user=user, workflow=workflow) - assert node.previous_node_output == "" service = AutomationNodeService() - updated_node = service.update_node(user, node.id, previous_node_output="foo") + updated_node = service.update_node(user, node.id, label="foo") node.refresh_from_db() - assert node.previous_node_output == "foo" + assert node.label == "foo" mocked_signal.send.assert_called_once_with( service, user=user, node=updated_node.node @@ -159,25 +221,23 @@ def test_update_node(mocked_signal, data_fixture): @pytest.mark.django_db -def test_update_node_invalid_node_id(data_fixture): +def test_update_node_invalid_node_id(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() with pytest.raises(AutomationNodeDoesNotExist) as e: - AutomationNodeService().update_node(user, 100, previous_node_output="foo") + AutomationNodeService().update_node(user, 100, label="foo") assert str(e.value) == "The node 100 does not exist." 
@pytest.mark.django_db -def test_update_node_permission_error(data_fixture): +def test_update_node_permission_error(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() another_user, _ = data_fixture.create_user_and_token() node = data_fixture.create_automation_node(user=user) with pytest.raises(UserNotInWorkspace) as e: - AutomationNodeService().update_node( - another_user, node.id, previous_node_output="foo" - ) + AutomationNodeService().update_node(another_user, node.id, label="foo") assert str(e.value) == ( f"User {another_user.email} doesn't belong to " @@ -187,7 +247,7 @@ def test_update_node_permission_error(data_fixture): @patch(f"{SERVICE_PATH}.automation_node_deleted") @pytest.mark.django_db -def test_delete_node(mocked_signal, data_fixture): +def test_delete_node(mocked_signal, data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) node = data_fixture.create_automation_node(user=user, workflow=workflow) @@ -209,26 +269,7 @@ def test_delete_node(mocked_signal, data_fixture): @pytest.mark.django_db -def test_delete_node_with_managed_trash_entry(data_fixture): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow(user) - node = data_fixture.create_automation_node(user=user, workflow=workflow) - - AutomationNodeService().delete_node( - user, node.id, trash_operation_type=ReplaceAutomationNodeTrashOperationType.type - ) - node.refresh_from_db() - assert node.trashed - - trash_entry = TrashHandler.get_trash_entry( - AutomationNodeTrashableItemType.type, - node.id, - ) - assert trash_entry.managed - - -@pytest.mark.django_db -def test_delete_node_invalid_node_id(data_fixture): +def test_delete_node_invalid_node_id(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() with pytest.raises(AutomationNodeDoesNotExist) as e: @@ -238,7 +279,7 @@ def test_delete_node_invalid_node_id(data_fixture): @pytest.mark.django_db -def 
test_delete_node_permission_error(data_fixture): +def test_delete_node_permission_error(data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() another_user, _ = data_fixture.create_user_and_token() node = data_fixture.create_automation_node(user=user) @@ -252,77 +293,41 @@ def test_delete_node_permission_error(data_fixture): ) -@patch(f"{SERVICE_PATH}.automation_nodes_reordered") +@patch(f"{SERVICE_PATH}.automation_node_created") @pytest.mark.django_db -def test_order_nodes(mocked_signal, data_fixture): +def test_duplicate_node(mocked_signal, data_fixture: Fixtures): user, _ = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - node_1 = data_fixture.create_automation_node(user=user, workflow=workflow) - node_2 = data_fixture.create_automation_node(user=user, workflow=workflow) - - node_order = AutomationNodeHandler().get_nodes_order(workflow) - assert node_order == [trigger.id, node_1.id, node_2.id] + node = data_fixture.create_automation_node(workflow=workflow, label="test") service = AutomationNodeService() - new_order = service.order_nodes(user, workflow, [trigger.id, node_2.id, node_1.id]) - assert new_order == [trigger.id, node_2.id, node_1.id] + duplicated_node = service.duplicate_node(user, node.id) - node_order = AutomationNodeHandler().get_nodes_order(workflow) - assert node_order == [trigger.id, node_2.id, node_1.id] - mocked_signal.send.assert_called_once_with( - service, workflow=workflow, order=node_order, user=user - ) - - -@pytest.mark.django_db -def test_order_nodes_permission_error(data_fixture): - user, _ = data_fixture.create_user_and_token() - another_user, _ = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - node_1 = data_fixture.create_automation_node(user=user, workflow=workflow) - node_2 = data_fixture.create_automation_node(user=user, workflow=workflow) - - with 
pytest.raises(UserNotInWorkspace) as e: - AutomationNodeService().order_nodes( - another_user, workflow, [node_2.id, node_1.id] - ) + assert duplicated_node == workflow.automation_workflow_nodes.all()[2].specific - assert str(e.value) == ( - f"User {another_user.email} doesn't belong to " - f"workspace {workflow.automation.workspace}." + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["test"]}}, + "test": {"next": {"": ["test-"]}}, + "test-": {}, + } ) + assert duplicated_node.label == "test" -@patch(f"{SERVICE_PATH}.automation_node_created") -@pytest.mark.django_db -def test_duplicate_node(mocked_signal, data_fixture): - user, _ = data_fixture.create_user_and_token() - workflow = data_fixture.create_automation_workflow(user) - node = data_fixture.create_automation_node(workflow=workflow) - - service = AutomationNodeService() - duplication = service.duplicate_node(user, node) - - assert ( - duplication.duplicated_node - == workflow.automation_workflow_nodes.all()[2].specific - ) - - mocked_signal.send.assert_called_once_with( - service, node=duplication.duplicated_node, user=user - ) + mocked_signal.send.assert_called_once_with(service, node=duplicated_node, user=user) @pytest.mark.django_db -def test_duplicate_node_permission_error(data_fixture): +def test_duplicate_node_permission_error(data_fixture: Fixtures): user = data_fixture.create_user() another_user, _ = data_fixture.create_user_and_token() workflow = data_fixture.create_automation_workflow(user) node = data_fixture.create_automation_node(user=user, workflow=workflow) with pytest.raises(UserNotInWorkspace) as e: - AutomationNodeService().duplicate_node(another_user, node) + AutomationNodeService().duplicate_node(another_user, node.id) assert str(e.value) == ( f"User {another_user.email} doesn't belong to " @@ -331,10 +336,10 @@ def test_duplicate_node_permission_error(data_fixture): @pytest.mark.django_db -def test_replace_simple_node(data_fixture): +def 
test_replace_simple_node(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() original_node = data_fixture.create_automation_node(workflow=workflow) node_type = automation_node_type_registry.get("update_row") @@ -342,20 +347,28 @@ def test_replace_simple_node(data_fixture): replace_result = AutomationNodeService().replace_node( user, original_node.id, node_type.type ) + original_node.refresh_from_db() assert original_node.trashed - assert replace_result.node.get_type() == node_type - assert replace_result.node.previous_node_id == trigger.id - assert replace_result.original_node_id == original_node.id - assert replace_result.original_node_type == "create_row" + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["update_row"]}}, + "update_row": {}, + } + ) + + replace_result.node.id != original_node.id + replace_result.original_node_type == original_node.get_type().type + replace_result.original_node_id == original_node.id @pytest.mark.django_db -def test_replace_node_in_first(data_fixture): +def test_replace_node_in_first(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() first_node = data_fixture.create_automation_node(workflow=workflow) second_node = data_fixture.create_automation_node(workflow=workflow) last_node = data_fixture.create_automation_node( @@ -369,24 +382,25 @@ def test_replace_node_in_first(data_fixture): assert workflow.automation_workflow_nodes.count() == 4 - second_node.refresh_from_db() - last_node.refresh_from_db() - - assert replace_result.node.id == second_node.previous_node.id - assert replace_result.node.previous_node_id == trigger.id - assert last_node.previous_node.id == second_node.id - - assert 
second_node.previous_node.get_type().type == "update_row" + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["update_row"]}}, + "update_row": {"next": {"": ["create_row"]}}, + "create_row": {"next": {"": ["create_row-"]}}, + "create_row-": {}, + } + ) @pytest.mark.django_db -def test_replace_node_in_middle(data_fixture): +def test_replace_node_in_middle(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) - first_node = data_fixture.create_automation_node(workflow=workflow) + trigger = workflow.get_trigger() + first_node = data_fixture.create_automation_node(workflow=workflow, label="first") node_to_replace = data_fixture.create_automation_node(workflow=workflow) - last_node = data_fixture.create_automation_node(workflow=workflow) + last_node = data_fixture.create_automation_node(workflow=workflow, label="last") node_type = automation_node_type_registry.get("update_row") @@ -394,119 +408,103 @@ def test_replace_node_in_middle(data_fixture): user, node_to_replace.id, node_type.type ) - assert workflow.automation_workflow_nodes.count() == 4 - - last_node.refresh_from_db() - first_node.refresh_from_db() - - assert replace_result.node.id == last_node.previous_node.id - assert replace_result.node.previous_node.id == first_node.id - assert first_node.previous_node_id == trigger.id + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["first"]}}, + "first": {"next": {"": ["update_row"]}}, + "update_row": {"next": {"": ["last"]}}, + "last": {}, + } + ) - assert last_node.previous_node.get_type().type == "update_row" + assert workflow.automation_workflow_nodes.count() == 4 @pytest.mark.django_db -def test_replace_node_in_last(data_fixture): +def test_replace_node_in_last(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - 
trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() first_node = data_fixture.create_automation_node(workflow=workflow) second_node = data_fixture.create_automation_node(workflow=workflow) last_node = data_fixture.create_automation_node(workflow=workflow) node_type = automation_node_type_registry.get("update_row") - service = AutomationNodeService() - replace_result = service.replace_node(user, last_node.id, node_type.type) - - first_node.refresh_from_db() - second_node.refresh_from_db() - - assert replace_result.node.previous_node.id == second_node.id - assert second_node.previous_node.id == first_node.id - assert first_node.previous_node_id == trigger.id + replace_result = AutomationNodeService().replace_node( + user, last_node.id, node_type.type + ) - assert ( - workflow.automation_workflow_nodes.get(previous_node=second_node) - .get_type() - .type - == "update_row" + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["create_row"]}}, + "create_row": {"next": {"": ["create_row-"]}}, + "create_row-": {"next": {"": ["update_row"]}}, + "update_row": {}, + } ) + assert workflow.automation_workflow_nodes.count() == 4 + @pytest.mark.django_db -def test_move_fixed_node_throws_exception(data_fixture): +def test_move_fixed_node_throws_exception(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() action1 = data_fixture.create_automation_node(workflow=workflow) + with pytest.raises(AutomationNodeNotMovable) as exc: - AutomationNodeService().move_node(user, trigger.id, action1.id) - assert exc.value.args[0] == "This automation node cannot be moved." + AutomationNodeService().move_node(user, trigger.id, action1.id, "south", "") + + assert exc.value.args[0] == "Trigger nodes cannot be moved." 
@pytest.mark.django_db -def test_move_simple_node(data_fixture): +def test_move_simple_node(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) - action1 = data_fixture.create_automation_node(workflow=workflow) + action1 = data_fixture.create_automation_node(workflow=workflow, label="action1") + # <-- to here - action2 = data_fixture.create_automation_node(workflow=workflow) - action3 = data_fixture.create_automation_node(workflow=workflow) # <- from here - action4 = data_fixture.create_automation_node(workflow=workflow) + action2 = data_fixture.create_automation_node(workflow=workflow, label="action2") + action3 = data_fixture.create_automation_node( + workflow=workflow, label="action3" + ) # <- from here + action4 = data_fixture.create_automation_node(workflow=workflow, label="action4") # move `action3` to be after `trigger` - move_result = AutomationNodeService().move_node(user, action3.id, action1.id) + move_result = AutomationNodeService().move_node( + user, action3.id, reference_node_id=action1.id, position="south", output="" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "action1": {"next": {"": ["action3"]}}, + "action3": {"next": {"": ["action2"]}}, + "action2": {"next": {"": ["action4"]}}, + "action4": {}, + "rows_created": {"next": {"": ["action1"]}}, + } + ) # The node we're trying to move is `action3` assert move_result.node == action3 - assert move_result.node.previous_node_id == action1.id - assert move_result.node.previous_node_output == "" - - # The node's origin previous node was `action2` - assert move_result.origin_previous_node_id == action2.id - assert move_result.origin_previous_node_output == "" - - # Before the move, at the origin, `action4` was after `action3` - assert move_result.origin_old_next_nodes_values == [ - NextAutomationNodeValues( - id=action4.id, previous_node_id=action3.id, previous_node_output="" - ) - ] - - # After the move, at the origin, 
`action4` is now after `action2` - assert move_result.origin_new_next_nodes_values == [ - NextAutomationNodeValues( - id=action4.id, previous_node_id=action2.id, previous_node_output="" - ) - ] - - # Before the move, at the destination, `action2` was after `action1`. - assert move_result.destination_old_next_nodes_values == [ - NextAutomationNodeValues( - id=action2.id, previous_node_id=action1.id, previous_node_output="" - ) - ] - - # After the move, at the destination, `action2` is now after `action3`. - assert move_result.destination_new_next_nodes_values == [ - NextAutomationNodeValues( - id=action2.id, previous_node_id=action3.id, previous_node_output="" - ) - ] - - # The node's destination previous node is now `action1`. - assert move_result.destination_previous_node_id == action1.id - assert move_result.destination_previous_node_output == "" + assert move_result.previous_reference_node == action2 + assert move_result.previous_position == "south" + assert move_result.previous_output == "" @pytest.mark.django_db -def test_move_node_to_edge_above_existing_output(data_fixture): +def test_move_node_to_edge_above_existing_output(data_fixture: Fixtures): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) core_router_with_edges = data_fixture.create_core_router_action_node_with_edges( workflow=workflow, + reference_node=workflow.get_trigger(), ) router = core_router_with_edges.router edge1 = core_router_with_edges.edge1 @@ -514,41 +512,168 @@ def test_move_node_to_edge_above_existing_output(data_fixture): edge1_output = core_router_with_edges.edge1_output edge2 = core_router_with_edges.edge2 edge2_output = core_router_with_edges.edge2_output # <- from here + fallback_output_node = core_router_with_edges.fallback_output_node # move `edge2_output` to be *above* `edge1_output` inside `edge1` move_result = AutomationNodeService().move_node( - user, edge2_output.id, router.id, edge1_output.previous_node_output + user, + 
edge2_output.id, + reference_node_id=router.id, + position="south", + output=str(edge1.uid), + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Do this": ["output edge 2"], + "Do that": [], + "Default": ["fallback node"], + } + }, + "output edge 2": {"next": {"": ["output edge 1"]}}, + "output edge 1": {}, + "fallback node": {}, + } ) - # The node we're trying to move is `edge2_output` assert move_result.node == edge2_output - assert move_result.node.previous_node_id == router.id - assert move_result.node.previous_node_output == str(edge1.uid) + assert move_result.previous_reference_node == router + assert move_result.previous_position == "south" + assert move_result.previous_output == str(edge2.uid) + - # The node's origin previous node was `action2` - assert move_result.origin_previous_node_id == router.id - assert move_result.origin_previous_node_output == str(edge2.uid) +@pytest.mark.django_db +def test_move_node_in_container(data_fixture: Fixtures): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user) + action1 = data_fixture.create_automation_node(workflow=workflow, label="action1") + + iterator = data_fixture.create_core_iterator_action_node( + workflow=workflow + ) # <-inside here + action2 = data_fixture.create_automation_node(workflow=workflow, label="action2") + action3 = data_fixture.create_automation_node( + workflow=workflow, label="action3" + ) # <- from here + action4 = data_fixture.create_automation_node(workflow=workflow, label="action4") + + # move `action3` to be the first child of iterator + move_result = AutomationNodeService().move_node( + user, action3.id, reference_node_id=iterator.id, position="child", output="" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["action1"]}}, + "action1": {"next": {"": ["iterator"]}}, + "iterator": {"children": ["action3"], "next": {"": 
["action2"]}}, + "action3": {}, + "action2": {"next": {"": ["action4"]}}, + "action4": {}, + } + ) + + # The node we're trying to move is `action3` + assert move_result.node == action3 + assert move_result.previous_reference_node == action2 + assert move_result.previous_position == "south" + assert move_result.previous_output == "" + + +@pytest.mark.django_db +def test_move_node_outside_of_container(data_fixture: Fixtures): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user) + action1 = data_fixture.create_automation_node(workflow=workflow, label="action1") + + iterator = data_fixture.create_core_iterator_action_node(workflow=workflow) + action2 = data_fixture.create_automation_node( + workflow=workflow, label="action2", reference_node=iterator, position="child" + ) # <- from here + action3 = data_fixture.create_automation_node(workflow=workflow, label="action3") + # <- to here + action4 = data_fixture.create_automation_node(workflow=workflow, label="action4") + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["action1"]}}, + "action1": {"next": {"": ["iterator"]}}, + "iterator": {"children": ["action2"], "next": {"": ["action3"]}}, + "action2": {}, + "action3": {"next": {"": ["action4"]}}, + "action4": {}, + } + ) + + # move `action3` to be the first child of iterator + move_result = AutomationNodeService().move_node( + user, action2.id, reference_node_id=action3.id, position="south", output="" + ) - # Before the move, at the origin, there are no next nodes after `edge2_output`. 
- assert move_result.origin_old_next_nodes_values == [] + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["action1"]}}, + "action1": {"next": {"": ["iterator"]}}, + "iterator": {"children": [], "next": {"": ["action3"]}}, + "action2": {"next": {"": ["action4"]}}, + "action3": {"next": {"": ["action2"]}}, + "action4": {}, + } + ) + + # The node we're trying to move is `action3` + assert move_result.node == action2 + assert move_result.previous_reference_node == iterator + assert move_result.previous_position == "child" + assert move_result.previous_output == "" - # After the move, at the origin, there are still no next nodes. - assert move_result.origin_new_next_nodes_values == [] - # Before the move, at the destination, `edge1_output` was after `router`. - assert move_result.destination_old_next_nodes_values == [ - NextAutomationNodeValues( - id=edge1_output.id, - previous_node_id=router.id, - previous_node_output=str(edge1.uid), +@pytest.mark.django_db +def test_move_node_invalid_reference_node(data_fixture: Fixtures): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user) + action1 = data_fixture.create_automation_node(workflow=workflow, label="action1") + + action2 = data_fixture.create_automation_node(workflow=workflow, label="action2") + action3 = data_fixture.create_automation_node(workflow=workflow, label="action3") + action4 = data_fixture.create_automation_node(workflow=workflow, label="action4") + + workflow_b = data_fixture.create_automation_workflow(user) + node1_b = workflow_b.get_trigger() + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: + AutomationNodeService().move_node( + user, action3.id, reference_node_id=99999999, position="south", output="" ) - ] - - # After the move, at the destination, `edge1_output` is now after `edge2_output`. 
- assert move_result.destination_new_next_nodes_values == [ - NextAutomationNodeValues( - id=edge1_output.id, - previous_node_id=edge2_output.id, - previous_node_output="", + + assert exc.value.args[0] == "The reference node 99999999 doesn't exist" + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: + AutomationNodeService().move_node( + user, action3.id, reference_node_id=action3.id, position="south", output="" + ) + + assert ( + exc.value.args[0] == "The reference node and the moved node must be different" + ) + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: + AutomationNodeService().move_node( + user, action3.id, reference_node_id=node1_b.id, position="south", output="" ) - ] + + assert exc.value.args[0] == f"The reference node {node1_b.id} doesn't exist" + + with pytest.raises(AutomationNodeReferenceNodeInvalid) as exc: + AutomationNodeService().move_node( + user, action3.id, reference_node_id=action2.id, position="child", output="" + ) + + assert exc.value.args[0] == f"The reference node {action2.id} can't have child" diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_trash_types.py b/backend/tests/baserow/contrib/automation/nodes/test_node_trash_types.py index 3510329879..b5a420ccac 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_trash_types.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_trash_types.py @@ -1,41 +1,182 @@ import pytest from baserow.contrib.automation.nodes.trash_types import AutomationNodeTrashableItemType +from baserow.core.trash.exceptions import TrashItemRestorationDisallowed from baserow.core.trash.handler import TrashHandler @pytest.mark.django_db -def test_trashing_and_restoring_node_updates_next_node_previous_node_id(data_fixture): +def test_trashing_and_restoring_node_updates_graph(data_fixture): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user) trigger = workflow.get_trigger() - first = 
data_fixture.create_local_baserow_create_row_action_node(workflow=workflow) + first = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="first action" + ) second = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, + workflow=workflow, label="second action" ) - assert trigger.previous_node_id is None - assert first.previous_node_id == trigger.id - assert second.previous_node_id == first.id + workflow.assert_reference( + { + "0": "rows_created", + "first action": {"next": {"": ["second action"]}}, + "rows_created": {"next": {"": ["first action"]}}, + "second action": {}, + } + ) automation = workflow.automation - TrashHandler.trash(user, automation.workspace, automation, first) + trash_entry = TrashHandler.trash(user, automation.workspace, automation, first) - trigger.refresh_from_db() - second.refresh_from_db() + assert trash_entry.additional_restoration_data == ( + str(trigger.id), + "south", + "", + ) - assert trigger.previous_node_id is None - assert second.previous_node_id == trigger.id + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["second action"]}}, + "second action": {}, + } + ) TrashHandler.restore_item( user, AutomationNodeTrashableItemType.type, first.id, ) + workflow.assert_reference( + { + "0": "rows_created", + "first action": {"next": {"": ["second action"]}}, + "rows_created": {"next": {"": ["first action"]}}, + "second action": {}, + } + ) + + +@pytest.mark.django_db +def test_trashing_and_restoring_node_updates_graph_with_router(data_fixture): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user=user) + + initial_router = data_fixture.create_core_router_action_node( + workflow=workflow, + label="First router", + ) + initial_router_edge = data_fixture.create_core_router_service_edge( + label="To second router", + condition="'true'", + service=initial_router.service, + skip_output_node=True, + ) + + # 
Second router + second_router = data_fixture.create_core_router_action_node( + workflow=workflow, + label="Second router", + reference_node=initial_router, + position="south", + output=initial_router_edge.uid, + ) + + second_router_edge = data_fixture.create_core_router_service_edge( + label="To create row", + condition="'true'", + service=second_router.service, + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["First router"]}}, + "First router": {"next": {"To second router": ["Second router"]}}, + "Second router": {"next": {"To create row": ["To create row output node"]}}, + "To create row output node": {}, + } + ) + + automation = workflow.automation + + trash_entry = TrashHandler.trash( + user, automation.workspace, automation, second_router + ) - trigger.refresh_from_db() - second.refresh_from_db() + assert trash_entry.additional_restoration_data == ( + str(initial_router.id), + "south", + str(initial_router_edge.uid), + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["First router"]}}, + "First router": { + "next": {"To second router": ["To create row output node"]} + }, + "To create row output node": {}, + } + ) + + TrashHandler.restore_item( + user, + AutomationNodeTrashableItemType.type, + second_router.id, + ) + workflow.assert_reference( + { + "0": "rows_created", + "First router": {"next": {"To second router": ["Second router"]}}, + "Second router": {"next": {"": ["To create row output node"]}}, + "To create row output node": {}, + "rows_created": {"next": {"": ["First router"]}}, + } + ) + + +@pytest.mark.django_db +def test_restoring_a_trashed_output_node_after_its_edge_is_destroyed_is_disallowed( + data_fixture, +): + user = data_fixture.create_user() + workflow = data_fixture.create_automation_workflow(user=user) - assert trigger.previous_node_id is None - assert first.previous_node_id == trigger.id - assert second.previous_node_id == first.id + router = 
data_fixture.create_core_router_action_node(workflow=workflow) + + edge = data_fixture.create_core_router_service_edge( + service=router.service, label="Edge 1", condition="'false'" + ) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": {"next": {"Edge 1": ["Edge 1 output node"]}}, + "Edge 1 output node": {}, + } + ) + + output_node = workflow.get_graph().get_node_at_position( + router, "south", str(edge.uid) + ) + + automation = workflow.automation + TrashHandler.trash(user, automation.workspace, automation, output_node) + + edge.delete() + + with pytest.raises(TrashItemRestorationDisallowed) as exc: + TrashHandler.restore_item( + user, + AutomationNodeTrashableItemType.type, + output_node.id, + ) + + assert ( + exc.value.args[0] == "This automation node cannot " + "be restored as its branch has been deleted." + ) diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_types.py b/backend/tests/baserow/contrib/automation/nodes/test_node_types.py index 7a99a72b92..96b5ec420a 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_types.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_types.py @@ -1,22 +1,28 @@ -import json import uuid from unittest.mock import MagicMock, patch from django.urls import reverse import pytest -from rest_framework.status import HTTP_204_NO_CONTENT +from rest_framework.status import ( + HTTP_202_ACCEPTED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, +) from baserow.contrib.automation.automation_dispatch_context import ( AutomationDispatchContext, ) -from baserow.contrib.automation.nodes.handler import AutomationNodeHandler from baserow.contrib.automation.nodes.registries import automation_node_type_registry +from baserow.contrib.automation.nodes.service import AutomationNodeService from baserow.contrib.automation.workflows.constants import WorkflowState from baserow.contrib.automation.workflows.service import 
AutomationWorkflowService from baserow.core.handler import CoreHandler from baserow.core.services.types import DispatchResult -from baserow.core.utils import MirrorDict +from tests.baserow.contrib.automation.api.utils import get_api_kwargs + +API_URL_BASE = "api:automation:nodes" +API_URL_MOVE = f"{API_URL_BASE}:move" def test_automation_node_type_is_replaceable_with(): @@ -84,7 +90,7 @@ def test_automation_node_type_create_row_prepare_values_without_instance(data_fi user = data_fixture.create_user() node = data_fixture.create_automation_node(user=user, type="create_row") - values = {"service": {}} + values = {"service": {}, "workflow": node.workflow} result = node.get_type().prepare_values(values, user) # Since we didn't pass in a service, a new service is created @@ -112,7 +118,8 @@ def test_automation_node_type_create_row_dispatch(mock_dispatch, data_fixture): @pytest.mark.django_db def test_automation_node_type_rows_created_prepare_values_with_instance(data_fixture): user = data_fixture.create_user() - node = data_fixture.create_automation_node(user=user, type="rows_created") + workflow = data_fixture.create_automation_workflow(user=user, create_trigger=False) + node = data_fixture.create_automation_node(workflow=workflow, type="rows_created") values = {"service": {}} result = node.get_type().prepare_values(values, user, instance=node) @@ -122,9 +129,10 @@ def test_automation_node_type_rows_created_prepare_values_with_instance(data_fix @pytest.mark.django_db def test_service_node_type_rows_created_prepare_values_without_instance(data_fixture): user = data_fixture.create_user() - node = data_fixture.create_automation_node(user=user, type="rows_created") + workflow = data_fixture.create_automation_workflow(user=user, create_trigger=False) + node = data_fixture.create_automation_node(workflow=workflow, type="rows_created") - values = {"service": {}} + values = {"service": {}, "workflow": node.workflow} result = node.get_type().prepare_values(values, user) # Since 
we didn't pass in a service, a new service is created @@ -143,20 +151,6 @@ def test_automation_node_type_update_row_prepare_values_with_instance(data_fixtu assert result == {"service": node.service} -@pytest.mark.django_db -def test_automation_node_type_update_row_prepare_values_without_instance(data_fixture): - user = data_fixture.create_user() - node = data_fixture.create_automation_node(user=user, type="update_row") - - values = {"service": {}} - result = node.get_type().prepare_values(values, user) - - # Since we didn't pass in a service, a new service is created - new_service = result["service"] - assert isinstance(new_service, type(node.service)) - assert new_service.id != node.service.id - - @patch("baserow.contrib.automation.nodes.registries.ServiceHandler.dispatch_service") @pytest.mark.django_db def test_automation_node_type_update_row_dispatch(mock_dispatch, data_fixture): @@ -176,7 +170,8 @@ def test_automation_node_type_update_row_dispatch(mock_dispatch, data_fixture): @pytest.mark.django_db def test_automation_node_type_delete_row_prepare_values_with_instance(data_fixture): user = data_fixture.create_user() - node = data_fixture.create_automation_node(user=user, type="delete_row") + workflow = data_fixture.create_automation_workflow(user=user) + node = data_fixture.create_automation_node(workflow=workflow, type="delete_row") values = {"service": {}} result = node.get_type().prepare_values(values, user, instance=node) @@ -186,9 +181,17 @@ def test_automation_node_type_delete_row_prepare_values_with_instance(data_fixtu @pytest.mark.django_db def test_automation_node_type_delete_row_prepare_values_without_instance(data_fixture): user = data_fixture.create_user() - node = data_fixture.create_automation_node(user=user, type="delete_row") + workflow = data_fixture.create_automation_workflow(user=user) - values = {"service": {}} + node = data_fixture.create_automation_node(workflow=workflow, type="delete_row") + another_node = 
data_fixture.create_automation_node( + workflow=workflow, type="delete_row" + ) + + values = { + "service": {}, + "workflow": node.workflow, + } result = node.get_type().prepare_values(values, user) # Since we didn't pass in a service, a new service is created @@ -214,39 +217,6 @@ def test_automation_node_type_delete_row_dispatch(mock_dispatch, data_fixture): mock_dispatch.assert_called_once_with(node.service.specific, dispatch_context) -@pytest.mark.django_db -def test_automation_node_migrates_its_previous_node_output_on_import( - data_fixture, -): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow(user=user) - service = data_fixture.create_core_router_service(default_edge_label="Default") - data_fixture.create_core_router_action_node(workflow=workflow, service=service) - edge = data_fixture.create_core_router_service_edge( - service=service, label="Do this", condition="'true'" - ) - output_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, previous_node_output=str(edge.uid) - ) - output_node_type = output_node.get_type() - - serialized = json.loads(json.dumps(output_node_type.export_serialized(output_node))) - assert serialized["previous_node_output"] == str(edge.uid) - - id_mapping = { - "integrations": MirrorDict(), - "automation_workflow_nodes": MirrorDict(), - "automation_edge_outputs": {str(edge.uid): str(uuid.uuid4())}, - } - new_output_node = output_node_type.import_serialized( - workflow, serialized, id_mapping, import_formula=lambda x, d: x - ) - assert ( - new_output_node.previous_node_output - == id_mapping["automation_edge_outputs"][str(edge.uid)] - ) - - @pytest.mark.django_db @patch( "baserow.contrib.automation.workflows.service.AutomationWorkflowHandler.async_start_workflow" @@ -259,23 +229,21 @@ def test_on_event_excludes_disabled_workflows(mock_async_start_workflow, data_fi user = data_fixture.create_user() table = data_fixture.create_database_table(user=user) - service = 
data_fixture.create_local_baserow_rows_created_service( - table=table, - ) # Create a Node + workflow that is disabled original_workflow = data_fixture.create_automation_workflow() workflow = data_fixture.create_automation_workflow( - state=WorkflowState.DISABLED, + state=WorkflowState.DISABLED, trigger_service_kwargs={"table": table} ) workflow.automation.published_from = original_workflow workflow.automation.save() - node = data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=workflow, service=service + trigger = workflow.get_trigger() + + service_queryset = trigger.service.get_type().model_class.objects.filter( + table=table ) - service_queryset = service.get_type().model_class.objects.filter(table=table) event_payload = [ { "id": 1, @@ -289,14 +257,46 @@ def test_on_event_excludes_disabled_workflows(mock_async_start_workflow, data_fi }, ] - node.get_type().on_event(service_queryset, event_payload, user=user) + trigger.get_type().on_event(service_queryset, event_payload, user=user) + mock_async_start_workflow.assert_not_called() +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_type", + [ + node_type.type + for node_type in automation_node_type_registry.get_all() + if node_type.is_workflow_trigger + ], +) +def test_trigger_cant_be_moved(node_type, api_client, data_fixture): + node_type = automation_node_type_registry.get(node_type) + + user, token = data_fixture.create_user_and_token() + workflow = data_fixture.create_automation_workflow(user, trigger_type=node_type) + trigger = workflow.get_trigger() + node_after = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="before" + ) + response = api_client.post( + reverse(API_URL_MOVE, kwargs={"node_id": trigger.id}), + {"reference_node_id": node_after.id, "position": "south", "output": ""}, + **get_api_kwargs(token), + ) + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json() == { + "error": "ERROR_AUTOMATION_NODE_NOT_MOVABLE", + 
"detail": "Trigger nodes cannot be moved.", + } + + @pytest.mark.django_db def test_duplicating_router_node(data_fixture): user = data_fixture.create_user() workflow = data_fixture.create_automation_workflow(user=user) + core_router_with_edges = data_fixture.create_core_router_action_node_with_edges( workflow=workflow, ) @@ -305,44 +305,119 @@ def test_duplicating_router_node(data_fixture): edge2_output = core_router_with_edges.edge2_output fallback_output_node = core_router_with_edges.fallback_output_node - router_type = router.get_type() - source_router_outputs = router_type.get_output_nodes(router, specific=True) - assert len(source_router_outputs) == 3 - assert edge1_output in source_router_outputs - assert edge2_output in source_router_outputs - assert fallback_output_node in source_router_outputs + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) + + AutomationNodeService().duplicate_node(user, router.id) + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["router"]}}, + "router": { + "next": { + "Default": ["router-"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "router-": {"next": {"Default": ["fallback node"]}}, + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + } + ) - duplication = AutomationNodeHandler().duplicate_node(router) - duplicated_router = duplication.duplicated_node - assert duplicated_router.previous_node_id == router.id - assert duplicated_router.previous_node_output == "" +@pytest.mark.django_db +def test_moving_router_node_allowed_with_next_on_default_edge(api_client, data_fixture): + node_type = automation_node_type_registry.get("router") - source_router_outputs = 
router_type.get_output_nodes(router, specific=True) - assert len(source_router_outputs) == 3 - assert edge1_output in source_router_outputs - assert edge2_output in source_router_outputs - assert duplicated_router in source_router_outputs + user, token = data_fixture.create_user_and_token() + workflow = data_fixture.create_automation_workflow(user) + trigger = workflow.get_trigger() + before_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="before" + ) + node = data_fixture.create_automation_node( + workflow=workflow, + type=node_type.type, + ) + after_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, + label="after", + reference_node=node, + ) + response = api_client.post( + reverse(API_URL_MOVE, kwargs={"node_id": node.id}), + {"reference_node_id": trigger.id, "position": "south", "output": ""}, + **get_api_kwargs(token), + ) - fallback_output_node.refresh_from_db() - assert fallback_output_node not in source_router_outputs - assert fallback_output_node.previous_node_id == duplicated_router.id + assert response.status_code == HTTP_202_ACCEPTED + + +@pytest.mark.django_db +def test_moving_router_node_not_allowed_with_next_on_edge(api_client, data_fixture): + user, token = data_fixture.create_user_and_token() + workflow = data_fixture.create_automation_workflow(user) + trigger = workflow.get_trigger() + before_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="before" + ) + router = data_fixture.create_core_router_action_node( + workflow=workflow, + ) + edge1 = data_fixture.create_core_router_service_edge( + service=router.service, + label="Do this", + condition="'true'", + output_label="output edge 1", + ) + after_node = data_fixture.create_local_baserow_create_row_action_node( + workflow=workflow, label="after", reference_node=router, output=edge1.uid + ) + response = api_client.post( + reverse(API_URL_MOVE, kwargs={"node_id": router.id}), + 
{"reference_node_id": trigger.id, "position": "south", "output": ""}, + **get_api_kwargs(token), + ) + + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json() == { + "error": "ERROR_AUTOMATION_NODE_NOT_MOVABLE", + "detail": "Router nodes cannot be moved if they " + "have one or more output nodes associated with them.", + } @pytest.mark.django_db def test_trigger_node_dispatch_returns_event_payload_if_not_simulated(data_fixture): user = data_fixture.create_user() table = data_fixture.create_database_table(user=user) - service = data_fixture.create_local_baserow_rows_created_service( - table=table, - ) - workflow = data_fixture.create_automation_workflow(state=WorkflowState.LIVE) - node = data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=workflow, service=service + workflow = data_fixture.create_automation_workflow( + state=WorkflowState.LIVE, trigger_service_kwargs={"table": table} ) + + trigger = workflow.get_trigger().specific + dispatch_context = AutomationDispatchContext(workflow, "foo") - result = node.get_type().dispatch(node, dispatch_context) + result = trigger.get_type().dispatch(trigger, dispatch_context) assert result == DispatchResult(data="foo", status=200, output_uid="") @@ -351,19 +426,22 @@ def test_trigger_node_dispatch_returns_event_payload_if_not_simulated(data_fixtu def test_trigger_node_dispatch_returns_sample_data_if_simulated(data_fixture): user = data_fixture.create_user() table = data_fixture.create_database_table(user=user) - workflow = data_fixture.create_automation_workflow(state=WorkflowState.LIVE) - node = workflow.get_trigger().specific - service = node.service.specific - service.table = table - service.sample_data = {"data": {"foo": "bar"}} - service.save() - - dispatch_context = AutomationDispatchContext(workflow, simulate_until_node=node) + workflow = data_fixture.create_automation_workflow( + state=WorkflowState.LIVE, + trigger_service_kwargs={ + "table": table, + "sample_data": {"data": 
{"foo": "bar"}}, + }, + ) + + trigger = workflow.get_trigger() + + dispatch_context = AutomationDispatchContext(workflow, simulate_until_node=trigger) # If we don't reset this value, the trigger is considered as updatable and will # be dispatched. dispatch_context.update_sample_data_for = [] - result = node.get_type().dispatch(node, dispatch_context) + result = trigger.get_type().dispatch(workflow.get_trigger(), dispatch_context) assert result == DispatchResult(data={"foo": "bar"}, status=200, output_uid="") diff --git a/backend/tests/baserow/contrib/automation/nodes/test_trash_types.py b/backend/tests/baserow/contrib/automation/nodes/test_trash_types.py deleted file mode 100644 index 87356fc052..0000000000 --- a/backend/tests/baserow/contrib/automation/nodes/test_trash_types.py +++ /dev/null @@ -1,116 +0,0 @@ -import pytest - -from baserow.contrib.automation.nodes.trash_types import AutomationNodeTrashableItemType -from baserow.core.trash.exceptions import TrashItemRestorationDisallowed -from baserow.core.trash.handler import TrashHandler - - -@pytest.mark.django_db -def test_trashing_and_restoring_node_updates_next_node_values(data_fixture): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow(user=user) - trigger = workflow.get_trigger() - initial_router_service = data_fixture.create_core_router_service() - initial_router = data_fixture.create_core_router_action_node( - workflow=workflow, service=initial_router_service - ) - second_router_service = data_fixture.create_core_router_service() - second_router = data_fixture.create_core_router_action_node( - workflow=workflow, service=second_router_service - ) - initial_router_edge = data_fixture.create_core_router_service_edge( - label="To second router", - condition="'true'", - service=initial_router_service, - output_node=second_router, - ) - second_router.previous_node_output = str(initial_router_edge.uid) - second_router.save() - second_router_edge = 
data_fixture.create_core_router_service_edge( - label="To create row", - condition="'true'", - service=second_router_service, - ) - second_router_edge_output_node = workflow.automation_workflow_nodes.get( - previous_node_output=second_router_edge.uid - ) - - assert trigger.previous_node_id is None - assert trigger.previous_node_output == "" - - assert initial_router.previous_node_id == trigger.id - assert initial_router.previous_node_output == "" - - assert second_router.previous_node_id == initial_router.id - assert second_router.previous_node_output == str(initial_router_edge.uid) - - assert second_router_edge_output_node.previous_node_id == second_router.id - assert second_router_edge_output_node.previous_node_output == str( - second_router_edge.uid - ) - - automation = workflow.automation - trash_entry = TrashHandler.trash( - user, automation.workspace, automation, second_router - ) - assert trash_entry.additional_restoration_data == { - second_router_edge_output_node.id: { - "previous_node_output": str(second_router_edge.uid) - } - } - - second_router_edge_output_node.refresh_from_db() - - # We've trashed the second router, so that *output node* of - # the second router becomes the output node of the first router. 
- assert second_router_edge_output_node.previous_node_id == initial_router.id - assert second_router_edge_output_node.previous_node_output == str( - initial_router_edge.uid - ) - - TrashHandler.restore_item( - user, - AutomationNodeTrashableItemType.type, - second_router.id, - ) - - second_router.refresh_from_db() - assert second_router.previous_node_id == initial_router.id - assert second_router.previous_node_output == str(initial_router_edge.uid) - - second_router_edge_output_node.refresh_from_db() - assert second_router_edge_output_node.previous_node_id == second_router.id - assert second_router_edge_output_node.previous_node_output == str( - second_router_edge.uid - ) - - -@pytest.mark.django_db -def test_restoring_a_trashed_output_node_after_its_edge_is_destroyed_is_disallowed( - data_fixture, -): - user = data_fixture.create_user() - workflow = data_fixture.create_automation_workflow(user=user) - data_fixture.create_local_baserow_rows_created_trigger_node(workflow=workflow) - service = data_fixture.create_core_router_service() - data_fixture.create_core_router_action_node(service=service, workflow=workflow) - edge = data_fixture.create_core_router_service_edge( - service=service, label="Edge 1", condition="'false'" - ) - output_node = workflow.automation_workflow_nodes.get(previous_node_output=edge.uid) - - automation = workflow.automation - TrashHandler.trash(user, automation.workspace, automation, output_node) - - edge.delete() - - with pytest.raises(TrashItemRestorationDisallowed) as exc: - TrashHandler.restore_item( - user, - AutomationNodeTrashableItemType.type, - output_node.id, - ) - assert ( - exc.value.args[0] == "This automation node cannot " - "be restored as its branch has been deleted." 
- ) diff --git a/backend/tests/baserow/contrib/automation/test_automation_application_types.py b/backend/tests/baserow/contrib/automation/test_automation_application_types.py index a80a0f9986..cce17af40d 100644 --- a/backend/tests/baserow/contrib/automation/test_automation_application_types.py +++ b/backend/tests/baserow/contrib/automation/test_automation_application_types.py @@ -34,11 +34,7 @@ { "id": 1, "type": "rows_created", - "order": "1.00000000000000000000", "workflow_id": 1, - "parent_node_id": None, - "previous_node_id": None, - "previous_node_output": "", "service": { "id": 549, "integration_id": 1, @@ -49,11 +45,7 @@ { "id": 2, "type": "create_row", - "order": "2.00000000000000000000", "workflow_id": 1, - "parent_node_id": None, - "previous_node_id": 1, - "previous_node_output": "", "service": { "id": 550, "integration_id": 1, @@ -72,6 +64,7 @@ }, }, ], + "graph": {"0": 1, "1": {"next": {"": [2]}}, "2": {}}, } ], } @@ -82,7 +75,7 @@ def test_automation_export_serialized(data_fixture): user = data_fixture.create_user(email="test@baserow.io") automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow(user, automation=automation) - trigger = workflow.get_trigger(specific=False) + trigger = workflow.get_trigger() integration = trigger.service.specific.integration first_action = data_fixture.create_local_baserow_create_row_action_node( workflow=workflow @@ -117,11 +110,7 @@ def test_automation_export_serialized(data_fixture): "id": trigger.id, "label": trigger.label, "type": "rows_created", - "order": str(trigger.order), "workflow_id": trigger.workflow_id, - "parent_node_id": trigger.parent_node_id, - "previous_node_id": trigger.previous_node_id, - "previous_node_output": trigger.previous_node_output, "service": { "id": trigger.service_id, "integration_id": trigger.service.specific.integration_id, @@ -134,11 +123,7 @@ def test_automation_export_serialized(data_fixture): "id": first_action.id, "label": 
first_action.label, "type": "create_row", - "order": str(first_action.order), "workflow_id": first_action.workflow_id, - "parent_node_id": first_action.parent_node_id, - "previous_node_id": first_action.previous_node_id, - "previous_node_output": first_action.previous_node_output, "service": { "id": first_action.service_id, "integration_id": first_action.service.specific.integration_id, @@ -154,6 +139,11 @@ def test_automation_export_serialized(data_fixture): }, }, ], + "graph": { + "0": trigger.id, + str(trigger.id): {"next": {"": [first_action.id]}}, + str(first_action.id): {}, + }, } ], } @@ -208,13 +198,20 @@ def test_automation_application_import(data_fixture): workflow = automation.workflows.first() assert workflow.automation_workflow_nodes.count() == 2 - [trigger, action_node] = workflow.automation_workflow_nodes.order_by("order") + [trigger, action_node] = workflow.automation_workflow_nodes.order_by("id") assert isinstance(trigger.specific, LocalBaserowRowsCreatedTriggerNode) create_row_node = action_node.specific assert isinstance(create_row_node, LocalBaserowCreateRowActionNode) - assert create_row_node.previous_node_id == trigger.id + + workflow.assert_reference( + { + "0": "rows_created", + "rows_created": {"next": {"": ["create_row"]}}, + "create_row": {}, + } + ) # Make sure the table/integration migrated properly. 
create_row_service = create_row_node.service.specific diff --git a/backend/tests/baserow/contrib/automation/workflows/test_graph_handler.py b/backend/tests/baserow/contrib/automation/workflows/test_graph_handler.py new file mode 100644 index 0000000000..405c223d71 --- /dev/null +++ b/backend/tests/baserow/contrib/automation/workflows/test_graph_handler.py @@ -0,0 +1,825 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from baserow.contrib.automation.workflows.graph_handler import NodeGraphHandler + + +class FakeNode: + def __init__(self, nid): + self.id = int(nid) + + def __eq__(self, other): + if isinstance(other, int): + return self.id == other + else: + return self.id == other.id + + def __str__(self): + return f"Node {self.id}" + + def __repr__(self): + return f"FakeNode({self.id})" + + def get_label(self): + return str(self) + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "reference_node_id, position, output ,expected_result", + [ + (None, "south", "", "Node 1"), + (1, "south", "", "Node 2"), + (3, "south", "", "Node 4"), + (3, "child", "", "Node 7"), + (1, "child", "", None), + (4, "south", "", "Node 5"), + (4, "south", "randomUid", "Node 9"), + (9, "south", "randomUid", None), + (9, "south", "", None), + (9, "child", "", "Node 10"), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_node_at_position( + mock_get_nodes, reference_node_id, position, output, expected_result +): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "5": {"next": {"": [6]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "6": {}, + "7": {"next": {"": [8]}}, + "8": {}, + "9": {"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = lambda n: f"Node {n}" + + graph_handler = NodeGraphHandler(workflow) + + assert ( + graph_handler.get_node_at_position(reference_node_id, 
position, output) + == expected_result + ) + + +@pytest.mark.django_db +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_node_at_position_empty_graph(mock_get_nodes): + workflow = MagicMock() + workflow.graph = {} + + mock_get_nodes.side_effect = lambda n: f"Node {n}" + + graph_handler = NodeGraphHandler(workflow) + + assert graph_handler.get_node_at_position(None, "south", "") is None + + +@pytest.mark.django_db +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_last_position(mock_get_nodes): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "5": {"next": {"": [6]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "6": {}, # This is the last node + "7": {"next": {"": [8]}}, + "8": {}, + "9": {"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = lambda n: f"Node {n}" + + graph_handler = NodeGraphHandler(workflow) + + assert graph_handler.get_last_position() == ( + "Node 6", + "south", + "", + ) + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, expected_result", + [ + (1, (None, "south", "")), + (2, ("1", "south", "")), + (3, ("2", "south", "")), + (4, ("3", "south", "")), + (5, ("4", "south", "")), + (6, ("5", "south", "")), + (7, ("3", "child", "")), + (8, ("7", "south", "")), + (9, ("4", "south", "randomUid")), + (10, ("9", "child", "")), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_position(mock_get_nodes, node_id, expected_result): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "5": {"next": {"": [6]}}, + "6": {}, + "7": {"next": {"": [8]}}, + "8": {}, + "9": 
{"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = lambda n: f"Node {n}" + + graph_handler = NodeGraphHandler(workflow) + + node = MagicMock() + + node.id = node_id + + assert graph_handler.get_position(node) == expected_result + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, expected_result", + [ + (1, []), + (2, [(FakeNode(1), "south", "")]), + (3, [(FakeNode(1), "south", ""), (FakeNode(2), "south", "")]), + ( + 6, + [ + (FakeNode(1), "south", ""), + (FakeNode(2), "south", ""), + (FakeNode(3), "south", ""), + (FakeNode(4), "south", ""), + (FakeNode(5), "south", ""), + ], + ), + ( + 8, + [ + (FakeNode(1), "south", ""), + (FakeNode(2), "south", ""), + (FakeNode(3), "child", ""), + (FakeNode(7), "south", ""), + ], + ), + ( + 9, + [ + (FakeNode(1), "south", ""), + (FakeNode(2), "south", ""), + (FakeNode(3), "south", ""), + (FakeNode(4), "south", "randomUid"), + ], + ), + ( + 10, + [ + (FakeNode(1), "south", ""), + (FakeNode(2), "south", ""), + (FakeNode(3), "south", ""), + (FakeNode(4), "south", "randomUid"), + (FakeNode(9), "child", ""), + ], + ), + (11, None), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_previous_position(mock_get_nodes, node_id, expected_result): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "5": {"next": {"": [6]}}, + "6": {"next": {"": []}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + assert graph_handler.get_previous_positions(FakeNode(node_id)) == expected_result + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, output, expected_result", + [ + (1, "", [FakeNode(2)]), + (2, "", [FakeNode(3)]), + (4, None, 
[FakeNode(5), FakeNode(9)]), + (4, "", [FakeNode(5)]), + (4, "randomUid", [FakeNode(9)]), + (4, "missing", []), + (9, "", []), + (10, "", []), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_next_nodes(mock_get_nodes, node_id, output, expected_result): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "5": {"next": {"": [6]}}, + "6": {"next": {"": []}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + assert graph_handler.get_next_nodes(FakeNode(node_id), output) == expected_result + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, expected_result", + [ + (1, []), + (3, [FakeNode(7)]), + (8, []), + (9, [FakeNode(10)]), + (10, []), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_get_children(mock_get_nodes, node_id, expected_result): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "5": {"next": {"": [6]}}, + "6": {"next": {"": []}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {"children": [10]}, + "10": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + assert graph_handler.get_children(FakeNode(node_id)) == expected_result + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, reference_node_id, position, output, expected_result", + [ + ( + 11, + 1, + "south", + "", + { + "0": 1, + "1": {"next": {"": [11]}}, + "11": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": 
[7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 11, + 3, + "south", + "", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [11]}}, + "11": {"next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 11, + 3, + "child", + "", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [11], "next": {"": [4]}}, + "11": {"next": {"": [7]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "9": {}, + }, + ), + ( + 11, + 7, + "south", + "", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [11]}}, + "8": {"children": []}, + "9": {}, + "11": {"next": {"": [8]}}, + }, + ), + ( + 11, + 9, + "south", + "", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {"next": {"": [11]}}, + "11": {}, + }, + ), + ( + 11, + 4, + "south", + "randomUid", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [11]}}, + "11": {"next": {"": [9]}}, + "9": {}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + }, + ), + ( + 11, + None, + "south", + "", + { + "0": 11, + "11": {"next": {"": [1]}}, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ], +) 
+@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node")
+def test_graph_handler_insert(
+    mock_get_nodes,
+    node_id,
+    reference_node_id,
+    position,
+    output,
+    expected_result,
+):
+    workflow = MagicMock()
+    workflow.graph = {
+        "0": 1,
+        "1": {"next": {"": [2]}},
+        "2": {"next": {"": [3]}},
+        "3": {"children": [7], "next": {"": [4]}},
+        "4": {"next": {"": [5], "randomUid": [9]}},
+        "7": {"next": {"": [8]}},
+        "8": {"children": []},
+        "9": {},
+    }
+
+    mock_get_nodes.side_effect = FakeNode
+
+    graph_handler = NodeGraphHandler(workflow)
+
+    graph_handler.insert(
+        FakeNode(node_id),
+        FakeNode(reference_node_id) if reference_node_id is not None else None,
+        position,
+        output,
+    )
+
+    assert graph_handler.graph == expected_result
+
+
+@pytest.mark.django_db
+@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node")
+def test_graph_handler_insert_first_node(
+    mock_get_nodes,
+):
+    workflow = MagicMock()
+    workflow.graph = {}
+
+    mock_get_nodes.side_effect = FakeNode
+
+    graph_handler = NodeGraphHandler(workflow)
+
+    graph_handler.insert(FakeNode(1), None, "south", "")
+
+    assert graph_handler.graph == {"1": {}, "0": 1}
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize(
+    "node_id, expected_result",
+    [
+        (
+            1,
+            {
+                "0": 2,
+                "2": {"next": {"": [3]}},
+                "3": {"children": [7], "next": {"": [4]}},
+                "4": {"next": {"": [5], "randomUid": [9]}},
+                "7": {"next": {"": [8]}},
+                "8": {"children": []},
+                "9": {},
+            },
+        ),
+        (
+            2,
+            {
+                "0": 1,
+                "1": {"next": {"": [3]}},
+                "3": {"children": [7], "next": {"": [4]}},
+                "4": {"next": {"": [5], "randomUid": [9]}},
+                "7": {"next": {"": [8]}},
+                "8": {"children": []},
+                "9": {},
+            },
+        ),
+        (
+            3,
+            {
+                "0": 1,
+                "1": {"next": {"": [2]}},
+                "2": {"next": {"": [4, 7]}},
+                "4": {"next": {"": [5], "randomUid": [9]}},
+                "7": {"next": {"": [8]}},
+                "8": {"children": []},
+                "9": {},
+            },
+        ),
+        (
+            4,
+            {
+                "0": 1,
+                "1": {"next": {"": [2]}},
+                "2": {"next": {"": 
[3]}}, + "3": {"children": [7], "next": {"": [5, 9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_remove( + mock_get_nodes, + node_id, + expected_result, +): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + graph_handler.remove( + FakeNode(node_id), + ) + + assert graph_handler.graph == expected_result + + +@pytest.mark.django_db +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_remove_last_node( + mock_get_nodes, +): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": []}}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + graph_handler.remove( + FakeNode(1), + ) + + assert graph_handler.graph == {} + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, replace_id, expected_result", + [ + ( + 1, + 11, + { + "0": 11, + "11": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 3, + 11, + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [11]}}, + "11": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 4, + 11, + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [11]}}, + "11": {"next": {"": [5], "randomUid": [9]}}, + "7": 
{"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_replace( + mock_get_nodes, + node_id, + replace_id, + expected_result, +): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + graph_handler.replace(FakeNode(node_id), FakeNode(replace_id)) + + assert graph_handler.graph == expected_result + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "node_id, reference_node_id, position, output, expected_result", + [ + ( + 2, + 3, + "south", + "", + { + "0": 1, + "1": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [2]}}, + "2": {"next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 2, + 3, + "child", + "", + { + "0": 1, + "1": {"next": {"": [3]}}, + "3": {"children": [2], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "2": {"next": {"": [7]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 2, + 4, + "south", + "randomUid", + { + "0": 1, + "1": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [2]}}, + "2": {"next": {"": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + }, + ), + ( + 7, + 3, + "south", + "randomUid", + { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [8], "next": {"": [4], "randomUid": [7]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "9": {}, + "7": {}, + "8": {"children": []}, + }, + ), + ( + 8, + 9, + "south", + "anotherUid", + { + "0": 
1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": []}}, + "8": {"children": []}, + "9": {"next": {"anotherUid": [8]}}, + }, + ), + ], +) +@patch("baserow.contrib.automation.workflows.graph_handler.NodeGraphHandler.get_node") +def test_graph_handler_move( + mock_get_nodes, + node_id, + reference_node_id, + position, + output, + expected_result, +): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + } + + mock_get_nodes.side_effect = FakeNode + + graph_handler = NodeGraphHandler(workflow) + + graph_handler.move( + FakeNode(node_id), + FakeNode(reference_node_id) if reference_node_id is not None else None, + position, + output, + ) + + assert graph_handler.graph == expected_result + + +@pytest.mark.django_db +def test_graph_handler_migrate(): + workflow = MagicMock() + workflow.graph = { + "0": 1, + "1": {"next": {"": [2]}}, + "2": {"next": {"": [3]}}, + "3": {"children": [7], "next": {"": [4]}}, + "4": {"next": {"": [5], "randomUid": [9]}}, + "7": {"next": {"": [8]}}, + "8": {"children": []}, + "9": {}, + } + + graph_handler = NodeGraphHandler(workflow) + + graph_handler.migrate_graph( + { + "automation_workflow_nodes": { + 1: 41, + 2: 42, + 3: 43, + 4: 44, + 5: 45, + 6: 46, + 7: 47, + 8: 48, + 9: 49, + }, + "automation_edge_outputs": {"randomUid": "anotherRandomUid"}, + } + ) + + assert graph_handler.graph == { + "0": 41, + "41": {"next": {"": [42]}}, + "42": {"next": {"": [43]}}, + "43": {"next": {"": [44]}, "children": [47]}, + "44": {"next": {"": [45], "anotherRandomUid": [49]}}, + "47": {"next": {"": [48]}}, + "48": {"children": []}, + "49": {}, + } diff --git 
a/backend/tests/baserow/contrib/automation/workflows/test_workflow_handler.py b/backend/tests/baserow/contrib/automation/workflows/test_workflow_handler.py index 44204d7e52..302bde2a78 100644 --- a/backend/tests/baserow/contrib/automation/workflows/test_workflow_handler.py +++ b/backend/tests/baserow/contrib/automation/workflows/test_workflow_handler.py @@ -13,7 +13,6 @@ CorePeriodicTriggerNodeType, LocalBaserowRowsCreatedNodeTriggerType, ) -from baserow.contrib.automation.nodes.types import AutomationNodeDict from baserow.contrib.automation.workflows.constants import ( ALLOW_TEST_RUN_MINUTES, WorkflowState, @@ -268,6 +267,36 @@ def test_duplicate_workflow(data_fixture): assert workflow_clone.order != workflow.order +@pytest.mark.django_db +def test_duplicate_workflow_with_nodes(data_fixture): + workflow = data_fixture.create_automation_workflow(name="test") + data_fixture.create_core_router_action_node_with_edges( + workflow=workflow, + reference_node=workflow.get_trigger(), + ) + + reference = { + "0": "rows_created", + "fallback node": {}, + "output edge 1": {}, + "output edge 2": {}, + "router": { + "next": { + "Default": ["fallback node"], + "Do that": ["output edge 2"], + "Do this": ["output edge 1"], + } + }, + "rows_created": {"next": {"": ["router"]}}, + } + + workflow.assert_reference(reference) + + workflow_clone = AutomationWorkflowHandler().duplicate_workflow(workflow) + + workflow_clone.assert_reference(reference) + + @pytest.mark.django_db def test_import_workflow_only(data_fixture): automation = data_fixture.create_automation_application() @@ -305,27 +334,6 @@ def test_export_prepared_values(data_fixture): } -def test_sort_serialized_nodes_by_priority(): - serialized_nodes = [ - AutomationNodeDict(id=1, parent_node_id=None, order=0), - AutomationNodeDict(id=2, parent_node_id=1, order=0), - AutomationNodeDict(id=3, parent_node_id=1, order=1), - AutomationNodeDict(id=4, parent_node_id=1, order=2), - AutomationNodeDict(id=5, parent_node_id=None, 
order=1), - AutomationNodeDict(id=6, parent_node_id=None, order=2), - ] - assert AutomationWorkflowHandler()._sort_serialized_nodes_by_priority( - serialized_nodes - ) == [ - AutomationNodeDict(id=1, parent_node_id=None, order=0), - AutomationNodeDict(id=5, parent_node_id=None, order=1), - AutomationNodeDict(id=6, parent_node_id=None, order=2), - AutomationNodeDict(id=2, parent_node_id=1, order=0), - AutomationNodeDict(id=3, parent_node_id=1, order=1), - AutomationNodeDict(id=4, parent_node_id=1, order=2), - ] - - @pytest.mark.django_db def test_publish_returns_published_workflow(data_fixture): workflow = data_fixture.create_automation_workflow() diff --git a/backend/tests/baserow/contrib/automation/workflows/test_workflow_runner.py b/backend/tests/baserow/contrib/automation/workflows/test_workflow_runner.py deleted file mode 100644 index ddd9bb76be..0000000000 --- a/backend/tests/baserow/contrib/automation/workflows/test_workflow_runner.py +++ /dev/null @@ -1,179 +0,0 @@ -import pytest - -from baserow.contrib.automation.automation_dispatch_context import ( - AutomationDispatchContext, -) -from baserow.contrib.automation.nodes.handler import AutomationNodeHandler -from baserow.contrib.automation.workflows.constants import WorkflowState - - -@pytest.mark.django_db -def test_run_workflow_with_create_row_action(data_fixture): - user = data_fixture.create_user() - workspace = data_fixture.create_workspace(user=user) - integration = data_fixture.create_local_baserow_integration(user=user) - database = data_fixture.create_database_application(workspace=workspace) - trigger_table = data_fixture.create_database_table(database=database) - action_table = data_fixture.create_database_table(database=database) - action_table_field = data_fixture.create_text_field(table=action_table) - workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger() - trigger_service = trigger.service.specific - trigger_service.table = trigger_table - 
trigger_service.integration = integration - trigger_service.save() - action_node = data_fixture.create_local_baserow_create_row_action_node( - workflow=workflow, - service=data_fixture.create_local_baserow_upsert_row_service( - table=action_table, - integration=integration, - ), - ) - action_node.service.field_mappings.create(field=action_table_field, value="'Horse'") - - action_table_model = action_table.get_model() - assert action_table_model.objects.count() == 0 - - dispatch_context = AutomationDispatchContext(workflow, {}) - AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) - - row = action_table_model.objects.first() - assert getattr(row, action_table_field.db_column) == "Horse" - assert dispatch_context.dispatch_history == [trigger.id, action_node.id] - - -@pytest.mark.django_db -def test_run_workflow_with_update_row_action(data_fixture): - user = data_fixture.create_user() - workspace = data_fixture.create_workspace(user=user) - integration = data_fixture.create_local_baserow_integration(user=user) - database = data_fixture.create_database_application(workspace=workspace) - trigger_table = data_fixture.create_database_table(database=database) - action_table = data_fixture.create_database_table(database=database) - action_table_field = data_fixture.create_text_field(table=action_table) - action_table_row = action_table.get_model().objects.create( - **{f"field_{action_table_field.id}": "Horse"} - ) - workflow = data_fixture.create_automation_workflow(user) - trigger = workflow.get_trigger() - trigger_service = trigger.service.specific - trigger_service.table = trigger_table - trigger_service.integration = integration - trigger_service.save() - action_node = data_fixture.create_local_baserow_update_row_action_node( - workflow=workflow, - service=data_fixture.create_local_baserow_upsert_row_service( - table=action_table, - integration=integration, - row_id=action_table_row.id, - ), - ) - action_node.service.field_mappings.create( - 
field=action_table_field, value="'Badger'" - ) - - dispatch_context = AutomationDispatchContext(workflow, {}) - AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) - - action_table_row.refresh_from_db() - assert getattr(action_table_row, action_table_field.db_column) == "Badger" - assert dispatch_context.dispatch_history == [trigger.id, action_node.id] - - -@pytest.mark.django_db -def test_run_workflow_with_delete_row_action(data_fixture): - user = data_fixture.create_user() - workspace = data_fixture.create_workspace(user=user) - integration = data_fixture.create_local_baserow_integration(user=user) - database = data_fixture.create_database_application(workspace=workspace) - trigger_table = data_fixture.create_database_table(database=database) - action_table = data_fixture.create_database_table(database=database) - action_table_field = data_fixture.create_text_field(table=action_table) - action_table_row = action_table.get_model().objects.create( - **{f"field_{action_table_field.id}": "Mouse"} - ) - workflow = data_fixture.create_automation_workflow( - user=user, state=WorkflowState.LIVE - ) - trigger = workflow.get_trigger() - trigger_service = trigger.service.specific - trigger_service.table = trigger_table - trigger_service.integration = integration - trigger_service.save() - action_node = data_fixture.create_local_baserow_delete_row_action_node( - workflow=workflow, - service=data_fixture.create_local_baserow_delete_row_service( - table=action_table, - integration=integration, - row_id=action_table_row.id, - ), - ) - - assert action_table.get_model().objects.all().count() == 1 - - dispatch_context = AutomationDispatchContext(workflow, {}) - AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) - - assert action_table.get_model().objects.all().count() == 0 - assert dispatch_context.dispatch_history == [trigger.id, action_node.id] - - -@pytest.mark.django_db -def 
test_run_workflow_with_router_action(data_fixture): - user = data_fixture.create_user() - workspace = data_fixture.create_workspace(user=user) - integration = data_fixture.create_local_baserow_integration(user=user) - database = data_fixture.create_database_application(workspace=workspace) - trigger_table = data_fixture.create_database_table(database=database) - workflow = data_fixture.create_automation_workflow( - user=user, state=WorkflowState.LIVE - ) - trigger = workflow.get_trigger() - trigger_service = trigger.service.specific - trigger_service.table = trigger_table - trigger_service.integration = integration - trigger_service.save() - router_service = data_fixture.create_core_router_service() - router_node = data_fixture.create_core_router_action_node( - workflow=workflow, service=router_service - ) - data_fixture.create_core_router_service_edge( - service=router_service, label="Edge 1", condition="'false'" - ) - - action_table = data_fixture.create_database_table(database=database) - action_table_field = data_fixture.create_text_field(table=action_table) - action_table_row = action_table.get_model().objects.create( - **{f"field_{action_table_field.id}": "Horse"} - ) - edge2_output_node = data_fixture.create_local_baserow_update_row_action_node( - workflow=workflow, - previous_node_id=router_node.id, - service=data_fixture.create_local_baserow_upsert_row_service( - table=action_table, - integration=integration, - row_id=action_table_row.id, - ), - ) - edge2_output_node.service.field_mappings.create( - field=action_table_field, value="'Badger'" - ) - edge2 = data_fixture.create_core_router_service_edge( - service=router_service, - label="Edge 2", - condition="'true'", - output_node=edge2_output_node, - ) - edge2_output_node.previous_node_output = edge2.uid - edge2_output_node.save() - - dispatch_context = AutomationDispatchContext(workflow, {}) - AutomationNodeHandler().dispatch_node(workflow.get_trigger(), dispatch_context) - - 
action_table_row.refresh_from_db() - assert getattr(action_table_row, action_table_field.db_column) == "Badger" - assert dispatch_context.dispatch_history == [ - trigger.id, - router_node.id, - edge2_output_node.id, - ] diff --git a/backend/tests/baserow/contrib/automation/workflows/test_workflow_service.py b/backend/tests/baserow/contrib/automation/workflows/test_workflow_service.py index 6da77ac4ae..60b6832eef 100644 --- a/backend/tests/baserow/contrib/automation/workflows/test_workflow_service.py +++ b/backend/tests/baserow/contrib/automation/workflows/test_workflow_service.py @@ -72,10 +72,10 @@ def test_create_workflow_user_not_in_workspace(data_fixture): def test_create_workflow(data_fixture): user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) + workflow = AutomationWorkflowService().create_workflow(user, automation.id, "foo") - assert workflow.automation_workflow_nodes.count() == 1 - node = workflow.automation_workflow_nodes.get().specific - assert node.get_type().is_workflow_trigger + + assert workflow.automation_workflow_nodes.count() == 0 @patch(f"{SERVICES_PATH}.automation_workflow_deleted") diff --git a/backend/tests/baserow/contrib/automation/workflows/test_workflow_tasks.py b/backend/tests/baserow/contrib/automation/workflows/test_workflow_tasks.py index 0b2c8bb5a4..1d60adf614 100644 --- a/backend/tests/baserow/contrib/automation/workflows/test_workflow_tasks.py +++ b/backend/tests/baserow/contrib/automation/workflows/test_workflow_tasks.py @@ -54,9 +54,6 @@ def test_run_workflow_dispatch_error_creates_workflow_history( ) published_workflow.automation.published_from = original_workflow published_workflow.automation.save() - data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=published_workflow - ) mock_dispatch_node.side_effect = DispatchException("mock dispatch error") @@ -88,9 +85,6 @@ def test_run_workflow_unexpected_error_creates_workflow_history( ) 
published_workflow.automation.published_from = original_workflow published_workflow.automation.save() - data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=published_workflow - ) mock_dispatch_node.side_effect = ValueError("mock unexpected error") @@ -147,9 +141,6 @@ def test_run_workflow_disables_workflow_if_too_many_errors( ) published_workflow.automation.published_from = original_workflow published_workflow.automation.save() - data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=published_workflow - ) # The first 3 runs should just be an error for i in range(3): @@ -194,19 +185,19 @@ def test_run_workflow_disables_workflow_if_too_many_consecutive_errors( ) published_workflow.automation.published_from = original_workflow published_workflow.automation.save() - data_fixture.create_local_baserow_rows_created_trigger_node( - workflow=published_workflow - ) start_workflow_celery_task(published_workflow.id, False, None) mock_dispatch_node.assert_not_called() histories = AutomationWorkflowHistory.objects.filter(workflow=original_workflow) + assert len(histories) == 1 + history = histories[0] assert history.workflow == original_workflow assert history.status == "disabled" + error_msg = "mock too many errors" assert history.message == error_msg diff --git a/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py b/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py index d1a195c96d..1f2631c09b 100644 --- a/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py +++ b/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py @@ -30,7 +30,7 @@ def test_periodic_trigger_service_type_generate_schema(data_fixture): user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + 
automation=automation, state=WorkflowState.LIVE, create_trigger=False ) trigger_node = data_fixture.create_periodic_trigger_node( workflow=workflow, @@ -52,7 +52,7 @@ def test_periodic_trigger_node_creation_and_property_updates(data_fixture): user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + automation=automation, state=WorkflowState.LIVE, create_trigger=False ) node_handler = AutomationNodeHandler() @@ -107,7 +107,7 @@ def test_call_periodic_services_that_are_not_published( user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.DRAFT + automation=automation, state=WorkflowState.DRAFT, create_trigger=False ) data_fixture.create_periodic_trigger_node( workflow=workflow, @@ -134,7 +134,7 @@ def test_call_periodic_services_that_are_paused(mock_start_workflow, data_fixtur user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.PAUSED + automation=automation, state=WorkflowState.PAUSED, create_trigger=False ) data_fixture.create_periodic_trigger_node( workflow=workflow, @@ -161,7 +161,7 @@ def test_call_periodic_services_that_are_locked(mock_start_workflow, data_fixtur user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + automation=automation, state=WorkflowState.LIVE, create_trigger=False ) trigger = data_fixture.create_periodic_trigger_node( workflow=workflow, @@ -195,7 +195,7 @@ def test_call_multiple_periodic_services_that_are_due( user = data_fixture.create_user() automation = 
data_fixture.create_automation_application(user=user) workflow_1 = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + automation=automation, state=WorkflowState.LIVE, create_trigger=False ) data_fixture.create_periodic_trigger_node( workflow=workflow_1, @@ -205,7 +205,7 @@ def test_call_multiple_periodic_services_that_are_due( }, ) workflow_2 = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + automation=automation, state=WorkflowState.LIVE, create_trigger=False ) data_fixture.create_periodic_trigger_node( workflow=workflow_2, @@ -599,7 +599,7 @@ def test_call_periodic_services_that_are_due( user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( - automation=automation, state=WorkflowState.LIVE + automation=automation, state=WorkflowState.LIVE, create_trigger=False ) trigger = data_fixture.create_periodic_trigger_node( workflow=workflow, diff --git a/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py b/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py new file mode 100644 index 0000000000..45a49d7cd7 --- /dev/null +++ b/backend/tests/baserow/contrib/integrations/core/test_iterator_service_type.py @@ -0,0 +1,74 @@ +import pytest + +from baserow.test_utils.helpers import AnyStr +from baserow.test_utils.pytest_conftest import FakeDispatchContext + + +@pytest.mark.django_db +def test_core_iterator_service_type_dispatch_data_simple_value(data_fixture): + service = data_fixture.create_core_iterator_service(source="get('test')") + + service_type = service.get_type() + dispatch_context = FakeDispatchContext() + + dispatch_result = service_type.dispatch(service, dispatch_context) + + assert dispatch_result.data == [2] + + +@pytest.mark.django_db +def test_core_iterator_service_type_dispatch_data_array(data_fixture): + service = 
data_fixture.create_core_iterator_service(source="get('array')") + + service_type = service.get_type() + dispatch_context = FakeDispatchContext( + context={"array": [{"test": "data"}, {"test": "data2"}]} + ) + + dispatch_result = service_type.dispatch(service, dispatch_context) + + assert dispatch_result.data == [{"test": "data"}, {"test": "data2"}] + + +@pytest.mark.django_db +def test_core_iterator_service_type_schema(data_fixture): + service = data_fixture.create_core_iterator_service( + sample_data={"data": [{"test": "data"}, {"test": "data2"}]} + ) + + service_type = service.get_type() + assert service_type.generate_schema(service) == { + "$schema": AnyStr(), + "title": AnyStr(), + "type": "array", + "items": { + "type": "object", + "properties": {"test": {"type": "string"}}, + "required": ["test"], + }, + } + + +@pytest.mark.django_db +def test_core_iterator_service_types_simple_schema(data_fixture): + service = data_fixture.create_core_iterator_service( + sample_data={"data": ["string"]} + ) + + service_type = service.get_type() + assert service_type.generate_schema(service) == { + "$schema": AnyStr(), + "title": AnyStr(), + "type": "array", + "items": { + "type": "string", + }, + } + + +@pytest.mark.django_db +def test_core_iterator_service_type_empty_schema(data_fixture): + service = data_fixture.create_core_iterator_service(sample_data={"data": []}) + + service_type = service.get_type() + assert service_type.generate_schema(service) is None diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 089ef78a40..a66db0347a 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -237,27 +237,6 @@ services: networks: local: - embeddings: - build: - context: ./embeddings - dockerfile: Dockerfile - ports: - - "${HOST_PUBLISH_IP:-127.0.0.1}:7999:80" - networks: - local: - restart: unless-stopped - healthcheck: - test: - [ - "CMD", - "python", - "-c", - "import requests; requests.get('http://localhost/health').raise_for_status()", - ] - 
interval: 1m30s - timeout: 10s - retries: 3 - # When switching between dev and local the media files in the media volume will be # owned by different users. Ensure that we chown them to the user appropriate for the # environment here. diff --git a/e2e-tests/fixtures/automation/automationNode.ts b/e2e-tests/fixtures/automation/automationNode.ts index 61ae23a97f..0106794af7 100644 --- a/e2e-tests/fixtures/automation/automationNode.ts +++ b/e2e-tests/fixtures/automation/automationNode.ts @@ -1,5 +1,5 @@ -import { getClient } from "../../client" -import { AutomationWorkflow } from "./automationWorkflow" +import { getClient } from "../../client"; +import { AutomationWorkflow } from "./automationWorkflow"; export class AutomationNode { constructor( @@ -12,16 +12,17 @@ export class AutomationNode { export async function createAutomationNode( workflow: AutomationWorkflow, nodeType: string, + referenceNodeId: number | null = null, + position: string = "south", + output: string = "" ): Promise { - const response: any = await getClient(workflow.automation.workspace.user).post( - `automation/workflow/${workflow.id}/nodes/`, - { - type: nodeType, - } - ) - return new AutomationNode( - response.data.id, - nodeType, - workflow, - ) + const response: any = await getClient( + workflow.automation.workspace.user + ).post(`automation/workflow/${workflow.id}/nodes/`, { + type: nodeType, + reference_node_id: referenceNodeId, + position, + output, + }); + return new AutomationNode(response.data.id, nodeType, workflow); } diff --git a/e2e-tests/tests/automation/automation.spec.ts b/e2e-tests/tests/automation/automation.spec.ts index 9bbfeeaa96..578e1fc421 100644 --- a/e2e-tests/tests/automation/automation.spec.ts +++ b/e2e-tests/tests/automation/automation.spec.ts @@ -22,12 +22,10 @@ test.describe("Automation application test suite", () => { "Ensure the default Workflow has been created and is visible." 
).toBeVisible(); - const createNodeButton = page.getByRole("button", { - name: "Create automation node", - }); + const chooseTriggerTitle = page.getByText("Choose an event..."); await expect( - createNodeButton, - "Ensure the button to create a node is visible." + chooseTriggerTitle, + "Ensure the trigger chooser is visible." ).toBeVisible(); }); @@ -51,12 +49,10 @@ test.describe("Automation application test suite", () => { "Ensure the default Workflow has been created and is visible." ).toBeVisible(); - const createNodeButton = page.getByRole("button", { - name: "Create automation node", - }); + const chooseTriggerTitle = page.getByText("Choose an event..."); await expect( - createNodeButton, - "Ensure the button to create a node is visible." + chooseTriggerTitle, + "Ensure the trigger chooser is visible." ).toBeVisible(); }); }); diff --git a/e2e-tests/tests/automation/automationNode.spec.ts b/e2e-tests/tests/automation/automationNode.spec.ts index fac24282f8..edd5b0cb89 100644 --- a/e2e-tests/tests/automation/automationNode.spec.ts +++ b/e2e-tests/tests/automation/automationNode.spec.ts @@ -3,8 +3,17 @@ import { expect, test } from "../baserowTest"; import { createAutomationNode } from "../../fixtures/automation/automationNode"; test.describe("Automation node test suite", () => { - test.beforeEach(async ({ automationWorkflowPage }) => { + let trigger; + test.beforeEach(async ({ automationWorkflowPage, page }) => { await automationWorkflowPage.goto(); + + trigger = await createAutomationNode( + automationWorkflowPage.automationWorkflow, + "periodic" + ); + + const startsWhen = page.getByText("Configure"); + await expect(startsWhen).toBeVisible(); }); test("Can create an automation node", async ({ page }) => { @@ -28,15 +37,14 @@ test.describe("Automation node test suite", () => { page, automationWorkflowPage, }) => { - await createAutomationNode( + const createNode = await createAutomationNode( automationWorkflowPage.automationWorkflow, - "create_row" + 
"create_row", + trigger.id, + "south", + "" ); - // TODO: Remove this manual reload once real-time events have been - // implemented for automations. - await page.reload(); - const nodeDiv = page.getByRole("heading", { name: "Create a row", level: 1, @@ -46,12 +54,14 @@ test.describe("Automation node test suite", () => { // Let's select the node await nodeDiv.click(); + await page.locator(".vue-flow__controls-fitview").click(); + const nodeMenuButton = page .locator(".workflow-node-content--selected") .getByRole("button", { name: "Node options" }); await nodeMenuButton.click(); - const deleteNodeButton = page.getByRole("button", { name: "Delete" }); + const deleteNodeButton = page.getByText("Delete"); await deleteNodeButton.waitFor({ state: "visible" }); deleteNodeButton.click(); diff --git a/e2e-tests/tests/automation/automationWorkflow.spec.ts b/e2e-tests/tests/automation/automationWorkflow.spec.ts index ed9de8816f..646f6fd4ad 100644 --- a/e2e-tests/tests/automation/automationWorkflow.spec.ts +++ b/e2e-tests/tests/automation/automationWorkflow.spec.ts @@ -26,12 +26,10 @@ test.describe("Automation workflow test suite", () => { "Ensure the default automation name is displayed in the sidebar." ).toBeVisible(); - const createNodeButton = page.getByRole("button", { - name: "Create automation node", - }); + const chooseTriggerTitle = page.getByText("Choose an event..."); await expect( - createNodeButton, - "Ensure the button to create a node is visible." + chooseTriggerTitle, + "Ensure the trigger chooser is visible." ).toBeVisible(); }); @@ -55,12 +53,10 @@ test.describe("Automation workflow test suite", () => { "Ensure the duplicated workflow is displayed in the sidebar." ).toBeVisible(); - const createNodeButton = page.getByRole("button", { - name: "Create automation node", - }); + const chooseTriggerTitle = page.getByText("Choose an event..."); await expect( - createNodeButton, - "Ensure the button to create a node is visible." 
+ chooseTriggerTitle, + "Ensure the trigger chooser is visible." ).toBeVisible(); }); @@ -85,7 +81,7 @@ test.describe("Automation workflow test suite", () => { await editable.fill(newWorkflowName); // Click outside to cause a blur event so that the name is saved - await page.locator("body").click(); + await page.locator("body").click({ position: { x: 10, y: 10 } }); const workflowLink = page.getByRole("link", { name: "Workflow" }); await expect( @@ -93,12 +89,10 @@ test.describe("Automation workflow test suite", () => { "Ensure the renamed workflow is displayed in the sidebar." ).toBeVisible(); - const createNodeButton = page.getByRole("button", { - name: "Create automation node", - }); + const chooseTriggerTitle = page.getByText("Choose an event..."); await expect( - createNodeButton, - "Ensure the button to create a node is visible." + chooseTriggerTitle, + "Ensure the trigger chooser is visible." ).toBeVisible(); }); @@ -110,7 +104,9 @@ test.describe("Automation workflow test suite", () => { await workflow.hover(); await page.locator(".tree__sub > .tree__options").first().click(); - const deleteLink = await page.locator(".context__menu").getByText("Delete"); + const deleteLink = await page + .locator(".context__menu") + .getByText("Delete", { exact: true }); await deleteLink.click(); await expect(deleteLink).toBeHidden(); diff --git a/web-frontend/modules/automation/components/AutomationBuilderFormulaInput.vue b/web-frontend/modules/automation/components/AutomationBuilderFormulaInput.vue index 65ac6297a6..57dac950d4 100644 --- a/web-frontend/modules/automation/components/AutomationBuilderFormulaInput.vue +++ b/web-frontend/modules/automation/components/AutomationBuilderFormulaInput.vue @@ -18,7 +18,7 @@ import { inject, computed, useContext } from '@nuxtjs/composition-api' import FormulaInputField from '@baserow/modules/core/components/formula/FormulaInputField' const props = defineProps({ - value: { type: Object, required: false, default: () => ({}) }, + value: 
{ type: [Object, String], required: false, default: () => ({}) }, dataProvidersAllowed: { type: Array, required: false, default: () => [] }, }) diff --git a/web-frontend/modules/automation/components/AutomationHeader.vue b/web-frontend/modules/automation/components/AutomationHeader.vue index 6290c47e64..7d56623419 100644 --- a/web-frontend/modules/automation/components/AutomationHeader.vue +++ b/web-frontend/modules/automation/components/AutomationHeader.vue @@ -87,6 +87,7 @@ :icon="testRunEnabled ? 'iconoir-cancel' : 'iconoir-play'" type="secondary" data-highlight="automation-test-run" + :disabled="testRunDisabled" @click="toggleTestRun" >{{ testRunEnabled @@ -116,6 +117,8 @@ import { HistoryEditorSidePanelType } from '@baserow/modules/automation/editorSi import { notifyIf } from '@baserow/modules/core/utils/error' import { WORKFLOW_STATES } from '@baserow/modules/automation/components/enums' +import NodeGraphHandler from '@baserow/modules/automation/utils/nodeGraphHandler' + export default defineComponent({ name: 'AutomationHeader', components: {}, @@ -144,6 +147,10 @@ export default defineComponent({ } }) + const testRunDisabled = computed(() => { + return !new NodeGraphHandler(workflow.value).hasNodes() + }) + const testRunEnabled = computed(() => { return ( moment(workflow.value?.allow_test_run_until).isAfter() || @@ -274,6 +281,7 @@ export default defineComponent({ selectedWorkflow, workflow, activeSidePanel, + testRunDisabled, } }, }) diff --git a/web-frontend/modules/automation/components/form/SimulateDispatchNodeForm.vue b/web-frontend/modules/automation/components/form/SimulateDispatchNodeForm.vue index 01683a43da..ff19130841 100644 --- a/web-frontend/modules/automation/components/form/SimulateDispatchNodeForm.vue +++ b/web-frontend/modules/automation/components/form/SimulateDispatchNodeForm.vue @@ -10,8 +10,8 @@ {{ buttonLabel }} -
- {{ nodeIsInError }} +
+ {{ cantBeTestedReason }}
@@ -59,6 +59,7 @@ import SampleDataModal from '@baserow/modules/automation/components/sidebar/Samp const { app } = useContext() const store = useStore() +const automation = inject('automation') const workflow = inject('workflow') const sampleDataModalRef = ref(null) @@ -90,35 +91,50 @@ const isLoading = computed(() => { return queryInProgress.value || isSimulatingThisNode.value }) +const nodeType = computed(() => app.$registry.get('node', props.node.type)) + +const sampleData = computed(() => { + const sample = nodeType.value.getSampleData(props.node) + if (nodeType.value.serviceType.returnsList && sample) { + return sample.results + } + return sample +}) + +const hasSampleData = computed(() => { + return Boolean(sampleData.value) +}) + /** * All previous nodes must have been tested, i.e. they must have sample * data and shouldn't be in error. */ -const nodeIsInError = computed(() => { - const nodeType = app.$registry.get('node', props.node.type) - - if (nodeType.isInError({ service: props.node.service })) { +const cantBeTestedReason = computed(() => { + if (nodeType.value.isInError({ service: props.node.service })) { return app.i18n.t('simulateDispatch.errorNodeNotConfigured') } - let currentNode = workflow.value.orderedNodes.find( - (node) => node.id === props.node.previous_node_id - ) - - while (currentNode) { - const nodeType = app.$registry.get('node', currentNode.type) + const previousNodes = store.getters[ + 'automationWorkflowNode/getPreviousNodes' + ](workflow.value, props.node) - if (nodeType.isInError({ service: currentNode.service })) { - return app.i18n.t('simulateDispatch.errorPreviousNodeNotConfigured') + for (const previousNode of previousNodes) { + const previousNodeType = app.$registry.get('node', previousNode.type) + const nodeLabel = previousNodeType.getLabel({ + automation: automation.value, + node: previousNode, + }) + if (previousNodeType.isInError(previousNode)) { + return app.i18n.t('simulateDispatch.errorPreviousNodeNotConfigured', { + 
node: nodeLabel, + }) } - if (!currentNode.service?.sample_data) { - return app.i18n.t('simulateDispatch.errorPreviousNodesNotTested') + if (!previousNodeType.getSampleData(previousNode)) { + return app.i18n.t('simulateDispatch.errorPreviousNodesNotTested', { + node: nodeLabel, + }) } - - currentNode = workflow.value.orderedNodes.find( - (node) => node.id === currentNode.previous_node_id - ) } return '' @@ -126,7 +142,7 @@ const nodeIsInError = computed(() => { const isDisabled = computed(() => { return ( - Boolean(nodeIsInError.value) || + Boolean(cantBeTestedReason.value) || (isSimulating.value && !isSimulatingThisNode.value) ) }) @@ -141,14 +157,6 @@ const sampleDataModalTitle = computed(() => { }) }) -const sampleData = computed(() => { - return props.node.service.sample_data?.data -}) - -const hasSampleData = computed(() => { - return Boolean(sampleData.value) -}) - const buttonLabel = computed(() => { return hasSampleData.value ? app.i18n.t('simulateDispatch.buttonLabelTestAgain') @@ -156,7 +164,7 @@ const buttonLabel = computed(() => { }) const showTestNodeDescription = computed(() => { - if (Boolean(nodeIsInError.value) || hasSampleData.value) { + if (Boolean(cantBeTestedReason.value) || hasSampleData.value) { return false } diff --git a/web-frontend/modules/automation/components/workflow/WorkflowAddNodeMenu.vue b/web-frontend/modules/automation/components/workflow/WorkflowAddNodeMenu.vue new file mode 100644 index 0000000000..19ee12b971 --- /dev/null +++ b/web-frontend/modules/automation/components/workflow/WorkflowAddNodeMenu.vue @@ -0,0 +1,82 @@ + + + diff --git a/web-frontend/modules/automation/components/workflow/WorkflowConnector.vue b/web-frontend/modules/automation/components/workflow/WorkflowConnector.vue index 928b8a481a..884705bef1 100644 --- a/web-frontend/modules/automation/components/workflow/WorkflowConnector.vue +++ b/web-frontend/modules/automation/components/workflow/WorkflowConnector.vue @@ -46,7 +46,7 @@ const pathData = computed(() => { 
const localRadius = props.radius - if (localEndX === localStartX) { + if (Math.abs(localEndX - localStartX) < 5) { localStartX += 1 // Let's go straight as it's a simple line. return `M ${localStartX},${localStartY} L ${localStartX},${localEndY}` @@ -83,7 +83,6 @@ const viewBox = computed( const svgStyle = computed(() => ({ position: 'absolute', left: `${bounds.value.left}px`, - top: `${bounds.value.top}px`, width: `${bounds.value.width}px`, height: `${bounds.value.height}px`, pointerEvents: 'none', diff --git a/web-frontend/modules/automation/components/workflow/WorkflowEdge.vue b/web-frontend/modules/automation/components/workflow/WorkflowEdge.vue index d01b47a44a..0b80a1ed4a 100644 --- a/web-frontend/modules/automation/components/workflow/WorkflowEdge.vue +++ b/web-frontend/modules/automation/components/workflow/WorkflowEdge.vue @@ -1,22 +1,23 @@ @@ -40,6 +52,9 @@ import { Controls } from '@vue2-flow/controls' import { ref, watch, toRefs, onMounted } from 'vue' import { inject, computed } from '@nuxtjs/composition-api' import WorkflowNode from '@baserow/modules/automation/components/workflow/WorkflowNode' +import WorkflowAddNodeMenu from '@baserow/modules/automation/components/workflow/WorkflowAddNodeMenu' +import debounce from 'lodash.debounce' +import NodeGraphHandler from '@baserow/modules/automation/utils/nodeGraphHandler' const props = defineProps({ nodes: { @@ -68,8 +83,10 @@ const panOnScroll = ref(true) const zoomOnDoubleClick = ref(false) const updateKey = ref(1) +const workflow = inject('workflow') + const trigger = computed(() => { - return props.nodes.find((node) => node.previous_node_id === null) + return new NodeGraphHandler(workflow.value).getFirstNode() }) const vueFlowNodes = computed(() => { @@ -90,24 +107,39 @@ const computedNodes = computed(() => { return props.nodes }) +const triggerUpdate = debounce(() => { + updateKey.value += 1 +}, 500) + +const currentGraph = computed(() => workflow.value.graph) + /** - * This watcher is used to force 
the update the workflow graph when nodes are updated. - * Vue-flow prevents the update somehow. + * These watchers are used to force the update the workflow graph when nodes are updated. + * Vue-flow prevents the natural update. */ watch( computedNodes, () => { - updateKey.value += 1 + triggerUpdate() }, { deep: true } ) +watch( + currentGraph, + () => { + updateKey.value += 1 + }, + { deep: false } +) /** * When the component is mounted, we emit the first node's ID. This is * to ensure that the first node (the trigger) is selected by default. */ onMounted(() => { - emit('input', props.nodes[0].id) + if (props.nodes.length) { + emit('input', props.nodes[0].id) + } }) /** diff --git a/web-frontend/modules/automation/components/workflow/WorkflowNode.vue b/web-frontend/modules/automation/components/workflow/WorkflowNode.vue index edf8dc3560..88ce4e5cca 100644 --- a/web-frontend/modules/automation/components/workflow/WorkflowNode.vue +++ b/web-frontend/modules/automation/components/workflow/WorkflowNode.vue @@ -4,7 +4,7 @@ ref="nodeComponent" :node="node" :data-highlight=" - node.previous_node_id === null + nodeType.is_workflow_trigger ? 'automation-trigger' : 'automation-action' " @@ -15,32 +15,59 @@ @remove-node="emit('remove-node', $event)" @replace-node="emit('replace-node', $event)" /> - +
+
+
+ +
+
+ +
+
+
+
+ +
-
- -
+ :node="node" + :edge-uid="edge.uid" + :edge-label="edge.label" + :has-siblings="nodeEdges.length > 1" + :selected-node-id="selectedNodeId" + :debug="debug" + :read-only="readOnly" + @add-node="emit('add-node', $event)" + @select-node="emit('select-node', $event)" + @remove-node="emit('remove-node', $event)" + @replace-node="emit('replace-node', $event)" + @move-node="emit('move-node', $event)" + />
@@ -56,6 +83,8 @@ import WorkflowNodeContent from '@baserow/modules/automation/components/workflow import WorkflowEdge from '@baserow/modules/automation/components/workflow/WorkflowEdge' import WorkflowConnector from '@baserow/modules/automation/components/workflow/WorkflowConnector' +const connectorHeight = 32 + const props = defineProps({ node: { type: Object, @@ -90,11 +119,24 @@ const instance = getCurrentInstance() const refs = instance.proxy.$refs const workflowNode = ref() +const children = ref() const nodeComponent = ref() const nodeType = computed(() => app.$registry.get('node', props.node.type)) const nodeEdges = computed(() => nodeType.value.getEdges({ node: props.node })) +const hasMultipleEdges = computed(() => nodeEdges.value.length > 1) + +const computeEdgeCoords = (wrapper, edgeElt, multiple = false) => { + const startX = edgeElt.offsetLeft + edgeElt.offsetWidth / 2 + const endX = wrapper.offsetWidth / 2 + + const startY = multiple ? connectorHeight * 2 : connectorHeight + const endY = 0 + + return { startX, endX, startY, endY } +} + /** * Compute all connector coordinates per edge */ @@ -103,16 +145,21 @@ const coordsPerEdge = computed(() => { return nodeEdges.value.map((edge) => { const wrap = workflowNode.value - const elt = nodeComponent.value.$el + const edgeElt = refs[`edge-${edge.uid}`][0].$el - const edgeElt = refs[`edge-${edge.uid}`][0] + return [edge.uid, computeEdgeCoords(wrap, edgeElt, hasMultipleEdges.value)] + }) +}) - const startX = edgeElt.offsetLeft + edgeElt.offsetWidth / 2 - const startY = elt.offsetHeight + 40 - const endX = wrap.offsetWidth / 2 - const endY = elt.offsetHeight +const childEdgeCoords = computed(() => { + if (nodeType.value.isContainer) { + if (!children.value) return { startX: 0, startY: 0, endX: 0, endY: 0 } - return [edge.uid, { startX, startY, endY, endX }] - }) + const wrap = children.value + const edgeElt = refs['child-edge'].$el + + return computeEdgeCoords(wrap, edgeElt) + } + return null }) diff --git 
a/web-frontend/modules/automation/components/workflow/WorkflowNodeContent.vue b/web-frontend/modules/automation/components/workflow/WorkflowNodeContent.vue index 5664019952..602083e648 100644 --- a/web-frontend/modules/automation/components/workflow/WorkflowNodeContent.vue +++ b/web-frontend/modules/automation/components/workflow/WorkflowNodeContent.vue @@ -1,106 +1,101 @@ @@ -110,7 +105,7 @@ import { useVueFlow } from '@vue2-flow/core' import { useStore, useContext, inject, computed } from '@nuxtjs/composition-api' import WorkflowNodeContext from '@baserow/modules/automation/components/workflow/WorkflowNodeContext' import flushPromises from 'flush-promises' -import { CoreRouterNodeType } from '@baserow/modules/automation/nodeTypes' +import NodeGraphHandler from '@baserow/modules/automation/utils/nodeGraphHandler' const { onMove } = useVueFlow() const props = defineProps({ @@ -229,11 +224,7 @@ const isInteractionReady = computed(() => { */ const displayLabel = computed(() => { return props.debug - ? app.i18n.t('workflowNode.displayLabelDebug', { - id: props.node.id, - previousNodeId: props.node.previous_node_id || 'none', - outputUid: props.node.previous_node_output || 'none', - }) + ? `ID: ${props.node.id}` : nodeType.value.getLabel({ automation: automation.value, node: props.node, @@ -267,30 +258,23 @@ const getDeleteErrorMessage = computed(() => { /** * This computed property determines the label that should be displayed - * before the node label in the workflow editor. It checks the previous node - * in the workflow to determine if it is a router node or if the current node - * is an output node. Based on these conditions, it returns the appropriate - * label for the node. - * @returns {string} - The label to display before the node label. + * before the node label in the workflow editor. + * @returns {string} - The label to display before the node. 
*/ const getDataBeforeLabel = computed(() => { - const previousNode = store.getters['automationWorkflowNode/getPreviousNode']( - workflow.value, - props.node - ) - // TODO use a generic way to handle that not specific to router node - const previousNodeIsRouter = - previousNode?.type === CoreRouterNodeType.getType() - const isOutputNode = props.node.previous_node_output.length > 0 - switch (true) { - case nodeType.value.isTrigger: - return app.i18n.t('workflowNode.beforeLabelTrigger') - case isOutputNode: - return app.i18n.t('workflowNode.beforeLabelCondition') - case previousNodeIsRouter && !isOutputNode: - return app.i18n.t('workflowNode.beforeLabelConditionDefault') - default: - return app.i18n.t('workflowNode.beforeLabelAction') + const [referenceNode, position, output] = new NodeGraphHandler( + workflow.value + ).getNodePosition(props.node) + + if (referenceNode === null) { + return app.i18n.t('workflowNode.beforeLabelTrigger') } + const referenceNodeType = app.$registry.get('node', referenceNode.type) + return referenceNodeType.getBeforeLabel({ + workflow: workflow.value, + node: referenceNode, + position, + output, + }) }) diff --git a/web-frontend/modules/automation/components/workflow/WorkflowNodeContext.vue b/web-frontend/modules/automation/components/workflow/WorkflowNodeContext.vue index a64302b5b8..f2a92056f9 100644 --- a/web-frontend/modules/automation/components/workflow/WorkflowNodeContext.vue +++ b/web-frontend/modules/automation/components/workflow/WorkflowNodeContext.vue @@ -1,42 +1,19 @@ diff --git a/web-frontend/modules/automation/dataProviderTypes.js b/web-frontend/modules/automation/dataProviderTypes.js index f1a5a0b47d..9e2a8ce1c0 100644 --- a/web-frontend/modules/automation/dataProviderTypes.js +++ b/web-frontend/modules/automation/dataProviderTypes.js @@ -23,7 +23,9 @@ export class PreviousNodeDataProviderType extends DataProviderType { const previousNodes = this.app.store.getters[ 'automationWorkflowNode/getPreviousNodes' - ](workflow, 
currentNode) + ](workflow, currentNode, { + predicate: (referenceNode, position, output) => position !== 'child', + }) const previousNodeSchema = _.chain(previousNodes) // Retrieve the associated schema for each node @@ -41,12 +43,17 @@ export class PreviousNodeDataProviderType extends DataProviderType { .flatMap((previousNodes) => previousNodes.map(([previousNode, schema], index) => [ previousNode.id, - { ...schema, title: `${schema.title} ${index ? index + 1 : ''}` }, + { + ...schema, + title: `${schema.title}${index ? ` ${index + 1}` : ''}`, + order: index, + }, ]) ) // Create the schema object .fromPairs() .value() + return { type: 'object', properties: previousNodeSchema } } @@ -67,3 +74,87 @@ export class PreviousNodeDataProviderType extends DataProviderType { return super.getPathTitle(applicationContext, pathParts) } } + +export class CurrentIterationDataProviderType extends DataProviderType { + static getType() { + return 'current_iteration' + } + + get name() { + return this.app.i18n.t('dataProviderType.currentIteration') + } + + getNodeSchema({ automation, node }) { + if (node?.type) { + const nodeType = this.app.$registry.get('node', node.type) + return nodeType.getDataSchema({ automation, node }) + } + return null + } + + getDataSchema(applicationContext) { + const { automation, workflow, node: currentNode } = applicationContext + + const ancestors = this.app.store.getters[ + 'automationWorkflowNode/getAncestors' + ](workflow, currentNode) + + const ancestorsSchema = _.chain(ancestors) + // Retrieve the associated schema for each node + .map((ancestor, index) => { + const schema = this.getNodeSchema({ automation, node: ancestor }) + if (!schema) { + return [ancestor, null] + } + return [ + ancestor, + { + title: schema.title, + order: index, + type: 'object', + properties: { + item: { + ...schema.items, + title: this.app.i18n.t('dataProviderType.item'), + }, + index: { type: 'number', title: 'index' }, + }, + }, + ] + }) + // Remove nodes without schema 
+ .filter(([_, schema]) => schema) + // Add an index number to the schema title for each node of the same + // schema title. For example if we have two "Create a row in Customers" + // nodes, then the schema titles will be: + // [Create a row in Customers, Create a row in Customers 2] + .groupBy('1.title') + .flatMap((ancestors) => + ancestors.map(([previousNode, schema], index) => [ + previousNode.id, + { ...schema, title: `${schema.title} ${index ? index + 1 : ''}` }, + ]) + ) + // Create the schema object + .fromPairs() + .value() + return { type: 'object', properties: ancestorsSchema } + } + + getPathTitle(applicationContext, pathParts) { + if (pathParts.length === 2) { + const workflow = applicationContext?.workflow + const nodeId = parseInt(pathParts[1]) + + const node = this.app.store.getters['automationWorkflowNode/findById']( + workflow, + nodeId + ) + + if (!node) { + return `node_${nodeId}` + } + } + return super.getPathTitle(applicationContext, pathParts) + } +} diff --git a/web-frontend/modules/automation/enums.js b/web-frontend/modules/automation/enums.js index bb1718506e..76389f9415 100644 --- a/web-frontend/modules/automation/enums.js +++ b/web-frontend/modules/automation/enums.js @@ -1,4 +1,7 @@ -import { PreviousNodeDataProviderType } from '@baserow/modules/automation/dataProviderTypes' +import { + PreviousNodeDataProviderType, + CurrentIterationDataProviderType, +} from '@baserow/modules/automation/dataProviderTypes' /** * A list of all the data providers that can be used to configure automation nodes. 
@@ -6,5 +9,6 @@ import { PreviousNodeDataProviderType } from '@baserow/modules/automation/dataPr * @type {String[]} */ export const DATA_PROVIDERS_ALLOWED_NODE_ACTIONS = [ + CurrentIterationDataProviderType.getType(), PreviousNodeDataProviderType.getType(), ] diff --git a/web-frontend/modules/automation/guidedTourTypes.js b/web-frontend/modules/automation/guidedTourTypes.js index c5254fb1ed..40f419fdf4 100644 --- a/web-frontend/modules/automation/guidedTourTypes.js +++ b/web-frontend/modules/automation/guidedTourTypes.js @@ -21,57 +21,25 @@ class WelcomeGuidedTourStep extends GuidedTourStep { } } -class TriggerGuidedTourStep extends GuidedTourStep { +class GraphGuidedTourStep extends GuidedTourStep { get title() { - return this.app.i18n.t('triggerGuidedTourStep.title') + return this.app.i18n.t('graphGuidedTourStep.title') } get content() { - return this.app.i18n.t('triggerGuidedTourStep.content') + return this.app.i18n.t('graphGuidedTourStep.content') } get selectors() { - return ['[data-highlight="automation-trigger"]'] + return ['.workflow-editor'] } get position() { - return 'bottom-center' - } -} - -class ActionGuidedTourStep extends GuidedTourStep { - get title() { - return this.app.i18n.t('actionGuidedTourStep.title') - } - - get content() { - return this.app.i18n.t('actionGuidedTourStep.content') - } - - get selectors() { - return ['[data-highlight="automation-add-node-btn"]'] - } - - get position() { - return 'bottom-center' - } -} - -class NodeSidepanelGuidedTourStep extends GuidedTourStep { - get title() { - return this.app.i18n.t('nodeSidepanelGuidedTourStep.title') - } - - get content() { - return this.app.i18n.t('nodeSidepanelGuidedTourStep.content') - } - - get selectors() { - return ['[data-highlight="automation-node-sidepanel"]'] + return 'center' } - get position() { - return 'left-top' + get highlightPadding() { + return 0 } } @@ -173,9 +141,7 @@ export class AutomationGuidedTourType extends GuidedTourType { get steps() { return [ new 
WelcomeGuidedTourStep(this.app), - new TriggerGuidedTourStep(this.app), - new ActionGuidedTourStep(this.app), - new NodeSidepanelGuidedTourStep(this.app), + new GraphGuidedTourStep(this.app), new HistoryGuidedTourStep(this.app), new TestRunGuidedTourStep(this.app), new PublishGuidedTourStep(this.app), diff --git a/web-frontend/modules/automation/locales/en.json b/web-frontend/modules/automation/locales/en.json index 64d71449e2..e84e286988 100644 --- a/web-frontend/modules/automation/locales/en.json +++ b/web-frontend/modules/automation/locales/en.json @@ -1,20 +1,11 @@ - { "welcomeGuidedTourStep": { "title": "Welcome to Baserow Automations!", "content": "Let's take a quick tour. You'll see how to set up triggers, add actions, test your workflow, and publish it — no coding required." }, - "triggerGuidedTourStep": { - "title": "Workflows start with a trigger", - "content": "Every workflow begins with a trigger. By default, this is an interval trigger, but you can replace it with other types to suit your needs. Triggers can be replaced, but not deleted." - }, - "actionGuidedTourStep": { - "title": "Workflows then execute actions", - "content": "Add one or more actions after your trigger to define what the workflow should do. Click the highlighted button to get started." - }, - "nodeSidepanelGuidedTourStep": { - "title": "Configure your steps", - "content": "Select a trigger or action to configure it in the side panel. Each step's settings can be adjusted here." + "graphGuidedTourStep": { + "title": "Manage your workflow graph here", + "content": "Create a trigger and actions here to define what the workflow should do. Select a trigger or action to configure it in the side panel." 
}, "testRunGuidedTourStep": { "title": "Test your workflow", @@ -88,6 +79,8 @@ "lastPublished": "Last published" }, "dataProviderType": { + "currentIteration": "Current iteration", + "item": "Item", "previousNode": "Previous node" }, "nodeSidePanel": { @@ -118,9 +111,12 @@ "httpRequestLabel": "Send an HTTP request", "smtpEmailLabel": "Send an email", "routerLabel": "Follow 1 of {edgeCount} branches", + "iterationLabel": "Iterate on items", "routerDefaultEdgeLabelFallback": "Default", "routerWithOutputNodesDeleteError": "Cannot be deleted until its {outputCount} output nodes are removed.", "routerWithOutputNodesReplaceError": "Cannot be replaced until its {outputCount} output nodes are removed.", + "iteratorWithChildrenNodesDeleteError": "Cannot be deleted until its child nodes are removed.", + "iteratorWithChildrenNodesReplaceError": "Cannot be replaced until its child nodes are removed.", "periodicTriggerLabel": "Periodic trigger" }, "workflowNode": { @@ -132,14 +128,13 @@ "beforeLabelAction": "Then", "beforeLabelCondition": "If condition is met", "beforeLabelConditionDefault": "If no condition(s) are met", + "beforeLabelRepeat": "For each item", "moreEdit": "Edit", "moreReplace": "Replace", - "nodeOptions": "Node options", - "displayLabelDebug": "ID: {id} | Prev: {previousNodeId} | {outputUid}" + "nodeOptions": "Node options" }, "workflowAddNode": { - "displayTitle": "Create automation node", - "displayTitleDebug": "ID: {id} | {outputUid}" + "displayTitle": "Create automation node" }, "workflowNodeContext": { "searchPlaceholderTrigger": "Search for a trigger...", @@ -177,16 +172,19 @@ "deactivatedTitle": "Periodic trigger deactivated", "deactivatedText": "This periodic trigger has been automatically deactivated due to consecutive failures." }, + "workflowEditor": { + "chooseEvent": "Choose an event..." 
+ }, "simulateDispatch": { "sampleDataLabel": "Output", - "triggerNodeAwaitingEvent": "This trigger node is waiting for an event.", + "triggerNodeAwaitingEvent": "The node is waiting. Please manually trigger the event to populate the payload and complete the test.", "errorNodeNotConfigured": "The Node must be configured before it can be tested.", - "errorPreviousNodeNotConfigured": "All previous nodes must be configured.", - "errorPreviousNodesNotTested": "All previous nodes must be tested.", - "buttonLabelShowPayload": "Show payload", + "errorPreviousNodeNotConfigured": "The previous node \"{node}\" must be configured.", + "errorPreviousNodesNotTested": "The previous node \"{node}\" must be tested first.", "buttonLabelTest": "Test event", "buttonLabelTestAgain": "Test event again", "testNodeDescription": "Test this event to confirm the configuration is correct. Data from this test can be used in later steps.", + "buttonLabelShowPayload": "Show payload", "sampleDataModalTitle": "Payload for {nodeLabel}", "sampleDataModalSubTitle": "JSON payload", "sampleDataCopy": "Copy", diff --git a/web-frontend/modules/automation/nodeTypeMixins.js b/web-frontend/modules/automation/nodeTypeMixins.js index 4efcc463f1..0fa94ef11f 100644 --- a/web-frontend/modules/automation/nodeTypeMixins.js +++ b/web-frontend/modules/automation/nodeTypeMixins.js @@ -31,3 +31,8 @@ export const UtilityNodeMixin = (Base) => class extends Base { isUtilityNode = true } + +export const containerNodeTypeMixin = (Base) => + class extends Base { + isContainer = true + } diff --git a/web-frontend/modules/automation/nodeTypes.js b/web-frontend/modules/automation/nodeTypes.js index e351f7de29..d3f042dd3b 100644 --- a/web-frontend/modules/automation/nodeTypes.js +++ b/web-frontend/modules/automation/nodeTypes.js @@ -3,6 +3,7 @@ import { ActionNodeTypeMixin, TriggerNodeTypeMixin, UtilityNodeMixin, + containerNodeTypeMixin, } from '@baserow/modules/automation/nodeTypeMixins' import { 
LocalBaserowCreateRowWorkflowServiceType, @@ -21,6 +22,7 @@ import { CoreRouterServiceType, CoreSMTPEmailServiceType, CoreHTTPTriggerServiceType, + CoreIteratorServiceType, } from '@baserow/modules/integrations/core/serviceTypes' import { uuid } from '@baserow/modules/core/utils/string' @@ -55,6 +57,13 @@ export class NodeType extends Registerable { return node.label || this.getDefaultLabel({ automation, node }) } + /** + * Returns the text to be displayed on the graph just before the node. + */ + getBeforeLabel({ workflow, node }) { + return this.app.i18n.t('workflowNode.beforeLabelAction') + } + /** * The node type's description. * The description is derived from the service type's description. @@ -77,8 +86,9 @@ export class NodeType extends Registerable { * The icon which is shown inside the editor's node. * @returns {string} - The node's icon class. */ + get iconClass() { - return 'iconoir-table' + return this.serviceType.icon } /** @@ -207,15 +217,23 @@ export class NodeType extends Registerable { const serviceSchema = this.serviceType.getDataSchema(node.service) if (serviceSchema) { return { - type: this.dataType, + ...serviceSchema, title: this.getLabel({ automation, node }), - properties: serviceSchema.properties || {}, - items: serviceSchema.items || [], } } return null } + /** + * Returns the sample data for this node. 
+ */ + getSampleData({ service }) { + if (!service) { + return null + } + return this.serviceType.getSampleData(service) + } + getEdges({ node }) { return [{ uid: '', label: '' }] } @@ -582,17 +600,81 @@ export class CoreHttpRequestNodeType extends ActionNodeTypeMixin(NodeType) { } } -export class CoreSMTPEmailNodeType extends ActionNodeTypeMixin(NodeType) { +export class CoreIteratorNodeType extends containerNodeTypeMixin( + ActionNodeTypeMixin(NodeType) +) { static getType() { - return 'smtp_email' + return 'iterator' } getOrder() { return 8 } - get iconClass() { - return 'iconoir-send-mail' + get name() { + return this.app.i18n.t('nodeType.iterationLabel') + } + + get serviceType() { + return this.app.$registry.get('service', CoreIteratorServiceType.getType()) + } + + /** + * Responsible for checking if the router node can be deleted. It can't be + * if it has output nodes connected to its edges. + * @param workflow - The workflow the router belongs to. + * @param node - The router node for which the deletability is being checked. + * @returns {string} - An error message if the router cannot be deleted. + */ + getDeleteErrorMessage({ workflow, node }) { + const children = this.app.store.getters[ + 'automationWorkflowNode/getChildren' + ](workflow, node) + const count = children.length + if (count) { + return this.app.i18n.t('nodeType.iteratorWithChildrenNodesDeleteError', { + count, + }) + } + return '' + } + + getBeforeLabel({ workflow, node, position, output }) { + if (position === 'child') { + return this.app.i18n.t('workflowNode.beforeLabelRepeat') + } + + return super.getBeforeLabel({ workflow, node, position, output }) + } + + /** + * Responsible for checking if the router node can be replaced. It can't be + * if it has output nodes connected to its edges. + * @param workflow - The workflow the router belongs to. + * @param node - The router node for which the replaceability is being checked. 
+ * @returns {string} - An error message if the router cannot be replaced. + */ + getReplaceErrorMessage({ workflow, node }) { + const children = this.app.store.getters[ + 'automationWorkflowNode/getChildren' + ](workflow, node) + const count = children.length + if (count) { + return this.app.i18n.t('nodeType.iteratorWithChildrenNodesReplaceError', { + count, + }) + } + return '' + } +} + +export class CoreSMTPEmailNodeType extends ActionNodeTypeMixin(NodeType) { + static getType() { + return 'smtp_email' + } + + getOrder() { + return 8 } get name() { @@ -621,6 +703,13 @@ export class CoreRouterNodeType extends ActionNodeTypeMixin( return true } + getBeforeLabel({ workflow, node, position, output }) { + if (output.length > 0) { + return this.app.i18n.t('workflowNode.beforeLabelCondition') + } + return this.app.i18n.t('workflowNode.beforeLabelConditionDefault') + } + getOrder() { return 9 } @@ -634,10 +723,6 @@ export class CoreRouterNodeType extends ActionNodeTypeMixin( : this.name } - get iconClass() { - return 'iconoir-git-fork' - } - get serviceType() { return this.app.$registry.get('service', CoreRouterServiceType.getType()) } @@ -706,13 +791,9 @@ export class CoreRouterNodeType extends ActionNodeTypeMixin( * @returns {Array} - An array of output nodes that are connected to the router's edges. 
*/ getOutputNodes({ workflow, router }) { - const edgeUids = this.getEdges({ node: router }).map((edge) => edge.uid) - return this.app.store.getters['automationWorkflowNode/getNodes']( - workflow - ).filter( - (node) => - node.previous_node_id === router.id && - edgeUids.includes(node.previous_node_output) + return this.app.store.getters['automationWorkflowNode/getNextNodes']( + workflow, + router ) } diff --git a/web-frontend/modules/automation/pages/automationWorkflow.vue b/web-frontend/modules/automation/pages/automationWorkflow.vue index 729a2314b5..fa3e4b28d9 100644 --- a/web-frontend/modules/automation/pages/automationWorkflow.vue +++ b/web-frontend/modules/automation/pages/automationWorkflow.vue @@ -1,5 +1,5 @@