Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions backend/.flake8
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ per-file-ignores =
exclude =
.git,
__pycache__,
src/baserow/config/settings/local.py,
src/baserow/core/formula/parser/generated


Expand Down
3 changes: 2 additions & 1 deletion backend/src/baserow/contrib/automation/nodes/node_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,8 @@ def after_create(self, node: CoreRouterActionNode):
:param node: The router node instance that was just created.
"""

node.service.edges.create(label=_("Branch"))
if not len(node.service.edges.all()):
node.service.edges.create(label=_("Branch"))

def prepare_values(
self,
Expand Down
24 changes: 24 additions & 0 deletions backend/src/baserow/contrib/automation/workflows/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,30 @@ def get_workflow(self, user: AbstractUser, workflow_id: int) -> AutomationWorkfl

return workflow

def list_workflows(
    self, user: AbstractUser, automation_id: int
) -> List[AutomationWorkflow]:
    """
    Returns every workflow of the given automation that the user is
    permitted to read.

    :param user: The user requesting the workflows.
    :param automation_id: The automation to which the workflows belong.
    :return: A list of AutomationWorkflow instances.
    """

    automation = AutomationHandler().get_automation(automation_id)

    # Fetch all workflows of the automation, then narrow them down to the
    # ones this user is allowed to read.
    workflows = self.handler.get_workflows(
        automation, base_queryset=AutomationWorkflow.objects
    )
    return CoreHandler().filter_queryset(
        user,
        ReadAutomationWorkflowOperationType.type,
        workflows,
        workspace=automation.workspace,
    )

def create_workflow(
self,
user: AbstractUser,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -170,9 +170,6 @@ def get_data_chunk(self, dispatch_context: BuilderDispatchContext, path: List[st
# The data source has probably been deleted
raise InvalidRuntimeFormula() from exc

# Declare the call and check for recursion
dispatch_context.add_call(data_source.id)

dispatch_result = DataSourceHandler().dispatch_data_source(
data_source, dispatch_context
)
Expand Down
26 changes: 10 additions & 16 deletions backend/src/baserow/contrib/builder/data_sources/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,16 +508,12 @@ def dispatch_data_sources(
data_sources_dispatch[data_source.id] = {}
continue

# Add the initial call to the call stack
dispatch_context.add_call(data_source.id)
try:
data_sources_dispatch[data_source.id] = self.dispatch_data_source(
data_source, dispatch_context
)
except Exception as e:
data_sources_dispatch[data_source.id] = e
# Reset the stack as we are starting a new dispatch
dispatch_context.reset_call_stack()

return data_sources_dispatch

Expand All @@ -538,21 +534,12 @@ def dispatch_data_source(
raise ServiceImproperlyConfiguredDispatchException(
"The service type is missing."
)

cache = dispatch_context.cache
call_stack = dispatch_context.call_stack

page = dispatch_context.page
current_data_source_dispatched = dispatch_context.data_source or data_source

dispatch_context = dispatch_context.clone(
data_source=current_data_source_dispatched,
)

# keep the call stack
dispatch_context.call_stack = call_stack

if current_data_source_dispatched != data_source:
data_sources = self.get_data_sources_with_cache(dispatch_context.page)
data_sources = self.get_data_sources_with_cache(page)
ordered_ids = [d.id for d in data_sources]
if ordered_ids.index(current_data_source_dispatched.id) < ordered_ids.index(
data_source.id
Expand All @@ -561,9 +548,16 @@ def dispatch_data_source(
"You can't reference a data source after the current data source"
)

# Clone the dispatch context to keep the call stack as it is
cloned_dispatch_context = dispatch_context.clone(
data_source=current_data_source_dispatched
)
# Declare the call and check for recursion
cloned_dispatch_context.add_call(data_source.id)

if data_source.id not in cache.setdefault("data_source_contents", {}):
service_dispatch = self.service_handler.dispatch_service(
data_source.service.specific, dispatch_context
data_source.service.specific, cloned_dispatch_context
)

# Cache the dispatch in the formula cache if we have formulas that need
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -240,26 +240,20 @@ def test_data_source_data_provider_get_data_chunk_with_list_data_source(data_fix
== "Blue"
)

dispatch_context.reset_call_stack()

assert (
data_source_provider.get_data_chunk(
dispatch_context, [data_source.id, "2", fields[1].db_column]
)
== "White"
)

dispatch_context.reset_call_stack()

assert (
data_source_provider.get_data_chunk(
dispatch_context, [data_source.id, "0", "id"]
)
== rows[0].id
)

dispatch_context.reset_call_stack()

assert data_source_provider.get_data_chunk(
dispatch_context, [data_source.id, "*", fields[1].db_column]
) == ["Blue", "Orange", "White", "Green"]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
import json
from unittest.mock import MagicMock

import pytest

from baserow.contrib.builder.data_sources.builder_dispatch_context import (
BuilderDispatchContext,
)
from baserow.contrib.builder.workflow_actions.exceptions import (
WorkflowActionNotInElement,
)
Expand All @@ -15,6 +21,7 @@
OpenPageWorkflowActionType,
)
from baserow.core.services.models import Service
from baserow.test_utils.helpers import AnyInt, AnyStr


@pytest.mark.django_db
Expand Down Expand Up @@ -229,3 +236,64 @@ def test_order_workflow_actions_different_scopes(data_fixture):
)

assert page_workflow_action.order == element_workflow_action.order


@pytest.mark.django_db
def test_dispatch_workflow_action_doesnt_trigger_formula_recursion(data_fixture):
    """
    Dispatching a workflow action whose formula references the same data
    source twice must not raise a formula recursion error.
    """

    user, token = data_fixture.create_user_and_token()
    workspace = data_fixture.create_workspace(user=user)
    database = data_fixture.create_database_application(workspace=workspace)
    builder = data_fixture.create_builder_application(workspace=workspace)
    table, fields, rows = data_fixture.build_table(
        user=user,
        columns=[("Name", "text"), ("My Color", "text")],
        rows=[
            ["BMW", "Blue"],
            ["Audi", "Orange"],
            ["Volkswagen", "White"],
            ["Volkswagen", "Green"],
        ],
    )
    page = data_fixture.create_builder_page(builder=builder)
    element = data_fixture.create_builder_button_element(page=page)
    integration = data_fixture.create_local_baserow_integration(
        application=builder, user=user, authorized_user=user
    )
    data_source = data_fixture.create_builder_local_baserow_list_rows_data_source(
        integration=integration,
        page=page,
        table=table,
    )
    service = data_fixture.create_local_baserow_upsert_row_service(
        table=table,
        integration=integration,
    )

    # The formula references the same data source twice; this used to trigger
    # a false-positive recursion error during dispatch.
    name_ref = f'get("data_source.{data_source.id}.0.{fields[0].db_column}")'
    color_ref = f'get("data_source.{data_source.id}.0.{fields[1].db_column}")'
    service.field_mappings.create(
        field=fields[0],
        value=f"concat({name_ref}, {color_ref})",
    )
    workflow_action = data_fixture.create_local_baserow_create_row_workflow_action(
        page=page, service=service, element=element, event=EventTypes.CLICK
    )

    request = MagicMock()
    request.data = {"metadata": json.dumps({})}

    dispatch_context = BuilderDispatchContext(
        request, page, only_expose_public_allowed_properties=False
    )

    result = BuilderWorkflowActionHandler().dispatch_workflow_action(
        workflow_action, dispatch_context
    )

    # The second formula reference resolves to the first row's color, so the
    # created row's name is the concatenation of both referenced values.
    assert result.data == {
        "id": AnyInt(),
        "order": AnyStr(),
        "Name": "AudiOrange",
        "My Color": None,
    }
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"type": "bug",
"message": "Fix formula recursion error when the same data source is used twice in one formula of a workflow action",
"domain": "builder",
"issue_number": 4195,
"bullet_points": [],
"created_at": "2025-11-10"
}
13 changes: 9 additions & 4 deletions enterprise/backend/src/baserow_enterprise/apps.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,18 +303,20 @@ def ready(self):
notification_type_registry.register(TwoWaySyncDeactivatedNotificationType())

from baserow_enterprise.assistant.tools import (
CreateDatabaseToolType,
CreateBuildersToolType,
CreateFieldsToolType,
CreateTablesToolType,
CreateViewFiltersToolType,
CreateViewsToolType,
CreateWorkflowsToolType,
GenerateDatabaseFormulaToolType,
GetRowsToolsToolType,
GetTablesSchemaToolType,
ListDatabasesToolType,
ListBuildersToolType,
ListRowsToolType,
ListTablesToolType,
ListViewsToolType,
ListWorkflowsToolType,
NavigationToolType,
SearchDocsToolType,
)
Expand All @@ -325,8 +327,8 @@ def ready(self):
assistant_tool_registry.register(SearchDocsToolType())
assistant_tool_registry.register(NavigationToolType())

assistant_tool_registry.register(ListDatabasesToolType())
assistant_tool_registry.register(CreateDatabaseToolType())
assistant_tool_registry.register(ListBuildersToolType())
assistant_tool_registry.register(CreateBuildersToolType())
assistant_tool_registry.register(ListTablesToolType())
assistant_tool_registry.register(CreateTablesToolType())
assistant_tool_registry.register(GetTablesSchemaToolType())
Expand All @@ -338,6 +340,9 @@ def ready(self):
assistant_tool_registry.register(CreateViewsToolType())
assistant_tool_registry.register(CreateViewFiltersToolType())

assistant_tool_registry.register(ListWorkflowsToolType())
assistant_tool_registry.register(CreateWorkflowsToolType())

# The signals must always be imported last because they use the registries
# which need to be filled first.
import baserow_enterprise.audit_log.signals # noqa: F
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,7 @@ def _init_lm_client(self):
model=lm_model,
cache=not settings.DEBUG,
max_retries=5,
max_tokens=32000,
)

def _init_assistant(self):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,15 +38,15 @@
AUTOMATION_BUILDER_CONCEPTS = """
### AUTOMATIONS (no-code automation builder)

**Structure**: Automation → Workflows → Triggers + Actions + Routers (Nodes)
**Structure**: Automation → Workflows → Trigger + Actions + Routers (Nodes)

**Key concepts**:
• **Triggers**: Events that start automations (e.g., row created/updated, view accessed)
• **Trigger**: The single event that starts the workflow (e.g., row created/updated/deleted)
• **Actions**: Tasks performed (e.g., create/update rows, send emails, call webhooks)
• **Routers**: Conditional logic (if/else, switch) to control flow
• **Execution**: Runs in the background; monitor via logs
• **History**: Track runs, successes, failures
• **Publishing**: Requires domain configuration
• **Publishing**: Requires at least one configured action
"""

ASSISTANT_SYSTEM_PROMPT = (
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from .automation.tools import * # noqa: F401, F403
from .core.tools import * # noqa: F401, F403
from .database.tools import * # noqa: F401, F403
from .navigation.tools import * # noqa: F401, F403
from .search_docs.tools import * # noqa: F401, F403
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Package entry point for the automation assistant tools: re-exports the
# workflow tool types so callers can import them from this package directly.
from .tools import CreateWorkflowsToolType, ListWorkflowsToolType

# Explicit public API of this package.
__all__ = [
    "ListWorkflowsToolType",
    "CreateWorkflowsToolType",
]
Loading
Loading