From cdc53d1143c574388c166d304d20cf2ed26bfe93 Mon Sep 17 00:00:00 2001 From: Blake Gentry Date: Thu, 7 May 2026 22:29:27 -0500 Subject: [PATCH 1/4] upgrade River Pro workflow dependencies Update the River Pro module set used by `riverproui` so the backend can consume the newer workflow data exposed for workflow v2 wait, timer, and signal support. --- riverproui/go.mod | 21 ++++++------ riverproui/go.sum | 34 +++++++++++++------ .../prohandler/pro_handler_api_endpoints.go | 6 +++- 3 files changed, 40 insertions(+), 21 deletions(-) diff --git a/riverproui/go.mod b/riverproui/go.mod index 887be5e3..a6e6a9fe 100644 --- a/riverproui/go.mod +++ b/riverproui/go.mod @@ -13,18 +13,21 @@ require ( github.com/riverqueue/river/rivershared v0.35.1 github.com/riverqueue/river/rivertype v0.35.1 github.com/stretchr/testify v1.11.1 - riverqueue.com/riverpro v0.23.2 - riverqueue.com/riverpro/driver v0.23.2 - riverqueue.com/riverpro/driver/riverpropgxv5 v0.23.2 + riverqueue.com/riverpro v0.24.0-rc.1 + riverqueue.com/riverpro/driver v0.24.0-rc.1 + riverqueue.com/riverpro/driver/riverpropgxv5 v0.24.0-rc.1 riverqueue.com/riverui v0.15.0 ) require ( + cel.dev/expr v0.25.1 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/gabriel-vasile/mimetype v1.4.13 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.30.1 // indirect + github.com/google/cel-go v0.27.0 // indirect github.com/jackc/pgerrcode v0.0.0-20250907135507-afb5586c32a6 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect @@ -42,9 +45,13 @@ require ( go.opentelemetry.io/otel/trace v1.29.0 // indirect go.uber.org/goleak v1.3.0 // indirect golang.org/x/crypto v0.48.0 // indirect + golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 // indirect golang.org/x/sync 
v0.20.0 // indirect golang.org/x/sys v0.41.0 // indirect golang.org/x/text v0.36.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d // indirect + google.golang.org/protobuf v1.36.11 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) @@ -53,10 +60,4 @@ retract ( v0.12.0 // Improper release process, not fully usable ) -// replace riverqueue.com/riverui => ../ - -// replace riverqueue.com/riverpro => ../../riverpro - -// replace riverqueue.com/riverpro/driver => ../../riverpro/driver - -// replace riverqueue.com/riverpro/driver/riverpropgxv5 => ../../riverpro/driver/riverpropgxv5 +replace riverqueue.com/riverui => ../ diff --git a/riverproui/go.sum b/riverproui/go.sum index bd13a226..86cdfdab 100644 --- a/riverproui/go.sum +++ b/riverproui/go.sum @@ -1,3 +1,7 @@ +cel.dev/expr v0.25.1 h1:1KrZg61W6TWSxuNZ37Xy49ps13NUovb66QLprthtwi4= +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= +github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= +github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -11,8 +15,10 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= 
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/cel-go v0.27.0 h1:e7ih85+4qVrBuqQWTW4FKSqZYokVuc3HnhH5keboFTo= +github.com/google/cel-go v0.27.0/go.mod h1:tTJ11FWqnhw5KKpnWpvW9CJC3Y9GK4EIS0WXnBbebzw= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/jackc/pgerrcode v0.0.0-20250907135507-afb5586c32a6 h1:D/V0gu4zQ3cL2WKeVNVM4r2gLxGGf6McLwgXzRTo2RQ= @@ -75,25 +81,33 @@ go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt3 go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA= +golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4= golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0= golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg= golang.org/x/text v0.36.0/go.mod 
h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164= +google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171 h1:tu/dtnW1o3wfaxCOjSLn5IRX4YDcJrtlpzYkhHhGaC4= +google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171/go.mod h1:M5krXqk4GhBKvB596udGL3UyjL4I1+cTbK0orROM9ng= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d h1:t/LOSXPJ9R0B6fnZNyALBRfZBH0Uy0gT+uR+SJ6syqQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -riverqueue.com/riverpro v0.23.2 h1:sLa3yu4X+Pp4CCs3qqjsTpjbpppe6CPs+UoE6oXaRHI= -riverqueue.com/riverpro v0.23.2/go.mod h1:GB/taad40wxHlLegaKV6kr/4h99HPUaJkWqADJKsp7M= -riverqueue.com/riverpro/driver v0.23.2 h1:tg0gvR+R4M3HzET358FwuSdv5TnBFrJXHzPPtS3CSpQ= -riverqueue.com/riverpro/driver v0.23.2/go.mod h1:d9s4bU6SeNl76KUp+io4k/EpJi0dvAO6NCcoXeo5q7k= -riverqueue.com/riverpro/driver/riverpropgxv5 v0.23.2 h1:Uceq9aDer9xuZCPlwj4Y+GP7lYpKCbyJtWBp+igNrUs= -riverqueue.com/riverpro/driver/riverpropgxv5 v0.23.2/go.mod h1:aYZgQfQ3DSEsJEKLVZ3cKj1BhpWZZWDsxl3AP1N3bUw= -riverqueue.com/riverui v0.15.0 h1:7Xm/tqv63jZrGSv4X2u4zpAvbtXSs835Qk4RFonBDdk= -riverqueue.com/riverui v0.15.0/go.mod 
h1:J4fH8+zPe1cqmYWuMWVJdDdMmq1U2UPVofyOczGZNnw= +riverqueue.com/riverpro v0.24.0-rc.1 h1:4f4hlA9rb8RmtmvKlSudCXZgIv03ypFazIkqjwUOlyQ= +riverqueue.com/riverpro v0.24.0-rc.1/go.mod h1:jINiXsyxuMRNrf21KLLfO2JpDqRPjZWl98cO7Acb8jg= +riverqueue.com/riverpro/driver v0.24.0-rc.1 h1:G4hC22saVptb81bK/Ggj1QSA7COGI7lTAjz0GVgKR48= +riverqueue.com/riverpro/driver v0.24.0-rc.1/go.mod h1:d9s4bU6SeNl76KUp+io4k/EpJi0dvAO6NCcoXeo5q7k= +riverqueue.com/riverpro/driver/riverpropgxv5 v0.24.0-rc.1 h1:JHb3u57fnbG0hjuPStrPZw+sCg6b3TpHpRnl0PlBA/Q= +riverqueue.com/riverpro/driver/riverpropgxv5 v0.24.0-rc.1/go.mod h1:7W60TU/5AAuCbU/XaRnxbmGyDZu84SA5XVQBwgYKEW0= diff --git a/riverproui/internal/prohandler/pro_handler_api_endpoints.go b/riverproui/internal/prohandler/pro_handler_api_endpoints.go index 36d8256f..bda2f791 100644 --- a/riverproui/internal/prohandler/pro_handler_api_endpoints.go +++ b/riverproui/internal/prohandler/pro_handler_api_endpoints.go @@ -261,7 +261,7 @@ func (a *workflowGetEndpoint[TTx]) Execute(ctx context.Context, req *workflowGet } return &workflowGetResponse{ - Tasks: sliceutil.Map(jobs, internalJobToSerializableJob), + Tasks: sliceutil.Map(jobs, internalWorkflowTaskWithJobToSerializableJob), }, nil } @@ -472,6 +472,10 @@ func internalJobToSerializableJob(internal *rivertype.JobRow) *riverJobSerializa } } +func internalWorkflowTaskWithJobToSerializableJob(internal *riverprodriver.WorkflowTaskWithJob) *riverJobSerializable { + return internalJobToSerializableJob(internal.Job) +} + type workflowListItem struct { CountAvailable int `json:"count_available"` CountCancelled int `json:"count_cancelled"` From e55635a9f4678eff1123ae0ec90f555b20c14ffa Mon Sep 17 00:00:00 2001 From: Blake Gentry Date: Thu, 7 May 2026 22:31:55 -0500 Subject: [PATCH 2/4] add workflow v2 backend API support Expose workflow v2 task data from the River Pro API, including task wait reasons and the newer backend workflow fields needed by the UI. 
The handler tests now exercise the serialized workflow detail shape instead of relying on frontend-derived placeholders. The wait-reason mapping uses a direct switch over the River Pro enum so unknown values continue to fall back to the existing `none` state. --- handler_test.go | 2 +- internal/handlertest/handlertest.go | 4 +- riverproui/endpoints.go | 16 +- riverproui/endpoints_test.go | 27 +- .../prohandler/pro_handler_api_endpoints.go | 247 +++++++++++++++++- .../pro_handler_api_endpoints_test.go | 193 ++++++++++++-- .../protestfactory/pro_test_factory.go | 3 +- riverproui/pro_handler_test.go | 78 ++++-- 8 files changed, 501 insertions(+), 69 deletions(-) diff --git a/handler_test.go b/handler_test.go index 3fb7ece7..7a52ad6a 100644 --- a/handler_test.go +++ b/handler_test.go @@ -62,7 +62,7 @@ func TestNewHandlerIntegration(t *testing.T) { return server } - testRunner := func(exec riverdriver.Executor, makeAPICall handlertest.APICallFunc) { + testRunner := func(exec riverdriver.Executor, _ riverdriver.Driver[pgx.Tx], makeAPICall handlertest.APICallFunc) { ctx := context.Background() makeURL := fmt.Sprintf diff --git a/internal/handlertest/handlertest.go b/internal/handlertest/handlertest.go index 6bfd3b19..0e0aed54 100644 --- a/internal/handlertest/handlertest.go +++ b/internal/handlertest/handlertest.go @@ -20,7 +20,7 @@ import ( type APICallFunc = func(t *testing.T, testCaseName, method, path string, payload []byte) -func RunIntegrationTest[TClient any](t *testing.T, createClient func(ctx context.Context, tb testing.TB, logger *slog.Logger) (TClient, riverdriver.Driver[pgx.Tx], pgx.Tx), createBundle func(client TClient, tx pgx.Tx) uiendpoints.Bundle, createHandler func(t *testing.T, bundle uiendpoints.Bundle) http.Handler, testRunner func(exec riverdriver.Executor, makeAPICall APICallFunc)) { +func RunIntegrationTest[TClient any](t *testing.T, createClient func(ctx context.Context, tb testing.TB, logger *slog.Logger) (TClient, riverdriver.Driver[pgx.Tx], 
pgx.Tx), createBundle func(client TClient, tx pgx.Tx) uiendpoints.Bundle, createHandler func(t *testing.T, bundle uiendpoints.Bundle) http.Handler, testRunner func(exec riverdriver.Executor, dbDriver riverdriver.Driver[pgx.Tx], makeAPICall APICallFunc)) { t.Helper() var ( @@ -70,5 +70,5 @@ func RunIntegrationTest[TClient any](t *testing.T, createClient func(ctx context }) } - testRunner(exec, makeAPICall) + testRunner(exec, driver, makeAPICall) } diff --git a/riverproui/endpoints.go b/riverproui/endpoints.go index 8472396e..bbcb124b 100644 --- a/riverproui/endpoints.go +++ b/riverproui/endpoints.go @@ -122,10 +122,20 @@ func (e *endpoints[TTx]) Extensions(ctx context.Context) (map[string]bool, error return nil, err } + hasWorkflowTable, err := execTx.TableExists(ctx, &riverdriver.TableExistsParams{ + Schema: schema, + Table: "river_workflow", + }) + if err != nil { + return nil, err + } + indexResults, err := execTx.IndexesExist(ctx, &riverdriver.IndexesExistParams{ IndexNames: []string{ "river_job_workflow_list_active", "river_job_workflow_scheduling", + "river_job_workflow_active_idx", + "river_job_workflow_inactive_idx", }, Schema: schema, }) @@ -133,7 +143,11 @@ func (e *endpoints[TTx]) Extensions(ctx context.Context) (map[string]bool, error return nil, err } - hasWorkflows := indexResults["river_job_workflow_list_active"] || indexResults["river_job_workflow_scheduling"] + hasWorkflows := hasWorkflowTable || + indexResults["river_job_workflow_list_active"] || + indexResults["river_job_workflow_scheduling"] || + indexResults["river_job_workflow_active_idx"] || + indexResults["river_job_workflow_inactive_idx"] return map[string]bool{ "durable_periodic_jobs": hasPeriodicJobTable, diff --git a/riverproui/endpoints_test.go b/riverproui/endpoints_test.go index b46cf7a2..1a551b3c 100644 --- a/riverproui/endpoints_test.go +++ b/riverproui/endpoints_test.go @@ -187,7 +187,7 @@ func TestProEndpointsExtensions(t *testing.T) { t.Run("WorkflowsDetection", func(t *testing.T) 
{ t.Parallel() - t.Run("NoWorkflowIndexes", func(t *testing.T) { + t.Run("WorkflowTablePresentWithoutLegacyIndexes", func(t *testing.T) { t.Parallel() bundle := setup(ctx, t) @@ -196,6 +196,31 @@ func TestProEndpointsExtensions(t *testing.T) { require.NoError(t, err) _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_scheduling;`) require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_active_idx;`) + require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_inactive_idx;`) + require.NoError(t, err) + + ext, err := bundle.endpoint.Extensions(ctx) + require.NoError(t, err) + require.True(t, ext["has_workflows"]) + }) + + t.Run("NoWorkflowTableOrIndexes", func(t *testing.T) { + t.Parallel() + + bundle := setup(ctx, t) + + _, err := bundle.tx.Exec(ctx, `DROP TABLE IF EXISTS river_workflow CASCADE;`) + require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_list_active;`) + require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_scheduling;`) + require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_active_idx;`) + require.NoError(t, err) + _, err = bundle.tx.Exec(ctx, `DROP INDEX IF EXISTS river_job_workflow_inactive_idx;`) + require.NoError(t, err) ext, err := bundle.endpoint.Extensions(ctx) require.NoError(t, err) diff --git a/riverproui/internal/prohandler/pro_handler_api_endpoints.go b/riverproui/internal/prohandler/pro_handler_api_endpoints.go index bda2f791..77322681 100644 --- a/riverproui/internal/prohandler/pro_handler_api_endpoints.go +++ b/riverproui/internal/prohandler/pro_handler_api_endpoints.go @@ -3,6 +3,7 @@ package prohandler import ( "context" "encoding/json" + "errors" "fmt" "net/http" "slices" @@ -19,6 +20,7 @@ import ( "riverqueue.com/riverpro" riverprodriver "riverqueue.com/riverpro/driver" + "riverqueue.com/riverpro/riverworkflow" 
"riverqueue.com/riverui/internal/apibundle" "riverqueue.com/riverui/riverproui/internal/uitype" @@ -243,25 +245,40 @@ func (req *workflowGetRequest) ExtractRaw(r *http.Request) error { } type workflowGetResponse struct { - Tasks []*riverJobSerializable `json:"tasks"` + ID string `json:"id"` + Name string `json:"name"` + Tasks []*workflowTaskSerializable `json:"tasks"` } func (a *workflowGetEndpoint[TTx]) Execute(ctx context.Context, req *workflowGetRequest) (*workflowGetResponse, error) { - jobs, err := a.DB.WorkflowJobList(ctx, &riverprodriver.WorkflowJobListParams{ - PaginationLimit: 1000, - PaginationOffset: 0, - WorkflowID: req.ID, - }) + workflow, err := a.Client.WorkflowFromExistingID(ctx, req.ID, nil) if err != nil { - return nil, fmt.Errorf("error getting workflow jobs: %w", err) + if errors.Is(err, rivertype.ErrNotFound) { + return nil, apierror.NewNotFoundf("Workflow not found: %s.", req.ID) + } + return nil, fmt.Errorf("error loading workflow: %w", err) } - if len(jobs) < 1 { - return nil, apierror.NewNotFoundf("Workflow not found: %s.", req.ID) + loadedTasks, err := workflow.LoadAll(ctx, nil) + if err != nil { + return nil, fmt.Errorf("error loading workflow tasks: %w", err) + } + + taskNames := loadedTasks.Names() + tasks := make([]*workflowTaskSerializable, 0, len(taskNames)) + for _, taskName := range taskNames { + task := loadedTasks.Get(taskName) + serializedTask := internalWorkflowTaskToSerializableTask(task) + if serializedTask == nil { + continue + } + tasks = append(tasks, serializedTask) } return &workflowGetResponse{ - Tasks: sliceutil.Map(jobs, internalWorkflowTaskWithJobToSerializableJob), + ID: workflow.ID(), + Name: workflow.Name(), + Tasks: tasks, }, nil } @@ -472,8 +489,214 @@ func internalJobToSerializableJob(internal *rivertype.JobRow) *riverJobSerializa } } -func internalWorkflowTaskWithJobToSerializableJob(internal *riverprodriver.WorkflowTaskWithJob) *riverJobSerializable { - return internalJobToSerializableJob(internal.Job) +const 
( + workflowTaskWaitReasonDependencies = "dependencies" + workflowTaskWaitReasonDependenciesAndGate = "dependencies_and_gate" + workflowTaskWaitReasonGate = "gate" + workflowTaskWaitReasonNone = "none" +) + +type workflowTaskSerializable struct { + riverJobSerializable + + Deps []string `json:"deps"` + Gate *workflowTaskGate `json:"gate,omitempty"` + IgnoreCancelledDeps bool `json:"ignore_cancelled_deps"` + IgnoreDeletedDeps bool `json:"ignore_deleted_deps"` + IgnoreDiscardedDeps bool `json:"ignore_discarded_deps"` + Name string `json:"name"` + StagedAt *time.Time `json:"staged_at,omitempty"` + WaitReason string `json:"wait_reason"` + WorkflowID string `json:"workflow_id"` +} + +type workflowTaskGate struct { + ActiveAt *time.Time `json:"active_at,omitempty"` + DeclaredSignals []string `json:"declared_signals"` + Enabled bool `json:"enabled"` + ExprCEL string `json:"expr_cel"` + Phase string `json:"phase"` + Satisfaction *workflowTaskGateSatisfaction `json:"satisfaction,omitempty"` + SatisfiedAt *time.Time `json:"satisfied_at,omitempty"` + Timers []*workflowTaskGateTimer `json:"timers"` +} + +type workflowTaskGateSatisfaction struct { + AsOf time.Time `json:"as_of"` + Attempt int `json:"attempt"` + Signals []*workflowTaskGateSatisfactionSignal `json:"signals"` + Timers []*workflowTaskGateSatisfactionTimer `json:"timers"` +} + +type workflowTaskGateSatisfactionSignal struct { + Count int64 `json:"count"` + Key string `json:"key"` + LastSignalID int64 `json:"last_signal_id"` +} + +type workflowTaskGateSatisfactionTimer struct { + FireAt *time.Time `json:"fire_at,omitempty"` + Fired bool `json:"fired"` + Name string `json:"name"` +} + +type workflowTaskGateTimer struct { + After string `json:"after,omitempty"` + AfterUS *int64 `json:"after_us,omitempty"` + AfterSeconds *float64 `json:"after_seconds,omitempty"` + Anchor *workflowTaskGateTimerAnchor `json:"anchor,omitempty"` + FireAt *time.Time `json:"fire_at,omitempty"` + HasAfter bool `json:"has_after"` + HasFireAt 
bool `json:"has_fire_at"` + Name string `json:"name"` +} + +type workflowTaskGateTimerAnchor struct { + Kind string `json:"kind"` + Task string `json:"task,omitempty"` +} + +func internalWorkflowTaskToSerializableTask(task *riverpro.WorkflowTaskWithJob) *workflowTaskSerializable { + if task == nil || task.Job == nil { + return nil + } + + gateView := task.Gate.View() + + return &workflowTaskSerializable{ + riverJobSerializable: *internalJobToSerializableJob(task.Job), + Deps: task.Deps, + Gate: workflowTaskGateFromInternal(gateView), + IgnoreCancelledDeps: task.IgnoreCancelledDeps, + IgnoreDeletedDeps: task.IgnoreDeletedDeps, + IgnoreDiscardedDeps: task.IgnoreDiscardedDeps, + Name: task.Name, + StagedAt: workflowTaskStagedAtFromMetadata(task.Job.Metadata), + WaitReason: workflowTaskWaitReasonFromInternal(task.WaitReason), + WorkflowID: task.WorkflowID, + } +} + +func workflowTaskWaitReasonFromInternal(waitReason riverpro.WorkflowTaskWaitReason) string { + switch waitReason { + case riverpro.WorkflowTaskWaitReasonDependenciesAndGate: + return workflowTaskWaitReasonDependenciesAndGate + case riverpro.WorkflowTaskWaitReasonDependencies: + return workflowTaskWaitReasonDependencies + case riverpro.WorkflowTaskWaitReasonGate: + return workflowTaskWaitReasonGate + default: + return workflowTaskWaitReasonNone + } +} + +func workflowTaskGateFromInternal(gateView riverworkflow.GateView) *workflowTaskGate { + if !gateView.Enabled { + return nil + } + + result := &workflowTaskGate{ + ActiveAt: gateView.ActiveAt, + DeclaredSignals: gateView.DeclaredSignals, + Enabled: gateView.Enabled, + ExprCEL: gateView.ExprCEL, + Phase: gateView.Phase, + SatisfiedAt: gateView.SatisfiedAt, + Timers: make([]*workflowTaskGateTimer, 0, len(gateView.Timers)), + } + + for _, timer := range gateView.Timers { + if timer == nil { + continue + } + + serializedTimer := &workflowTaskGateTimer{ + After: timer.After, + AfterUS: timer.AfterUS, + HasAfter: timer.HasAfter, + HasFireAt: timer.HasFireAt, + 
Name: timer.Name, + } + + if timer.HasAfter && timer.AfterUS != nil { + afterSeconds := float64(*timer.AfterUS) / float64(time.Second/time.Microsecond) + serializedTimer.AfterSeconds = &afterSeconds + } + if timer.HasFireAt && timer.FireAt != nil { + serializedTimer.FireAt = timer.FireAt + } + if timer.Anchor != nil { + serializedTimer.Anchor = &workflowTaskGateTimerAnchor{ + Kind: string(timer.Anchor.Kind), + Task: timer.Anchor.Task, + } + } + + result.Timers = append(result.Timers, serializedTimer) + } + + if gateView.Satisfaction == nil { + return result + } + + satisfactionSignals := make([]*workflowTaskGateSatisfactionSignal, 0, len(gateView.Satisfaction.Signals)) + for _, signal := range gateView.Satisfaction.Signals { + if signal == nil { + continue + } + satisfactionSignals = append(satisfactionSignals, &workflowTaskGateSatisfactionSignal{ + Count: signal.Count, + Key: signal.Key, + LastSignalID: signal.LastSignalID, + }) + } + + satisfactionTimers := make([]*workflowTaskGateSatisfactionTimer, 0, len(gateView.Satisfaction.Timers)) + for _, timer := range gateView.Satisfaction.Timers { + if timer == nil { + continue + } + satisfactionTimers = append(satisfactionTimers, &workflowTaskGateSatisfactionTimer{ + FireAt: timer.FireAt, + Fired: timer.Fired, + Name: timer.Name, + }) + } + + result.Satisfaction = &workflowTaskGateSatisfaction{ + AsOf: gateView.Satisfaction.AsOf, + Attempt: gateView.Satisfaction.Attempt, + Signals: satisfactionSignals, + Timers: satisfactionTimers, + } + + return result +} + +func workflowTaskStagedAtFromMetadata(metadata json.RawMessage) *time.Time { + if len(metadata) == 0 { + return nil + } + + var metadataView struct { + WorkflowStagedAt string `json:"workflow_staged_at"` + } + if err := json.Unmarshal(metadata, &metadataView); err != nil { + return nil + } + if metadataView.WorkflowStagedAt == "" { + return nil + } + + stagedAt, err := time.Parse(time.RFC3339Nano, metadataView.WorkflowStagedAt) + if err != nil { + stagedAt, err = 
time.Parse(time.RFC3339, metadataView.WorkflowStagedAt) + if err != nil { + return nil + } + } + + return &stagedAt } type workflowListItem struct { diff --git a/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go b/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go index 7a896cd5..9f571dc8 100644 --- a/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go +++ b/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go @@ -24,6 +24,7 @@ import ( "riverqueue.com/riverpro" "riverqueue.com/riverpro/driver" "riverqueue.com/riverpro/driver/riverpropgxv5" + "riverqueue.com/riverpro/riverworkflow" "riverqueue.com/riverui/internal/apibundle" "riverqueue.com/riverui/internal/riverinternaltest/testfactory" @@ -32,24 +33,26 @@ import ( type setupEndpointTestBundle struct { client *riverpro.Client[pgx.Tx] - exec driver.ProExecutorTx + exec driver.ProExecutor logger *slog.Logger - tx pgx.Tx + schema string } func setupEndpoint[TEndpoint any](ctx context.Context, t *testing.T, initFunc func(bundle ProAPIBundle[pgx.Tx]) *TEndpoint) (*TEndpoint, *setupEndpointTestBundle) { t.Helper() var ( - logger = riversharedtest.Logger(t) - driver = riverpropgxv5.New(riversharedtest.DBPool(ctx, t)) - tx, _ = riverdbtest.TestTxPgxDriver(ctx, t, driver, nil) - exec = driver.UnwrapProExecutor(tx) + logger = riversharedtest.Logger(t) + pool = riversharedtest.DBPool(ctx, t) + proDriver = riverpropgxv5.New(pool) + schema = riverdbtest.TestSchema(ctx, t, proDriver, &riverdbtest.TestSchemaOpts{DisableReuse: true}) + exec = proDriver.GetProExecutor() ) - client, err := riverpro.NewClient(driver, &riverpro.Config{ + client, err := riverpro.NewClient(proDriver, &riverpro.Config{ Config: river.Config{ Logger: logger, + Schema: schema, }, }) require.NoError(t, err) @@ -59,7 +62,7 @@ func setupEndpoint[TEndpoint any](ctx context.Context, t *testing.T, initFunc fu Archetype: riversharedtest.BaseServiceArchetype(t), Client: client.Client, DB: exec, - Driver: 
driver, + Driver: proDriver, // Extensions aren't needed for any of these test endpoints Extensions: func(_ context.Context) (map[string]bool, error) { return map[string]bool{}, nil }, Logger: logger, @@ -77,7 +80,7 @@ func setupEndpoint[TEndpoint any](ctx context.Context, t *testing.T, initFunc fu client: client, exec: exec, logger: logger, - tx: tx, + schema: schema, } } @@ -99,8 +102,8 @@ func TestProAPIHandlerPeriodicJobList(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewPeriodicJobListEndpoint) - job1 := protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{ID: ptrutil.Ptr("alpha"), NextRunAt: ptrutil.Ptr(time.Now().Add(time.Minute))}) - job2 := protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{ID: ptrutil.Ptr("beta"), NextRunAt: ptrutil.Ptr(time.Now().Add(2 * time.Minute))}) + job1 := protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{ID: ptrutil.Ptr("alpha"), NextRunAt: ptrutil.Ptr(time.Now().Add(time.Minute)), Schema: bundle.schema}) + job2 := protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{ID: ptrutil.Ptr("beta"), NextRunAt: ptrutil.Ptr(time.Now().Add(2 * time.Minute)), Schema: bundle.schema}) resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &periodicJobListRequest{}) require.NoError(t, err) @@ -114,8 +117,8 @@ func TestProAPIHandlerPeriodicJobList(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewPeriodicJobListEndpoint) - job1 := protestfactory.PeriodicJob(ctx, t, bundle.exec, nil) - _ = protestfactory.PeriodicJob(ctx, t, bundle.exec, nil) + job1 := protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{Schema: bundle.schema}) + _ = protestfactory.PeriodicJob(ctx, t, bundle.exec, &protestfactory.PeriodicJobOpts{Schema: bundle.schema}) resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &periodicJobListRequest{Limit: ptrutil.Ptr(1)}) 
require.NoError(t, err) @@ -134,9 +137,9 @@ func TestProAPIHandlerWorkflowCancel(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowCancelEndpoint) - job1 := makeWorkflowJob(ctx, t, bundle.exec, "123", "task", nil) - job2 := makeWorkflowJob(ctx, t, bundle.exec, "123", "task", []string{"dep1"}) - job3 := makeWorkflowJob(ctx, t, bundle.exec, "123", "task", []string{"dep1", "dep2"}) + job1 := makeWorkflowJob(ctx, t, bundle.exec, bundle.schema, "123", "task_1", nil) + job2 := makeWorkflowJob(ctx, t, bundle.exec, bundle.schema, "123", "task_2", []string{"task_1"}) + job3 := makeWorkflowJob(ctx, t, bundle.exec, bundle.schema, "123", "task_3", []string{"task_1", "task_2"}) resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowCancelRequest{ID: "123"}) require.NoError(t, err) @@ -146,6 +149,101 @@ func TestProAPIHandlerWorkflowCancel(t *testing.T) { }) } +func TestProAPIHandlerWorkflowGet(t *testing.T) { + t.Parallel() + + ctx := context.Background() + + t.Run("SuccessIncludesGateAndWaitReason", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowGetEndpoint) + require.NoError(t, bundle.exec.WorkflowInsertMany(ctx, &driver.WorkflowInsertManyParams{ + IDs: []string{"wf_get"}, + Names: []string{"wf_get"}, + Schema: bundle.schema, + })) + + now := time.Now().UTC().Truncate(time.Second) + gateSpec := &riverworkflow.GateSpec{ + Expr: `signals["approval"].size() > 0 || timers["review_sla"].fired`, + Signals: []string{"approval"}, + Timers: []riverworkflow.Timer{ + riverworkflow.TimerAfterWorkflowCreated("review_sla", 30*time.Minute), + }, + } + + dependencyJob := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ + FinalizedAt: ptrutil.Ptr(now.Add(-2 * time.Minute)), + Metadata: workflowMetadata("wf_get", "collect_inputs", nil), + State: ptrutil.Ptr(rivertype.JobStateCompleted), + }) + + gatedJob := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ 
+ Metadata: workflowMetadataWithGate("wf_get", "await_review", []string{"collect_inputs"}, gateSpec, map[string]any{ + "active_at": now.Format(time.RFC3339Nano), + "timers": map[string]any{ + "review_sla": map[string]any{ + "fire_at": now.Add(30 * time.Minute).Format(time.RFC3339Nano), + }, + }, + }), + State: ptrutil.Ptr(rivertype.JobStatePending), + }) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowGetRequest{ID: "wf_get"}) + require.NoError(t, err) + require.Equal(t, "wf_get", resp.ID) + require.Equal(t, "wf_get", resp.Name) + require.Len(t, resp.Tasks, 2) + + taskByID := map[int64]*workflowTaskSerializable{} + for _, task := range resp.Tasks { + taskByID[task.ID] = task + } + + require.Equal(t, workflowTaskWaitReasonNone, taskByID[dependencyJob.ID].WaitReason) + require.Nil(t, taskByID[dependencyJob.ID].Gate) + + gatedTask := taskByID[gatedJob.ID] + require.NotNil(t, gatedTask) + require.Equal(t, "await_review", gatedTask.Name) + require.Equal(t, "wf_get", gatedTask.WorkflowID) + require.Equal(t, []string{"collect_inputs"}, gatedTask.Deps) + require.Equal(t, workflowTaskWaitReasonGate, gatedTask.WaitReason) + require.NotNil(t, gatedTask.Gate) + require.True(t, gatedTask.Gate.Enabled) + require.Equal(t, "waiting", gatedTask.Gate.Phase) + require.Equal(t, gateSpec.Expr, gatedTask.Gate.ExprCEL) + require.Equal(t, []string{"approval"}, gatedTask.Gate.DeclaredSignals) + require.Len(t, gatedTask.Gate.Timers, 1) + require.NotNil(t, gatedTask.Gate.ActiveAt) + require.Nil(t, gatedTask.Gate.Satisfaction) + + timer := gatedTask.Gate.Timers[0] + require.Equal(t, "review_sla", timer.Name) + require.NotEmpty(t, timer.After) + require.NotNil(t, timer.AfterUS) + require.True(t, timer.HasAfter) + require.True(t, timer.HasFireAt) + require.NotNil(t, timer.AfterSeconds) + require.InDelta(t, 1800, *timer.AfterSeconds, 0.001) + require.NotNil(t, timer.Anchor) + require.Equal(t, string(riverworkflow.TimerAnchorKindWorkflowCreatedAt), 
timer.Anchor.Kind) + require.NotNil(t, timer.FireAt) + }) + + t.Run("NotFound", func(t *testing.T) { + t.Parallel() + + endpoint, _ := setupEndpoint(ctx, t, NewWorkflowGetEndpoint) + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowGetRequest{ID: "does-not-exist"}) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "Workflow not found") + }) +} + func TestProAPIHandlerWorkflowRetry(t *testing.T) { t.Parallel() @@ -156,17 +254,17 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowRetryEndpoint) - job1 := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + job1 := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_all_1", "task1", nil), State: ptrutil.Ptr(rivertype.JobStateDiscarded), }) - job2 := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + job2 := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_all_1", "task2", nil), State: ptrutil.Ptr(rivertype.JobStateCompleted), }) - job3 := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + job3 := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_all_1", "task3", []string{"task1", "task2"}), State: ptrutil.Ptr(rivertype.JobStateCancelled), @@ -185,12 +283,12 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowRetryEndpoint) // Build jobs with specific states - jobCompleted := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + jobCompleted := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_failed_only", "done", nil), State: 
ptrutil.Ptr(rivertype.JobStateCompleted), }) - jobDiscarded := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + jobDiscarded := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_failed_only", "failed", nil), State: ptrutil.Ptr(rivertype.JobStateDiscarded), @@ -210,17 +308,17 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowRetryEndpoint) // a -> b -> c; mark a as discarded, others completed - jobA := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + jobA := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_failed_downstream", "a", nil), State: ptrutil.Ptr(rivertype.JobStateDiscarded), }) - jobB := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + jobB := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_failed_downstream", "b", []string{"a"}), State: ptrutil.Ptr(rivertype.JobStateCompleted), }) - jobC := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + jobC := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_failed_downstream", "c", []string{"b"}), State: ptrutil.Ptr(rivertype.JobStateCompleted), @@ -240,7 +338,7 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { attempt := 2 maxAttempts := 5 - job := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + job := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ Attempt: &attempt, FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_reset_history", "t1", nil), @@ -259,7 +357,7 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { // With resetting history, Attempt resets to 0 and MaxAttempts does not 
increment beyond the previous +1 action // Create a fresh workflow to isolate effects attempt2 := 3 - job2 := testfactory.Job(ctx, t, bundle.exec, &testfactory.JobOpts{ + job2 := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ Attempt: &attempt2, FinalizedAt: ptrutil.Ptr(time.Now()), Metadata: workflowMetadata("wf_reset_history2", "t1", nil), @@ -276,20 +374,59 @@ func TestProAPIHandlerWorkflowRetry(t *testing.T) { }) } -func makeWorkflowJob(ctx context.Context, t *testing.T, exec riverdriver.ExecutorTx, workflowID string, taskName string, deps []string) *rivertype.JobRow { +func makeWorkflowJob(ctx context.Context, t *testing.T, exec riverdriver.Executor, schema string, workflowID string, taskName string, deps []string) *rivertype.JobRow { t.Helper() - return testfactory.Job(ctx, t, exec, &testfactory.JobOpts{ + return jobWithSchema(ctx, t, exec, schema, &testfactory.JobOpts{ Metadata: workflowMetadata(workflowID, taskName, deps), }) } +func jobWithSchema(ctx context.Context, t *testing.T, exec riverdriver.Executor, schema string, opts *testfactory.JobOpts) *rivertype.JobRow { + t.Helper() + + params := testfactory.Job_Build(t, opts) + params.Schema = schema + + job, err := exec.JobInsertFull(ctx, params) + require.NoError(t, err) + return job +} + func workflowMetadata(workflowID, taskName string, deps []string) []byte { + if deps == nil { + deps = []string{} + } + + meta := map[string]any{ + "workflow_id": workflowID, + "task": taskName, + "deps": deps, + } + + buf, err := json.Marshal(meta) + if err != nil { + panic(err) + } + return buf +} + +func workflowMetadataWithGate(workflowID, taskName string, deps []string, gate *riverworkflow.GateSpec, gateState map[string]any) []byte { + if deps == nil { + deps = []string{} + } + meta := map[string]any{ "workflow_id": workflowID, "task": taskName, "deps": deps, } + if gate != nil { + meta["river:workflow_gate"] = gate + } + if gateState != nil { + meta["river:workflow_gate_state"] = gateState 
+ } buf, err := json.Marshal(meta) if err != nil { diff --git a/riverproui/internal/protestfactory/pro_test_factory.go b/riverproui/internal/protestfactory/pro_test_factory.go index ee145a79..fefea207 100644 --- a/riverproui/internal/protestfactory/pro_test_factory.go +++ b/riverproui/internal/protestfactory/pro_test_factory.go @@ -17,6 +17,7 @@ import ( type PeriodicJobOpts struct { ID *string NextRunAt *time.Time + Schema string UpdatedAt *time.Time } @@ -31,7 +32,7 @@ func PeriodicJob(ctx context.Context, tb testing.TB, exec driver.ProExecutor, op ID: ptrutil.ValOrDefaultFunc(opts.ID, func() string { return fmt.Sprintf("periodic_job_%05d", nextSeq()) }), NextRunAt: ptrutil.ValOrDefaultFunc(opts.NextRunAt, time.Now), UpdatedAt: opts.UpdatedAt, - Schema: "", + Schema: opts.Schema, }) require.NoError(tb, err) return periodicJob diff --git a/riverproui/pro_handler_test.go b/riverproui/pro_handler_test.go index 02bcc978..624e70da 100644 --- a/riverproui/pro_handler_test.go +++ b/riverproui/pro_handler_test.go @@ -33,6 +33,8 @@ import ( func TestProHandlerIntegration(t *testing.T) { t.Parallel() + var schema string + createBundle := func(client *riverpro.Client[pgx.Tx], tx pgx.Tx) uiendpoints.Bundle { return NewEndpoints(client, &EndpointsOpts[pgx.Tx]{Tx: &tx}) } @@ -44,11 +46,13 @@ func TestProHandlerIntegration(t *testing.T) { river.AddWorker(workers, &uicommontest.NoOpWorker{}) driver := riverpropgxv5.New(riversharedtest.DBPool(ctx, tb)) - tx, _ := riverdbtest.TestTxPgxDriver(ctx, tb, driver, nil) + tx, testSchema := riverdbtest.TestTxPgxDriver(ctx, tb, driver, nil) + schema = testSchema client, err := riverpro.NewClient(driver, &riverpro.Config{ Config: river.Config{ Logger: logger, + Schema: testSchema, Workers: workers, }, }) @@ -72,19 +76,31 @@ func TestProHandlerIntegration(t *testing.T) { return handler } - testRunner := func(exec riverdriver.Executor, makeAPICall handlertest.APICallFunc) { + testRunner := func(exec riverdriver.Executor, dbDriver 
riverdriver.Driver[pgx.Tx], makeAPICall handlertest.APICallFunc) { ctx := context.Background() proExec, ok := exec.(driver.ProExecutor) require.True(t, ok) + proDriver, ok := dbDriver.(driver.ProDriver[pgx.Tx]) + require.True(t, ok) _ = protestfactory.PeriodicJob(ctx, t, proExec, nil) queue := testfactory.Queue(ctx, t, exec, nil) workflowID := uuid.New() + require.NoError(t, proDriver.GetProExecutor().WorkflowInsertMany(ctx, &driver.WorkflowInsertManyParams{ + IDs: []string{workflowID.String()}, + Names: []string{workflowID.String()}, + Schema: schema, + })) _ = testfactory.Job(ctx, t, exec, &testfactory.JobOpts{Metadata: uicommontest.MustMarshalJSON(t, map[string]uuid.UUID{"workflow_id": workflowID})}) workflowID2 := uuid.New() + require.NoError(t, proDriver.GetProExecutor().WorkflowInsertMany(ctx, &driver.WorkflowInsertManyParams{ + IDs: []string{workflowID2.String()}, + Names: []string{workflowID2.String()}, + Schema: schema, + })) _ = testfactory.Job(ctx, t, exec, &testfactory.JobOpts{Metadata: uicommontest.MustMarshalJSON(t, map[string]uuid.UUID{"workflow_id": workflowID2})}) // Verify OSS features endpoint is mounted and returns success even w/ Pro bundle: @@ -103,38 +119,54 @@ func TestProHandlerIntegration(t *testing.T) { func TestProFeaturesEndpointResponse(t *testing.T) { t.Parallel() - ctx := t.Context() - logger := riversharedtest.Logger(t) - - driver := riverpropgxv5.New(riversharedtest.DBPool(ctx, t)) - tx, _ := riverdbtest.TestTxPgxDriver(ctx, t, driver, &riverdbtest.TestTxOpts{DisableSchemaSharing: true}) - client, err := riverpro.NewClient(driver, &riverpro.Config{ - Config: river.Config{ - Logger: logger, - }, - }) - require.NoError(t, err) + type testBundle struct { + client *riverpro.Client[pgx.Tx] + handler http.Handler + logger *slog.Logger + schema string + tx pgx.Tx + } - bundle := NewEndpoints(client, &EndpointsOpts[pgx.Tx]{Tx: &tx}) + setup := func(ctx context.Context, t *testing.T) *testBundle { + t.Helper() - // Reuse the same handler 
creation pattern as integration tests - handler := func() http.Handler { logger := riversharedtest.Logger(t) - opts := &riverui.HandlerOpts{ + driver := riverpropgxv5.New(riversharedtest.DBPool(ctx, t)) + tx, schema := riverdbtest.TestTxPgxDriver(ctx, t, driver, &riverdbtest.TestTxOpts{DisableSchemaSharing: true}) + client, err := riverpro.NewClient(driver, &riverpro.Config{ + Config: river.Config{ + Logger: logger, + Schema: schema, + }, + }) + require.NoError(t, err) + + bundle := NewEndpoints(client, &EndpointsOpts[pgx.Tx]{Tx: &tx}) + + handler, err := riverui.NewHandler(&riverui.HandlerOpts{ DevMode: true, Endpoints: bundle, LiveFS: false, Logger: logger, - } - h, err := riverui.NewHandler(opts) + }) require.NoError(t, err) - return h - }() + + return &testBundle{ + client: client, + handler: handler, + logger: logger, + schema: schema, + tx: tx, + } + } + + ctx := context.Background() + bundle := setup(ctx, t) recorder := httptest.NewRecorder() - req := httptest.NewRequestWithContext(t.Context(), http.MethodGet, "/api/features", nil) + req := httptest.NewRequestWithContext(ctx, http.MethodGet, "/api/features", nil) - handler.ServeHTTP(recorder, req) + bundle.handler.ServeHTTP(recorder, req) status := recorder.Result().StatusCode require.Equal(t, http.StatusOK, status) From b1c6ca7abea8862753a89376c6b889e7795f14d5 Mon Sep 17 00:00:00 2001 From: Blake Gentry Date: Thu, 7 May 2026 22:32:20 -0500 Subject: [PATCH 3/4] add workflow wait inspection UI Render workflow v2 wait metadata in the diagram and detail views using the final wait terminology instead of the earlier gate model. Tasks now show wait phases, timer and signal evidence, dependency progress, and wait reasons from the backend workflow detail payload. Add the dedicated wait inspector, denser detail layouts, task timeline, and Storybook coverage used to inspect workflow waits across timers, signals, dependencies, and mixed conditions. 
The diagram tests and factories now cover the wait-aware graph and node states directly. --- .prettierignore | 1 + .storybook/preview.tsx | 19 + CHANGELOG.md | 10 + riverproui/endpoints.go | 2 + .../prohandler/pro_handler_api_endpoints.go | 673 +++++++++++-- .../pro_handler_api_endpoints_test.go | 686 +++++++++++++- riverproui/pro_handler_test.go | 17 +- src/components/TaskStateIcon.tsx | 12 +- src/components/WorkflowDetail.stories.tsx | 741 +++++++++++++++ src/components/WorkflowDetail.test.tsx | 695 ++++++++++++++ src/components/WorkflowDetail.tsx | 891 +++++++++++++++--- src/components/WorkflowGateInspector.model.ts | 542 +++++++++++ .../WorkflowGateInspector.stories.tsx | 563 +++++++++++ src/components/WorkflowGateInspector.test.tsx | 780 +++++++++++++++ src/components/WorkflowGateInspector.tsx | 458 +++++++++ src/components/WorkflowGateInspector.types.ts | 89 ++ .../WorkflowGateInspectorConditions.tsx | 620 ++++++++++++ .../WorkflowGateInspectorDiagnostics.tsx | 125 +++ .../WorkflowGateInspectorSignals.tsx | 262 +++++ .../WorkflowGateInspectorSummary.tsx | 151 +++ src/components/job-search/JobSearch.test.tsx | 41 + .../WorkflowDiagram.stories.tsx | 662 +++++++++++++ .../workflow-diagram/WorkflowDiagram.test.tsx | 144 ++- .../workflow-diagram/WorkflowDiagram.tsx | 81 +- .../WorkflowDiagramEdge.test.tsx | 35 + .../workflow-diagram/WorkflowDiagramEdge.tsx | 12 +- .../WorkflowGateGallery.stories.tsx | 519 ++++++++++ .../workflow-diagram/WorkflowNode.test.tsx | 146 +++ .../workflow-diagram/WorkflowNode.tsx | 458 ++++++++- .../workflow-diagram/workflow-diagram.css | 15 + .../workflowDiagramConstants.ts | 22 +- .../workflowDiagramGraphModel.test.ts | 195 +--- .../workflowDiagramGraphModel.ts | 68 +- .../workflow-diagram/workflowDiagramLayout.ts | 11 +- .../workflowDiagramMergeHints.ts | 13 +- src/services/workflows.test.ts | 254 +++++ src/services/workflows.ts | 670 ++++++++++++- src/test/factories/workflowJob.test.ts | 75 ++ src/test/factories/workflowJob.ts | 113 
++- 39 files changed, 10308 insertions(+), 563 deletions(-) create mode 100644 src/components/WorkflowDetail.stories.tsx create mode 100644 src/components/WorkflowDetail.test.tsx create mode 100644 src/components/WorkflowGateInspector.model.ts create mode 100644 src/components/WorkflowGateInspector.stories.tsx create mode 100644 src/components/WorkflowGateInspector.test.tsx create mode 100644 src/components/WorkflowGateInspector.tsx create mode 100644 src/components/WorkflowGateInspector.types.ts create mode 100644 src/components/WorkflowGateInspectorConditions.tsx create mode 100644 src/components/WorkflowGateInspectorDiagnostics.tsx create mode 100644 src/components/WorkflowGateInspectorSignals.tsx create mode 100644 src/components/WorkflowGateInspectorSummary.tsx create mode 100644 src/components/workflow-diagram/WorkflowDiagram.stories.tsx create mode 100644 src/components/workflow-diagram/WorkflowDiagramEdge.test.tsx create mode 100644 src/components/workflow-diagram/WorkflowGateGallery.stories.tsx create mode 100644 src/components/workflow-diagram/WorkflowNode.test.tsx create mode 100644 src/components/workflow-diagram/workflow-diagram.css create mode 100644 src/services/workflows.test.ts create mode 100644 src/test/factories/workflowJob.test.ts diff --git a/.prettierignore b/.prettierignore index 62af73cd..8c3df916 100644 --- a/.prettierignore +++ b/.prettierignore @@ -11,3 +11,4 @@ src/routeTree.gen.ts # Other files to ignore package-lock.json +.claude/settings.local.json diff --git a/.storybook/preview.tsx b/.storybook/preview.tsx index 09ea840a..82228453 100644 --- a/.storybook/preview.tsx +++ b/.storybook/preview.tsx @@ -2,6 +2,7 @@ import type { Decorator, Preview } from "@storybook/react-vite"; import { withThemeByClassName } from "@storybook/addon-themes"; import { ReactRenderer } from "@storybook/react-vite"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { createMemoryHistory, createRootRoute, @@ -82,6 +83,23 @@ 
export const withThemeProvider: Decorator = (StoryFn) => ( ); +const storybookQueryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, +}); + +/** + * Decorator that provides a React Query client for stories that use hooks. + */ +export const withQueryClient: Decorator = (StoryFn) => ( + + + +); + /** * Decorator that sets user settings for stories * Can be overridden per story using parameters.settings @@ -113,6 +131,7 @@ declare module "@storybook/react-vite" { const preview: Preview = { decorators: [ + withQueryClient, withFeatures, withSettings, withRouter, diff --git a/CHANGELOG.md b/CHANGELOG.md index 47db9edd..1501db6a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- Workflow detail: add support for new River Pro workflow features, including signals and timers. [PR #567](https://github.com/riverqueue/riverui/pull/567). + ### Fixed - Workflow detail: add on-canvas zoom controls for click/touch navigation and improve controls styling for dark mode. [PR #524](https://github.com/riverqueue/riverui/pull/524). @@ -17,6 +21,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Job delete actions: require confirmation before deleting a single job or selected jobs in bulk. [Fixes #545](https://github.com/riverqueue/riverui/issues/545). [PR #546](https://github.com/riverqueue/riverui/pull/546). - Workflow detail: show the backend's not-found message instead of crashing when a workflow ID does not exist. [PR #564](https://github.com/riverqueue/riverui/pull/564). - Job detail: render a dedicated `Snoozed` timeline step for scheduled jobs with prior attempts so snoozed jobs no longer show negative wait durations. [PR #565](https://github.com/riverqueue/riverui/pull/565). 
+- Workflow detail: source wait phases, timers, signal evidence, and task wait reasons from backend workflow wait metadata instead of frontend-derived placeholders. [PR #567](https://github.com/riverqueue/riverui/pull/567). +- Workflow detail: redesign wait inspection around a denser task-side summary, structured term/timer/signal cards, reliable full-node selection, Storybook workflow detail coverage below the diagram, and a tighter details layout that keeps job metadata with the main job card. [PR #567](https://github.com/riverqueue/riverui/pull/567). +- Workflow detail: add a task-signal debugger backed by task-scoped River Pro signal reads while keeping workflow detail wait data summary-only. [PR #567](https://github.com/riverqueue/riverui/pull/567). +- Workflow detail: clarify wait inspection with a compact condition matrix, phase-aware match summaries, explicit signal scopes, condition-type icons, per-term CEL definitions, and denser timer/dependency/signal evidence. [PR #567](https://github.com/riverqueue/riverui/pull/567). +- Workflow detail: replace the flat narrative event list with a task timeline that keeps dependencies in one place, emphasizes wait evidence and durations, avoids showing staged events before a task is actually runnable, and uses a lower-noise milestone hierarchy. [PR #567](https://github.com/riverqueue/riverui/pull/567). +- Workflow detail: remove the not-started wait preview from dependency progress milestones. [PR #567](https://github.com/riverqueue/riverui/pull/567). 
## [v0.15.0] - 2026-02-26 diff --git a/riverproui/endpoints.go b/riverproui/endpoints.go index bbcb124b..b9ea67b5 100644 --- a/riverproui/endpoints.go +++ b/riverproui/endpoints.go @@ -195,6 +195,8 @@ func (e *endpoints[TTx]) MountEndpoints(archetype *baseservice.Archetype, logger apiendpoint.Mount(mux, prohandler.NewWorkflowGetEndpoint(bundle), mountOpts), apiendpoint.Mount(mux, prohandler.NewWorkflowListEndpoint(bundle), mountOpts), apiendpoint.Mount(mux, prohandler.NewWorkflowRetryEndpoint(bundle), mountOpts), + apiendpoint.Mount(mux, prohandler.NewWorkflowTaskSignalsEndpoint(bundle), mountOpts), + apiendpoint.Mount(mux, prohandler.NewWorkflowTaskWaitDiagnosticsEndpoint(bundle), mountOpts), ) return endpoints diff --git a/riverproui/internal/prohandler/pro_handler_api_endpoints.go b/riverproui/internal/prohandler/pro_handler_api_endpoints.go index 77322681..137e1366 100644 --- a/riverproui/internal/prohandler/pro_handler_api_endpoints.go +++ b/riverproui/internal/prohandler/pro_handler_api_endpoints.go @@ -282,6 +282,337 @@ func (a *workflowGetEndpoint[TTx]) Execute(ctx context.Context, req *workflowGet }, nil } +// +// workflowTaskSignalsEndpoint +// + +type workflowTaskSignalsEndpoint[TTx any] struct { + ProAPIBundle[TTx] + apiendpoint.Endpoint[workflowTaskSignalsRequest, workflowTaskSignalsResponse] +} + +func NewWorkflowTaskSignalsEndpoint[TTx any](apiBundle ProAPIBundle[TTx]) *workflowTaskSignalsEndpoint[TTx] { + return &workflowTaskSignalsEndpoint[TTx]{ProAPIBundle: apiBundle} +} + +func (*workflowTaskSignalsEndpoint[TTx]) Meta() *apiendpoint.EndpointMeta { + return &apiendpoint.EndpointMeta{ + Pattern: "GET /api/pro/workflows/{id}/task-signals", + StatusCode: http.StatusOK, + } +} + +const ( + workflowTaskSignalsLimitDefault = 20 + workflowTaskSignalsLimitMax = 100 + + workflowTaskSignalScopeEvidence = "evidence" + workflowTaskSignalScopeHistory = "history" +) + +type workflowTaskSignalsRequest struct { + CursorID *int64 `json:"-" validate:"omitempty"` // 
from ExtractRaw + Desc *bool `json:"-" validate:"omitempty"` // from ExtractRaw + ID string `json:"-" validate:"required"` // from ExtractRaw + Key string `json:"-" validate:"omitempty"` // from ExtractRaw + Limit *int `json:"-" validate:"omitempty,min=1"` // from ExtractRaw + Scope string `json:"-" validate:"omitempty"` // from ExtractRaw + TaskName string `json:"-" validate:"required"` // from ExtractRaw + TermName string `json:"-" validate:"omitempty"` // from ExtractRaw + WorkflowAttempt *int `json:"-" validate:"omitempty,min=1"` // from ExtractRaw +} + +func (req *workflowTaskSignalsRequest) ExtractRaw(r *http.Request) error { + req.ID = r.PathValue("id") + req.TaskName = r.URL.Query().Get("task_name") + req.Key = r.URL.Query().Get("key") + req.Scope = r.URL.Query().Get("scope") + req.TermName = r.URL.Query().Get("term_name") + + if cursorIDStr := r.URL.Query().Get("cursor_id"); cursorIDStr != "" { + cursorID, err := strconv.ParseInt(cursorIDStr, 10, 64) + if err != nil { + return apierror.NewBadRequestf("Couldn't convert `cursor_id` to integer: %s.", err) + } + req.CursorID = &cursorID + } + + if descStr := r.URL.Query().Get("desc"); descStr != "" { + desc, err := strconv.ParseBool(descStr) + if err != nil { + return apierror.NewBadRequestf("Couldn't convert `desc` to bool: %s.", err) + } + req.Desc = &desc + } + + if limitStr := r.URL.Query().Get("limit"); limitStr != "" { + limit, err := strconv.Atoi(limitStr) + if err != nil { + return apierror.NewBadRequestf("Couldn't convert `limit` to integer: %s.", err) + } + req.Limit = &limit + } + + if workflowAttemptStr := r.URL.Query().Get("workflow_attempt"); workflowAttemptStr != "" { + workflowAttempt, err := strconv.Atoi(workflowAttemptStr) + if err != nil { + return apierror.NewBadRequestf("Couldn't convert `workflow_attempt` to integer: %s.", err) + } + req.WorkflowAttempt = &workflowAttempt + } + + return nil +} + +type workflowTaskSignalsResponse struct { + Evidence *workflowTaskWaitEvidence 
`json:"evidence,omitempty"` + HasMore bool `json:"has_more"` + NextCursorID *int64 `json:"next_cursor_id,omitempty"` + Scope string `json:"scope"` + Signals []*workflowTaskSignal `json:"signals"` +} + +type workflowTaskSignal struct { + Attempt int `json:"attempt"` + CreatedAt time.Time `json:"created_at"` + ID int64 `json:"id"` + Key string `json:"key"` + Payload json.RawMessage `json:"payload"` + Source json.RawMessage `json:"source"` +} + +func (a *workflowTaskSignalsEndpoint[TTx]) Execute(ctx context.Context, req *workflowTaskSignalsRequest) (*workflowTaskSignalsResponse, error) { + workflow, err := a.Client.WorkflowFromExistingID(ctx, req.ID, nil) + if err != nil { + if errors.Is(err, rivertype.ErrNotFound) { + return nil, apierror.NewNotFoundf("Workflow not found: %s.", req.ID) + } + return nil, fmt.Errorf("error loading workflow: %w", err) + } + + params, evidence, scope, err := a.workflowTaskSignalListParams(ctx, req) + if err != nil { + return nil, err + } + + result, err := workflow.SignalListForTask(ctx, req.TaskName, params) + if err != nil { + var signalKeyUndeclaredErr *riverworkflow.SignalKeyUndeclaredError + if errors.As(err, &signalKeyUndeclaredErr) { + return nil, apierror.NewBadRequestf("%s.", signalKeyUndeclaredErr) + } + + var signalTaskDeclaresNoSignalKeysErr *riverworkflow.SignalTaskDeclaresNoSignalKeysError + if errors.As(err, &signalTaskDeclaresNoSignalKeysErr) { + return nil, apierror.NewBadRequestf("%s.", signalTaskDeclaresNoSignalKeysErr) + } + + var signalUnknownTaskErr *riverworkflow.SignalUnknownTaskError + if errors.As(err, &signalUnknownTaskErr) { + return nil, apierror.NewNotFoundf("%s.", signalUnknownTaskErr) + } + + return nil, fmt.Errorf("error loading task workflow signals: %w", err) + } + + return &workflowTaskSignalsResponse{ + Evidence: workflowTaskWaitEvidenceFromInternal(evidence), + HasMore: result.HasMore, + NextCursorID: result.NextCursorID, + Scope: scope, + Signals: sliceutil.Map(result.Signals, 
workflowTaskSignalFromInternal), + }, nil +} + +func (a *workflowTaskSignalsEndpoint[TTx]) workflowTaskSignalListParams(ctx context.Context, req *workflowTaskSignalsRequest) (*riverpro.WorkflowSignalListForTaskParams, *riverworkflow.WaitEvidence, string, error) { + if req.Key != "" && req.TermName != "" { + return nil, nil, "", apierror.NewBadRequest("Task signal key and term name are mutually exclusive.") + } + + scope := req.Scope + if scope == "" { + scope = workflowTaskSignalScopeHistory + } + if scope != workflowTaskSignalScopeEvidence && scope != workflowTaskSignalScopeHistory { + return nil, nil, "", apierror.NewBadRequestf("Task signal scope %q is invalid.", scope) + } + + key := req.Key + var evidence *riverworkflow.WaitEvidence + var loadedTask *riverpro.WorkflowTaskWithJob + loadTask := func() (*riverpro.WorkflowTaskWithJob, error) { + if loadedTask != nil { + return loadedTask, nil + } + workflow, err := a.Client.WorkflowFromExistingID(ctx, req.ID, nil) + if err != nil { + return nil, fmt.Errorf("error loading workflow: %w", err) + } + task, err := workflow.LoadTask(ctx, req.TaskName) + if err != nil { + if errors.Is(err, rivertype.ErrNotFound) { + return nil, apierror.NewNotFoundf("task %q not found in workflow %q.", req.TaskName, req.ID) + } + return nil, fmt.Errorf("error loading workflow task: %w", err) + } + loadedTask = task + return task, nil + } + + if req.TermName != "" { + task, err := loadTask() + if err != nil { + return nil, nil, "", err + } + if task.Wait == nil { + return nil, nil, "", apierror.NewBadRequestf("task %q declares no wait in workflow %q.", req.TaskName, req.ID) + } + term := task.Wait.Term(req.TermName) + if term == nil { + return nil, nil, "", apierror.NewBadRequestf("task %q wait term %q not found in workflow %q.", req.TaskName, req.TermName, req.ID) + } + if term.Kind != riverworkflow.WaitTermKindSignal { + return nil, nil, "", apierror.NewBadRequestf("task %q wait term %q is %s, not signal.", req.TaskName, req.TermName, 
term.Kind) + } + key = term.SignalKey + } + + var attempt *int + switch { + case scope == workflowTaskSignalScopeEvidence: + task, err := loadTask() + if err != nil { + return nil, nil, "", err + } + if task.Wait == nil || task.Wait.Evidence == nil { + return nil, nil, "", apierror.NewBadRequest("Wait evidence is unavailable before the wait resolves. Use diagnostics or signal history instead.") + } + evidence = task.Wait.Evidence + evidenceAttempt := evidence.WorkflowAttempt + attempt = &evidenceAttempt + case req.WorkflowAttempt != nil: + attempt = req.WorkflowAttempt + default: + workflow, err := a.DB.WorkflowGetByID(ctx, &riverprodriver.WorkflowGetByIDParams{ + Schema: a.Client.Schema(), + WorkflowID: req.ID, + }) + if err != nil { + return nil, nil, "", fmt.Errorf("error loading workflow: %w", err) + } + currentAttempt := workflow.CurrentAttempt + attempt = ¤tAttempt + } + + return &riverpro.WorkflowSignalListForTaskParams{ + Attempt: attempt, + CursorID: ptrutil.ValOrDefault(req.CursorID, 0), + Desc: ptrutil.ValOrDefault(req.Desc, true), + IncludeAfterResolution: scope == workflowTaskSignalScopeHistory, + Key: key, + Limit: min(ptrutil.ValOrDefault(req.Limit, workflowTaskSignalsLimitDefault), workflowTaskSignalsLimitMax), + }, evidence, scope, nil +} + +// +// workflowTaskWaitDiagnosticsEndpoint +// + +type workflowTaskWaitDiagnosticsEndpoint[TTx any] struct { + ProAPIBundle[TTx] + apiendpoint.Endpoint[workflowTaskWaitDiagnosticsRequest, workflowTaskWaitDiagnosticsResponse] +} + +func NewWorkflowTaskWaitDiagnosticsEndpoint[TTx any](apiBundle ProAPIBundle[TTx]) *workflowTaskWaitDiagnosticsEndpoint[TTx] { + return &workflowTaskWaitDiagnosticsEndpoint[TTx]{ProAPIBundle: apiBundle} +} + +func (*workflowTaskWaitDiagnosticsEndpoint[TTx]) Meta() *apiendpoint.EndpointMeta { + return &apiendpoint.EndpointMeta{ + Pattern: "GET /api/pro/workflows/{id}/task-wait-diagnostics", + StatusCode: http.StatusOK, + } +} + +type workflowTaskWaitDiagnosticsRequest struct { + ID 
string `json:"-" validate:"required"` // from ExtractRaw + TaskName string `json:"-" validate:"required"` // from ExtractRaw +} + +func (req *workflowTaskWaitDiagnosticsRequest) ExtractRaw(r *http.Request) error { + req.ID = r.PathValue("id") + req.TaskName = r.URL.Query().Get("task_name") + return nil +} + +type workflowTaskWaitDiagnosticsResponse struct { + EvalError string `json:"eval_error,omitempty"` + ExprResult *bool `json:"expr_result,omitempty"` + Inputs workflowWaitInputDiagnostics `json:"inputs"` + InspectedAt time.Time `json:"inspected_at"` + Phase string `json:"phase"` + SignalScanCount int `json:"signal_scan_count"` + SignalScanLimit int `json:"signal_scan_limit"` + Terms []*workflowWaitTermDiagnostic `json:"terms"` + Truncated bool `json:"truncated"` + WorkflowAttempt int `json:"workflow_attempt"` +} + +type workflowWaitInputDiagnostics struct { + Deps []*workflowWaitDepDiagnostic `json:"deps"` + Signals []*workflowWaitSignalDiagnostic `json:"signals"` + Timers []*workflowWaitTimerDiagnostic `json:"timers"` +} + +type workflowWaitDepDiagnostic struct { + Available bool `json:"available"` + FinalizedAt *time.Time `json:"finalized_at,omitempty"` + State string `json:"state,omitempty"` + TaskName string `json:"task_name"` +} + +type workflowWaitSignalDiagnostic struct { + IncludedCount int64 `json:"included_count"` + Key string `json:"key"` + LastID *int64 `json:"last_id,omitempty"` +} + +type workflowWaitTermDiagnostic struct { + LastMatchedID *int64 `json:"last_matched_id,omitempty"` + MatchedCount int64 `json:"matched_count"` + Name string `json:"name"` + RequiredCount int64 `json:"required_count"` + Satisfied bool `json:"satisfied"` +} + +type workflowWaitTimerDiagnostic struct { + FireAt *time.Time `json:"fire_at,omitempty"` + Fired bool `json:"fired"` + Name string `json:"name"` +} + +func (a *workflowTaskWaitDiagnosticsEndpoint[TTx]) Execute(ctx context.Context, req *workflowTaskWaitDiagnosticsRequest) (*workflowTaskWaitDiagnosticsResponse, 
error) { + workflow, err := a.Client.WorkflowFromExistingID(ctx, req.ID, nil) + if err != nil { + if errors.Is(err, rivertype.ErrNotFound) { + return nil, apierror.NewNotFoundf("Workflow not found: %s.", req.ID) + } + return nil, fmt.Errorf("error loading workflow: %w", err) + } + + result, err := workflow.TaskWaitDiagnostics(ctx, req.TaskName, nil) + if err != nil { + var signalUnknownTaskErr *riverworkflow.SignalUnknownTaskError + if errors.As(err, &signalUnknownTaskErr) { + return nil, apierror.NewNotFoundf("%s.", signalUnknownTaskErr) + } + + return nil, fmt.Errorf("error loading task wait diagnostics: %w", err) + } + + return workflowTaskWaitDiagnosticsFromInternal(result), nil +} + // // workflowListEndpoint // @@ -491,67 +822,101 @@ func internalJobToSerializableJob(internal *rivertype.JobRow) *riverJobSerializa const ( workflowTaskWaitReasonDependencies = "dependencies" - workflowTaskWaitReasonDependenciesAndGate = "dependencies_and_gate" - workflowTaskWaitReasonGate = "gate" + workflowTaskWaitReasonDependenciesAndWait = "dependencies_and_wait" workflowTaskWaitReasonNone = "none" + workflowTaskWaitReasonWait = "wait" ) type workflowTaskSerializable struct { riverJobSerializable Deps []string `json:"deps"` - Gate *workflowTaskGate `json:"gate,omitempty"` IgnoreCancelledDeps bool `json:"ignore_cancelled_deps"` IgnoreDeletedDeps bool `json:"ignore_deleted_deps"` IgnoreDiscardedDeps bool `json:"ignore_discarded_deps"` Name string `json:"name"` StagedAt *time.Time `json:"staged_at,omitempty"` + Wait *workflowTaskWait `json:"wait,omitempty"` WaitReason string `json:"wait_reason"` WorkflowID string `json:"workflow_id"` } -type workflowTaskGate struct { - ActiveAt *time.Time `json:"active_at,omitempty"` - DeclaredSignals []string `json:"declared_signals"` - Enabled bool `json:"enabled"` - ExprCEL string `json:"expr_cel"` - Phase string `json:"phase"` - Satisfaction *workflowTaskGateSatisfaction `json:"satisfaction,omitempty"` - SatisfiedAt *time.Time 
`json:"satisfied_at,omitempty"` - Timers []*workflowTaskGateTimer `json:"timers"` +type workflowTaskWait struct { + Evidence *workflowTaskWaitEvidence `json:"evidence,omitempty"` + ExprCEL string `json:"expr_cel"` + Inputs workflowTaskWaitInputs `json:"inputs"` + ResolvedAt *time.Time `json:"resolved_at,omitempty"` + Phase string `json:"phase"` + StartedAt *time.Time `json:"started_at,omitempty"` + Summary string `json:"summary,omitempty"` + Terms []*workflowTaskWaitTerm `json:"terms"` } -type workflowTaskGateSatisfaction struct { - AsOf time.Time `json:"as_of"` - Attempt int `json:"attempt"` - Signals []*workflowTaskGateSatisfactionSignal `json:"signals"` - Timers []*workflowTaskGateSatisfactionTimer `json:"timers"` +type workflowTaskWaitTerm struct { + ExprCEL string `json:"expr_cel,omitempty"` + Kind string `json:"kind"` + Label string `json:"label"` + Name string `json:"name"` + Result *workflowTaskWaitTermResult `json:"result,omitempty"` + SignalKey string `json:"signal_key,omitempty"` + TimerName string `json:"timer_name,omitempty"` } -type workflowTaskGateSatisfactionSignal struct { - Count int64 `json:"count"` - Key string `json:"key"` - LastSignalID int64 `json:"last_signal_id"` +type workflowTaskWaitTermResult struct { + LastMatchedID *int64 `json:"last_matched_id,omitempty"` + MatchedCount int64 `json:"matched_count"` + RequiredCount int64 `json:"required_count"` + Satisfied bool `json:"satisfied"` } -type workflowTaskGateSatisfactionTimer struct { - FireAt *time.Time `json:"fire_at,omitempty"` - Fired bool `json:"fired"` - Name string `json:"name"` +type workflowTaskWaitEvidence struct { + EvaluatedAt time.Time `json:"evaluated_at"` + WorkflowAttempt int `json:"workflow_attempt"` +} + +type workflowTaskWaitInputs struct { + Deps []*workflowTaskWaitDepInput `json:"deps"` + Signals []*workflowTaskWaitSignalInput `json:"signals"` + Timers []*workflowTaskWaitTimer `json:"timers"` +} + +type workflowTaskWaitDepInput struct { + Result 
*workflowTaskWaitDepInputResult `json:"result,omitempty"` + TaskName string `json:"task_name"` +} + +type workflowTaskWaitDepInputResult struct { + Available bool `json:"available"` + FinalizedAt *time.Time `json:"finalized_at,omitempty"` + State string `json:"state,omitempty"` +} + +type workflowTaskWaitSignalInput struct { + Key string `json:"key"` + Result *workflowTaskWaitSignalInputResult `json:"result,omitempty"` } -type workflowTaskGateTimer struct { +type workflowTaskWaitSignalInputResult struct { + IncludedCount int64 `json:"included_count"` + LastIncludedID *int64 `json:"last_included_id,omitempty"` +} + +type workflowTaskWaitTimer struct { After string `json:"after,omitempty"` AfterUS *int64 `json:"after_us,omitempty"` AfterSeconds *float64 `json:"after_seconds,omitempty"` - Anchor *workflowTaskGateTimerAnchor `json:"anchor,omitempty"` + Anchor *workflowTaskWaitTimerAnchor `json:"anchor,omitempty"` FireAt *time.Time `json:"fire_at,omitempty"` - HasAfter bool `json:"has_after"` - HasFireAt bool `json:"has_fire_at"` Name string `json:"name"` + Result *workflowTaskWaitTimerResult `json:"result,omitempty"` } -type workflowTaskGateTimerAnchor struct { +type workflowTaskWaitTimerResult struct { + FireAt *time.Time `json:"fire_at,omitempty"` + Fired bool `json:"fired"` +} + +type workflowTaskWaitTimerAnchor struct { Kind string `json:"kind"` Task string `json:"task,omitempty"` } @@ -561,116 +926,226 @@ func internalWorkflowTaskToSerializableTask(task *riverpro.WorkflowTaskWithJob) return nil } - gateView := task.Gate.View() - return &workflowTaskSerializable{ riverJobSerializable: *internalJobToSerializableJob(task.Job), Deps: task.Deps, - Gate: workflowTaskGateFromInternal(gateView), IgnoreCancelledDeps: task.IgnoreCancelledDeps, IgnoreDeletedDeps: task.IgnoreDeletedDeps, IgnoreDiscardedDeps: task.IgnoreDiscardedDeps, Name: task.Name, StagedAt: workflowTaskStagedAtFromMetadata(task.Job.Metadata), - WaitReason: 
workflowTaskWaitReasonFromInternal(task.WaitReason), + Wait: workflowTaskWaitFromInternal(task.Wait), + WaitReason: workflowTaskWaitReasonFromInternal(task.PendingReason), WorkflowID: task.WorkflowID, } } -func workflowTaskWaitReasonFromInternal(waitReason riverpro.WorkflowTaskWaitReason) string { +func workflowTaskWaitReasonFromInternal(waitReason riverpro.WorkflowTaskPendingReason) string { switch waitReason { - case riverpro.WorkflowTaskWaitReasonDependenciesAndGate: - return workflowTaskWaitReasonDependenciesAndGate - case riverpro.WorkflowTaskWaitReasonDependencies: + case riverpro.WorkflowTaskPendingReasonDependenciesAndWait: + return workflowTaskWaitReasonDependenciesAndWait + case riverpro.WorkflowTaskPendingReasonDependencies: return workflowTaskWaitReasonDependencies - case riverpro.WorkflowTaskWaitReasonGate: - return workflowTaskWaitReasonGate + case riverpro.WorkflowTaskPendingReasonWait: + return workflowTaskWaitReasonWait default: return workflowTaskWaitReasonNone } } -func workflowTaskGateFromInternal(gateView riverworkflow.GateView) *workflowTaskGate { - if !gateView.Enabled { +func workflowTaskWaitFromInternal(wait *riverworkflow.Wait) *workflowTaskWait { + if wait == nil { return nil } - result := &workflowTaskGate{ - ActiveAt: gateView.ActiveAt, - DeclaredSignals: gateView.DeclaredSignals, - Enabled: gateView.Enabled, - ExprCEL: gateView.ExprCEL, - Phase: gateView.Phase, - SatisfiedAt: gateView.SatisfiedAt, - Timers: make([]*workflowTaskGateTimer, 0, len(gateView.Timers)), + result := &workflowTaskWait{ + Evidence: workflowTaskWaitEvidenceFromInternal(wait.Evidence), + ExprCEL: wait.Expr, + Inputs: workflowTaskWaitInputsFromInternal(wait.Inputs), + ResolvedAt: wait.ResolvedAt, + Phase: wait.Phase.String(), + StartedAt: wait.StartedAt, + Summary: wait.Summary, + Terms: make([]*workflowTaskWaitTerm, 0, len(wait.Terms)), } - for _, timer := range gateView.Timers { - if timer == nil { - continue - } + for i := range wait.Terms { + term := 
wait.Terms[i] + result.Terms = append(result.Terms, &workflowTaskWaitTerm{ + ExprCEL: term.Expr, + Kind: string(term.Kind), + Label: term.Label, + Name: term.Name, + Result: workflowTaskWaitTermResultFromInternal(term), + SignalKey: term.SignalKey, + TimerName: term.TimerName, + }) + } - serializedTimer := &workflowTaskGateTimer{ - After: timer.After, - AfterUS: timer.AfterUS, - HasAfter: timer.HasAfter, - HasFireAt: timer.HasFireAt, - Name: timer.Name, + return result +} + +func workflowTaskWaitEvidenceFromInternal(evidence *riverworkflow.WaitEvidence) *workflowTaskWaitEvidence { + if evidence == nil { + return nil + } + + return &workflowTaskWaitEvidence{ + EvaluatedAt: evidence.EvaluatedAt, + WorkflowAttempt: evidence.WorkflowAttempt, + } +} + +func workflowTaskWaitInputsFromInternal(inputs riverworkflow.WaitInputState) workflowTaskWaitInputs { + return workflowTaskWaitInputs{ + Deps: sliceutil.Map(inputs.Deps, workflowTaskWaitDepInputFromInternal), + Signals: sliceutil.Map(inputs.Signals, workflowTaskWaitSignalInputFromInternal), + Timers: sliceutil.Map(inputs.Timers, workflowTaskWaitTimerFromInternal), + } +} + +func workflowTaskWaitDepInputFromInternal(dep riverworkflow.WaitDepInput) *workflowTaskWaitDepInput { + result := &workflowTaskWaitDepInput{ + TaskName: dep.TaskName, + } + if dep.Result != nil { + result.Result = &workflowTaskWaitDepInputResult{ + Available: dep.Result.Available, + FinalizedAt: dep.Result.FinalizedAt, + State: dep.Result.State, } + } + return result +} - if timer.HasAfter && timer.AfterUS != nil { - afterSeconds := float64(*timer.AfterUS) / float64(time.Second/time.Microsecond) - serializedTimer.AfterSeconds = &afterSeconds +func workflowTaskWaitSignalInputFromInternal(signal riverworkflow.WaitSignalInput) *workflowTaskWaitSignalInput { + result := &workflowTaskWaitSignalInput{ + Key: signal.Key, + } + if signal.Result != nil { + result.Result = &workflowTaskWaitSignalInputResult{ + IncludedCount: signal.Result.IncludedCount, + 
LastIncludedID: signal.Result.LastIncludedID, } - if timer.HasFireAt && timer.FireAt != nil { - serializedTimer.FireAt = timer.FireAt + } + return result +} + +func workflowTaskWaitTermResultFromInternal(term riverworkflow.WaitTermStatus) *workflowTaskWaitTermResult { + if term.Result == nil { + return nil + } + + return &workflowTaskWaitTermResult{ + LastMatchedID: term.Result.LastMatchedID, + MatchedCount: term.Result.MatchedCount, + RequiredCount: term.RequiredCount, + Satisfied: term.Result.Satisfied, + } +} + +func workflowTaskWaitTimerFromInternal(timer riverworkflow.WaitTimerInput) *workflowTaskWaitTimer { + serializedTimer := &workflowTaskWaitTimer{Name: timer.Name} + if timer.After != nil { + after := *timer.After + afterUS := after.Microseconds() + serializedTimer.After = after.String() + serializedTimer.AfterUS = &afterUS + afterSeconds := float64(afterUS) / float64(time.Second/time.Microsecond) + serializedTimer.AfterSeconds = &afterSeconds + } + if timer.FireAt != nil { + serializedTimer.FireAt = timer.FireAt + } + if timer.Anchor != nil { + serializedTimer.Anchor = &workflowTaskWaitTimerAnchor{ + Kind: string(timer.Anchor.Kind), + Task: timer.Anchor.TaskName, } - if timer.Anchor != nil { - serializedTimer.Anchor = &workflowTaskGateTimerAnchor{ - Kind: string(timer.Anchor.Kind), - Task: timer.Anchor.Task, - } + } + if timer.Result != nil { + serializedTimer.Result = &workflowTaskWaitTimerResult{ + FireAt: timer.Result.FireAt, + Fired: timer.Result.Fired, } + } + return serializedTimer +} - result.Timers = append(result.Timers, serializedTimer) +func workflowTaskSignalFromInternal(signal riverworkflow.Signal) *workflowTaskSignal { + return &workflowTaskSignal{ + Attempt: signal.Attempt, + CreatedAt: signal.CreatedAt, + ID: signal.ID, + Key: signal.Key, + Payload: signal.Payload, + Source: signal.Source, } +} - if gateView.Satisfaction == nil { - return result +func workflowTaskWaitDiagnosticsFromInternal(result *riverworkflow.WaitDiagnostics) 
*workflowTaskWaitDiagnosticsResponse { + if result == nil { + return nil } - satisfactionSignals := make([]*workflowTaskGateSatisfactionSignal, 0, len(gateView.Satisfaction.Signals)) - for _, signal := range gateView.Satisfaction.Signals { - if signal == nil { - continue - } - satisfactionSignals = append(satisfactionSignals, &workflowTaskGateSatisfactionSignal{ - Count: signal.Count, - Key: signal.Key, - LastSignalID: signal.LastSignalID, - }) + response := &workflowTaskWaitDiagnosticsResponse{ + ExprResult: result.ExprResult, + Inputs: workflowWaitInputDiagnosticsFromInternal(result.Inputs), + InspectedAt: result.InspectedAt, + Phase: result.Phase.String(), + SignalScanCount: result.SignalScanCount, + SignalScanLimit: result.SignalScanLimit, + Terms: sliceutil.Map(result.Terms, workflowWaitTermDiagnosticFromInternal), + Truncated: result.Truncated, + WorkflowAttempt: result.WorkflowAttempt, + } + if result.EvalError != nil { + response.EvalError = result.EvalError.Error() } + return response +} - satisfactionTimers := make([]*workflowTaskGateSatisfactionTimer, 0, len(gateView.Satisfaction.Timers)) - for _, timer := range gateView.Satisfaction.Timers { - if timer == nil { - continue - } - satisfactionTimers = append(satisfactionTimers, &workflowTaskGateSatisfactionTimer{ - FireAt: timer.FireAt, - Fired: timer.Fired, - Name: timer.Name, - }) +func workflowWaitInputDiagnosticsFromInternal(inputs riverworkflow.WaitDiagnosticsInputs) workflowWaitInputDiagnostics { + return workflowWaitInputDiagnostics{ + Deps: sliceutil.Map(inputs.Deps, workflowWaitDepDiagnosticFromInternal), + Signals: sliceutil.Map(inputs.Signals, workflowWaitSignalDiagnosticFromInternal), + Timers: sliceutil.Map(inputs.Timers, workflowWaitTimerDiagnosticFromInternal), } +} - result.Satisfaction = &workflowTaskGateSatisfaction{ - AsOf: gateView.Satisfaction.AsOf, - Attempt: gateView.Satisfaction.Attempt, - Signals: satisfactionSignals, - Timers: satisfactionTimers, +func 
workflowWaitDepDiagnosticFromInternal(dep riverworkflow.WaitDepDiagnostic) *workflowWaitDepDiagnostic { + return &workflowWaitDepDiagnostic{ + Available: dep.Available, + FinalizedAt: dep.FinalizedAt, + State: dep.State, + TaskName: dep.TaskName, } +} - return result +func workflowWaitSignalDiagnosticFromInternal(signal riverworkflow.WaitSignalDiagnostic) *workflowWaitSignalDiagnostic { + return &workflowWaitSignalDiagnostic{ + IncludedCount: signal.IncludedCount, + Key: signal.Key, + LastID: signal.LastID, + } +} + +func workflowWaitTermDiagnosticFromInternal(term riverworkflow.WaitTermDiagnostic) *workflowWaitTermDiagnostic { + return &workflowWaitTermDiagnostic{ + LastMatchedID: term.LastMatchedID, + MatchedCount: term.MatchedCount, + Name: term.Name, + RequiredCount: term.RequiredCount, + Satisfied: term.Satisfied, + } +} + +func workflowWaitTimerDiagnosticFromInternal(timer riverworkflow.WaitTimerDiagnostic) *workflowWaitTimerDiagnostic { + return &workflowWaitTimerDiagnostic{ + FireAt: timer.FireAt, + Fired: timer.Fired, + Name: timer.Name, + } } func workflowTaskStagedAtFromMetadata(metadata json.RawMessage) *time.Time { diff --git a/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go b/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go index 9f571dc8..7265225a 100644 --- a/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go +++ b/riverproui/internal/prohandler/pro_handler_api_endpoints_test.go @@ -4,6 +4,8 @@ import ( "context" "encoding/json" "log/slog" + "net/http" + "net/http/httptest" "testing" "time" @@ -28,6 +30,7 @@ import ( "riverqueue.com/riverui/internal/apibundle" "riverqueue.com/riverui/internal/riverinternaltest/testfactory" + "riverqueue.com/riverui/internal/uicommontest" "riverqueue.com/riverui/riverproui/internal/protestfactory" ) @@ -154,7 +157,7 @@ func TestProAPIHandlerWorkflowGet(t *testing.T) { ctx := context.Background() - t.Run("SuccessIncludesGateAndWaitReason", func(t *testing.T) { + 
t.Run("SuccessIncludesWaitAndWaitReason", func(t *testing.T) { t.Parallel() endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowGetEndpoint) @@ -165,11 +168,12 @@ func TestProAPIHandlerWorkflowGet(t *testing.T) { })) now := time.Now().UTC().Truncate(time.Second) - gateSpec := &riverworkflow.GateSpec{ - Expr: `signals["approval"].size() > 0 || timers["review_sla"].fired`, - Signals: []string{"approval"}, - Timers: []riverworkflow.Timer{ - riverworkflow.TimerAfterWorkflowCreated("review_sla", 30*time.Minute), + waitSpec := &riverworkflow.WaitSpec{ + Expr: "collect_inputs_ready && (approval_received || review_sla)", + Terms: []riverworkflow.WaitTermSpec{ + riverworkflow.WaitTerm("collect_inputs_ready", `deps["collect_inputs"].output.ready == true`).Label("Inputs collected"), + riverworkflow.WaitTermSignal("approval_received", "approval", `payload.approved == true`).Label("Approval received"), + riverworkflow.WaitTermTimer(riverworkflow.TimerAfterWorkflowCreated("review_sla", 30*time.Minute)).Label("Review SLA reached"), }, } @@ -179,9 +183,9 @@ func TestProAPIHandlerWorkflowGet(t *testing.T) { State: ptrutil.Ptr(rivertype.JobStateCompleted), }) - gatedJob := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ - Metadata: workflowMetadataWithGate("wf_get", "await_review", []string{"collect_inputs"}, gateSpec, map[string]any{ - "active_at": now.Format(time.RFC3339Nano), + waitingJob := jobWithSchema(ctx, t, bundle.exec, bundle.schema, &testfactory.JobOpts{ + Metadata: workflowMetadataWithWait("wf_get", "await_review", []string{"collect_inputs"}, waitSpec, map[string]any{ + "started_at": now.Format(time.RFC3339Nano), "timers": map[string]any{ "review_sla": map[string]any{ "fire_at": now.Add(30 * time.Minute).Format(time.RFC3339Nano), @@ -203,34 +207,42 @@ func TestProAPIHandlerWorkflowGet(t *testing.T) { } require.Equal(t, workflowTaskWaitReasonNone, taskByID[dependencyJob.ID].WaitReason) - require.Nil(t, taskByID[dependencyJob.ID].Gate) - - 
gatedTask := taskByID[gatedJob.ID] - require.NotNil(t, gatedTask) - require.Equal(t, "await_review", gatedTask.Name) - require.Equal(t, "wf_get", gatedTask.WorkflowID) - require.Equal(t, []string{"collect_inputs"}, gatedTask.Deps) - require.Equal(t, workflowTaskWaitReasonGate, gatedTask.WaitReason) - require.NotNil(t, gatedTask.Gate) - require.True(t, gatedTask.Gate.Enabled) - require.Equal(t, "waiting", gatedTask.Gate.Phase) - require.Equal(t, gateSpec.Expr, gatedTask.Gate.ExprCEL) - require.Equal(t, []string{"approval"}, gatedTask.Gate.DeclaredSignals) - require.Len(t, gatedTask.Gate.Timers, 1) - require.NotNil(t, gatedTask.Gate.ActiveAt) - require.Nil(t, gatedTask.Gate.Satisfaction) - - timer := gatedTask.Gate.Timers[0] + require.Nil(t, taskByID[dependencyJob.ID].Wait) + + waitingTask := taskByID[waitingJob.ID] + require.NotNil(t, waitingTask) + require.Equal(t, "await_review", waitingTask.Name) + require.Equal(t, "wf_get", waitingTask.WorkflowID) + require.Equal(t, []string{"collect_inputs"}, waitingTask.Deps) + require.Equal(t, workflowTaskWaitReasonWait, waitingTask.WaitReason) + require.NotNil(t, waitingTask.Wait) + require.Equal(t, "waiting", waitingTask.Wait.Phase) + require.Equal(t, waitSpec.Expr, waitingTask.Wait.ExprCEL) + require.NotNil(t, waitingTask.Wait.StartedAt) + require.Nil(t, waitingTask.Wait.ResolvedAt) + require.Len(t, waitingTask.Wait.Terms, 3) + require.Len(t, waitingTask.Wait.Inputs.Signals, 1) + require.Len(t, waitingTask.Wait.Inputs.Timers, 1) + require.Equal(t, "collect_inputs_ready", waitingTask.Wait.Terms[0].Name) + require.Equal(t, `deps["collect_inputs"].output.ready == true`, waitingTask.Wait.Terms[0].ExprCEL) + require.Equal(t, "approval_received", waitingTask.Wait.Terms[1].Name) + require.Equal(t, `payload.approved == true`, waitingTask.Wait.Terms[1].ExprCEL) + require.Equal(t, "review_sla", waitingTask.Wait.Terms[2].Name) + require.Empty(t, waitingTask.Wait.Terms[2].ExprCEL) + + require.Equal(t, "approval", 
waitingTask.Wait.Inputs.Signals[0].Key) + require.Nil(t, waitingTask.Wait.Inputs.Signals[0].Result) + + timer := waitingTask.Wait.Inputs.Timers[0] require.Equal(t, "review_sla", timer.Name) require.NotEmpty(t, timer.After) require.NotNil(t, timer.AfterUS) - require.True(t, timer.HasAfter) - require.True(t, timer.HasFireAt) require.NotNil(t, timer.AfterSeconds) require.InDelta(t, 1800, *timer.AfterSeconds, 0.001) require.NotNil(t, timer.Anchor) require.Equal(t, string(riverworkflow.TimerAnchorKindWorkflowCreatedAt), timer.Anchor.Kind) require.NotNil(t, timer.FireAt) + require.Nil(t, timer.Result) }) t.Run("NotFound", func(t *testing.T) { @@ -244,6 +256,409 @@ func TestProAPIHandlerWorkflowGet(t *testing.T) { }) } +func TestProAPIHandlerWorkflowTaskSignals(t *testing.T) { + t.Parallel() + + ctx := context.Background() + + t.Run("SuccessReturnsSignalHistoryForTaskNamesWithSlash", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.False(t, resp.HasMore) + require.Nil(t, resp.NextCursorID) + require.Equal(t, workflowTaskSignalScopeHistory, resp.Scope) + require.Len(t, resp.Signals, 3) + require.Equal(t, []int64{fixture.thirdSignal.ID, fixture.secondSignal.ID, fixture.firstSignal.ID}, []int64{resp.Signals[0].ID, resp.Signals[1].ID, resp.Signals[2].ID}) + }) + + t.Run("EvidenceScopeReturnsIncludedSignals", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", 
+ Scope: workflowTaskSignalScopeEvidence, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeEvidence, resp.Scope) + require.Len(t, resp.Signals, 2) + require.Equal(t, []int64{fixture.secondSignal.ID, fixture.firstSignal.ID}, []int64{resp.Signals[0].ID, resp.Signals[1].ID}) + require.Equal(t, []int{1, 1}, []int{resp.Signals[0].Attempt, resp.Signals[1].Attempt}) + }) + + t.Run("EvidenceScopeUsesEvidenceAttemptAndBoundary", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + attemptTwoSignal := signalApprovalOnAttempt(ctx, t, bundle, fixture.workflowID, 2) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", + Scope: workflowTaskSignalScopeEvidence, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeEvidence, resp.Scope) + require.Len(t, resp.Signals, 2) + require.Equal(t, []int64{fixture.secondSignal.ID, fixture.firstSignal.ID}, []int64{resp.Signals[0].ID, resp.Signals[1].ID}) + require.NotContains(t, []int64{resp.Signals[0].ID, resp.Signals[1].ID}, fixture.thirdSignal.ID) + require.NotContains(t, []int64{resp.Signals[0].ID, resp.Signals[1].ID}, attemptTwoSignal.ID) + require.Equal(t, []int{1, 1}, []int{resp.Signals[0].Attempt, resp.Signals[1].Attempt}) + }) + + t.Run("HistoryScopeDefaultsToCurrentWorkflowAttempt", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + attemptTwoSignal := signalApprovalOnAttempt(ctx, t, bundle, fixture.workflowID, 2) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", + Scope: 
workflowTaskSignalScopeHistory, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeHistory, resp.Scope) + require.Len(t, resp.Signals, 1) + require.Equal(t, attemptTwoSignal.ID, resp.Signals[0].ID) + require.Equal(t, 2, resp.Signals[0].Attempt) + }) + + t.Run("HistoryScopeUsesRequestedWorkflowAttempt", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + attempt := 1 + attemptTwoSignal := signalApprovalOnAttempt(ctx, t, bundle, fixture.workflowID, 2) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", + Scope: workflowTaskSignalScopeHistory, + TaskName: fixture.taskName, + WorkflowAttempt: &attempt, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeHistory, resp.Scope) + require.Len(t, resp.Signals, 3) + require.Equal(t, []int64{fixture.thirdSignal.ID, fixture.secondSignal.ID, fixture.firstSignal.ID}, []int64{resp.Signals[0].ID, resp.Signals[1].ID, resp.Signals[2].ID}) + require.NotContains(t, []int64{resp.Signals[0].ID, resp.Signals[1].ID, resp.Signals[2].ID}, attemptTwoSignal.ID) + require.Equal(t, []int{1, 1, 1}, []int{resp.Signals[0].Attempt, resp.Signals[1].Attempt, resp.Signals[2].Attempt}) + }) + + t.Run("HistoryScopeWithoutKeyReturnsAllDeclaredSignals", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Scope: workflowTaskSignalScopeHistory, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeHistory, resp.Scope) + require.Len(t, resp.Signals, 4) + 
require.Equal(t, + []int64{fixture.thirdSignal.ID, fixture.escalationSignal.ID, fixture.secondSignal.ID, fixture.firstSignal.ID}, + []int64{resp.Signals[0].ID, resp.Signals[1].ID, resp.Signals[2].ID, resp.Signals[3].ID}, + ) + require.Equal(t, []string{"approval", "escalation", "approval", "approval"}, []string{resp.Signals[0].Key, resp.Signals[1].Key, resp.Signals[2].Key, resp.Signals[3].Key}) + require.Equal(t, []int{1, 1, 1, 1}, []int{resp.Signals[0].Attempt, resp.Signals[1].Attempt, resp.Signals[2].Attempt, resp.Signals[3].Attempt}) + }) + + t.Run("EvidenceScopeWithoutKeyReturnsAllIncludedDeclaredSignals", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Scope: workflowTaskSignalScopeEvidence, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeEvidence, resp.Scope) + require.Len(t, resp.Signals, 3) + require.Equal(t, + []int64{fixture.escalationSignal.ID, fixture.secondSignal.ID, fixture.firstSignal.ID}, + []int64{resp.Signals[0].ID, resp.Signals[1].ID, resp.Signals[2].ID}, + ) + require.Equal(t, []string{"escalation", "approval", "approval"}, []string{resp.Signals[0].Key, resp.Signals[1].Key, resp.Signals[2].Key}) + require.Equal(t, []int{1, 1, 1}, []int{resp.Signals[0].Attempt, resp.Signals[1].Attempt, resp.Signals[2].Attempt}) + }) + + t.Run("TermNameResolvesSignalKey", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Scope: workflowTaskSignalScopeEvidence, + TaskName: fixture.taskName, + 
TermName: "approval_received", + }) + require.NoError(t, err) + require.Equal(t, workflowTaskSignalScopeEvidence, resp.Scope) + require.Len(t, resp.Signals, 2) + require.Equal(t, []int64{fixture.secondSignal.ID, fixture.firstSignal.ID}, []int64{resp.Signals[0].ID, resp.Signals[1].ID}) + require.Equal(t, []string{"approval", "approval"}, []string{resp.Signals[0].Key, resp.Signals[1].Key}) + }) + + t.Run("RejectsKeyAndTermNameTogether", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "approval", + TaskName: fixture.taskName, + TermName: "approval_received", + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "mutually exclusive") + }) + + t.Run("RejectsInvalidScope", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Scope: "everything", + TaskName: fixture.taskName, + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "scope") + }) + + t.Run("RejectsUnknownTermName", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + TaskName: fixture.taskName, + TermName: "missing_term", + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "not found") + }) + + t.Run("RejectsNonSignalTermName", func(t 
*testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + TaskName: fixture.timerOnlyTaskName, + TermName: "review_timeout", + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "not signal") + }) + + t.Run("EvidenceScopeWithoutEvidenceReturnsBadRequest", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Scope: workflowTaskSignalScopeEvidence, + TaskName: fixture.unresolvedTaskName, + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "Wait evidence is unavailable") + }) + + t.Run("WithoutKeyPaginationIsOrderedAcrossDeclaredSignalKeys", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + limit := 2 + + firstPage, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Limit: &limit, + Scope: workflowTaskSignalScopeHistory, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.True(t, firstPage.HasMore) + require.NotNil(t, firstPage.NextCursorID) + require.Len(t, firstPage.Signals, 2) + require.Equal(t, + []int64{fixture.thirdSignal.ID, fixture.escalationSignal.ID}, + []int64{firstPage.Signals[0].ID, firstPage.Signals[1].ID}, + ) + + secondPage, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + CursorID: firstPage.NextCursorID, + ID: 
fixture.workflowID, + Limit: &limit, + Scope: workflowTaskSignalScopeHistory, + TaskName: fixture.taskName, + }) + require.NoError(t, err) + require.False(t, secondPage.HasMore) + require.Len(t, secondPage.Signals, 2) + require.Equal(t, + []int64{fixture.secondSignal.ID, fixture.firstSignal.ID}, + []int64{secondPage.Signals[0].ID, secondPage.Signals[1].ID}, + ) + }) + + t.Run("UnknownWorkflowReturnsNotFound", func(t *testing.T) { + t.Parallel() + + endpoint, _ := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: "missing-workflow", + TaskName: "await/review", + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "Workflow not found") + }) + + t.Run("UnknownTaskReturnsNotFound", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + TaskName: "missing/task", + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "unknown task") + }) + + t.Run("UndeclaredKeyReturnsBadRequest", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + Key: "missing_key", + TaskName: fixture.taskName, + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "not declared") + }) + + t.Run("TaskWithNoDeclaredSignalKeysReturnsBadRequest", func(t *testing.T) { + t.Parallel() + + endpoint, bundle := setupEndpoint(ctx, t, NewWorkflowTaskSignalsEndpoint) + fixture := 
setupWorkflowTaskSignalsFixture(ctx, t, bundle) + + resp, err := apitest.InvokeHandler(ctx, endpoint.Execute, testMountOpts(t), &workflowTaskSignalsRequest{ + ID: fixture.workflowID, + TaskName: fixture.timerOnlyTaskName, + }) + require.Nil(t, resp) + require.Error(t, err) + require.Contains(t, err.Error(), "declares no signal keys") + }) +} + +func TestWorkflowTaskSignalsRequestExtractRaw(t *testing.T) { + t.Parallel() + + t.Run("ParsesAndCapsLimit", func(t *testing.T) { + t.Parallel() + + req := &workflowTaskSignalsRequest{} + httpReq := httptest.NewRequestWithContext(t.Context(), http.MethodGet, "/api/pro/workflows/wf/task-signals?task_name=await%2Freview&key=approval&cursor_id=42&desc=false&limit=200&scope=history&workflow_attempt=2", nil) + httpReq.SetPathValue("id", "wf") + + err := req.ExtractRaw(httpReq) + require.NoError(t, err) + require.Equal(t, "wf", req.ID) + require.Equal(t, "await/review", req.TaskName) + require.Equal(t, "approval", req.Key) + require.NotNil(t, req.CursorID) + require.Equal(t, int64(42), *req.CursorID) + require.NotNil(t, req.Desc) + require.False(t, *req.Desc) + require.NotNil(t, req.Limit) + require.Equal(t, 200, *req.Limit) + require.Equal(t, workflowTaskSignalScopeHistory, req.Scope) + require.NotNil(t, req.WorkflowAttempt) + require.Equal(t, 2, *req.WorkflowAttempt) + }) + + t.Run("ReturnsBadRequestForInvalidQueryValues", func(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + url string + }{ + {name: "CursorID", url: "/api/pro/workflows/wf/task-signals?task_name=await_review&cursor_id=nope"}, + {name: "Desc", url: "/api/pro/workflows/wf/task-signals?task_name=await_review&desc=nope"}, + {name: "Limit", url: "/api/pro/workflows/wf/task-signals?task_name=await_review&limit=nope"}, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + t.Parallel() + + req := &workflowTaskSignalsRequest{} + httpReq := httptest.NewRequestWithContext(t.Context(), http.MethodGet, 
testCase.url, nil) + httpReq.SetPathValue("id", "wf") + + err := req.ExtractRaw(httpReq) + require.Error(t, err) + }) + } + }) +} + func TestProAPIHandlerWorkflowRetry(t *testing.T) { t.Parallel() @@ -411,7 +826,7 @@ func workflowMetadata(workflowID, taskName string, deps []string) []byte { return buf } -func workflowMetadataWithGate(workflowID, taskName string, deps []string, gate *riverworkflow.GateSpec, gateState map[string]any) []byte { +func workflowMetadataWithWait(workflowID, taskName string, deps []string, wait *riverworkflow.WaitSpec, waitState map[string]any) []byte { if deps == nil { deps = []string{} } @@ -421,11 +836,11 @@ func workflowMetadataWithGate(workflowID, taskName string, deps []string, gate * "task": taskName, "deps": deps, } - if gate != nil { - meta["river:workflow_gate"] = gate + if wait != nil { + meta["river:workflow_wait"] = persistedWaitSpec(wait) } - if gateState != nil { - meta["river:workflow_gate_state"] = gateState + if waitState != nil { + meta["river:workflow_wait_state"] = waitState } buf, err := json.Marshal(meta) @@ -434,3 +849,206 @@ func workflowMetadataWithGate(workflowID, taskName string, deps []string, gate * } return buf } + +func persistedWaitSpec(wait *riverworkflow.WaitSpec) map[string]any { + if wait == nil { + return nil + } + + raw, err := json.Marshal(wait) + if err != nil { + panic(err) + } + + var persisted map[string]any + if err := json.Unmarshal(raw, &persisted); err != nil { + panic(err) + } + + return persisted +} + +type workflowTaskSignalsFixture struct { + escalationSignal *riverpro.WorkflowSignalResult + firstSignal *riverpro.WorkflowSignalResult + secondSignal *riverpro.WorkflowSignalResult + taskName string + thirdSignal *riverpro.WorkflowSignalResult + timerOnlyTaskName string + unresolvedTaskName string + workflowID string +} + +func setupWorkflowTaskSignalsFixture(ctx context.Context, t *testing.T, bundle *setupEndpointTestBundle) *workflowTaskSignalsFixture { + t.Helper() + + workflowID := 
"wf_task_signals_" + time.Now().UTC().Format("150405.000000000") + + waitSpec := &riverworkflow.WaitSpec{ + Expr: "approval_received || escalation_received", + Terms: []riverworkflow.WaitTermSpec{ + riverworkflow.WaitTermSignal("approval_received", "approval", "true").Label("Approval received"), + riverworkflow.WaitTermSignal("escalation_received", "escalation", "true").Label("Escalation received"), + }, + } + + taskName := "await/review" + timerOnlyTaskName := "timer/only" + unresolvedTaskName := "await/unresolved" + workflow := bundle.client.NewWorkflow(&riverpro.WorkflowOpts{ + ID: workflowID, + Name: "wf_task_signals", + }) + workflow.Add("collect_inputs", uicommontest.NoOpArgs{Name: "collect"}, nil, nil) + workflow.Add(taskName, uicommontest.NoOpArgs{Name: "gated"}, nil, &riverpro.WorkflowTaskOpts{ + Deps: []string{"collect_inputs"}, + Wait: waitSpec, + }) + workflow.Add(unresolvedTaskName, uicommontest.NoOpArgs{Name: "unresolved"}, nil, &riverpro.WorkflowTaskOpts{ + Deps: []string{"collect_inputs"}, + Wait: waitSpec, + }) + workflow.Add(timerOnlyTaskName, uicommontest.NoOpArgs{Name: "timer"}, nil, &riverpro.WorkflowTaskOpts{ + Deps: []string{"collect_inputs"}, + Wait: &riverworkflow.WaitSpec{ + Expr: "review_timeout", + Terms: []riverworkflow.WaitTermSpec{ + riverworkflow.WaitTermTimer(riverworkflow.TimerAfterWorkflowCreated("review_timeout", 5*time.Minute)), + }, + }, + }) + + result, err := workflow.Prepare(ctx) + require.NoError(t, err) + _, err = bundle.client.InsertMany(ctx, result.Jobs) + require.NoError(t, err) + + require.NoError(t, bundle.exec.WorkflowInsertMany(ctx, &driver.WorkflowInsertManyParams{ + IDs: []string{workflowID}, + Names: []string{"wf_task_signals"}, + Schema: bundle.schema, + })) + workflowTable := pgx.Identifier{bundle.schema, "river_workflow"}.Sanitize() + jobsTable := pgx.Identifier{bundle.schema, "river_job"}.Sanitize() + + err = bundle.exec.Exec(ctx, "UPDATE "+workflowTable+" SET current_attempt = 1 WHERE id = $1", workflowID) + 
require.NoError(t, err) + + var waitingJobID int64 + err = bundle.exec.QueryRow(ctx, ` + SELECT id + FROM `+jobsTable+` + WHERE metadata->>'workflow_id' = $1 + AND metadata->>'task' = $2 + `, workflowID, taskName).Scan(&waitingJobID) + require.NoError(t, err) + + firstSignal, err := workflow.Signal(ctx, "approval", map[string]any{"approved_by": "lead"}, &riverpro.WorkflowSignalOpts{ + Source: map[string]any{"actor": "lead", "kind": "ui"}, + }) + require.NoError(t, err) + + secondSignal, err := workflow.Signal(ctx, "approval", map[string]any{"approved_by": "manager"}, &riverpro.WorkflowSignalOpts{ + Source: map[string]any{"actor": "manager", "kind": "ui"}, + }) + require.NoError(t, err) + + escalationSignal, err := workflow.Signal(ctx, "escalation", map[string]any{"escalated_to": "team_lead"}, &riverpro.WorkflowSignalOpts{ + Source: map[string]any{"actor": "scheduler", "kind": "automation"}, + }) + require.NoError(t, err) + + resultTime := time.Now().UTC().Add(-2 * time.Minute) + metadata := workflowMetadataWithWait(workflowID, taskName, nil, waitSpec, map[string]any{ + "started_at": time.Now().UTC().Add(-10 * time.Minute).Format(time.RFC3339Nano), + "resolved_at": resultTime.Format(time.RFC3339Nano), + "result": map[string]any{ + "as_of": resultTime.Format(time.RFC3339Nano), + "attempt": 1, + "summary": "Approval received", + "signals": map[string]any{ + "approval": map[string]any{ + "included_count": 2, + "last_included_id": secondSignal.ID, + }, + "escalation": map[string]any{ + "included_count": 1, + "last_included_id": escalationSignal.ID, + }, + }, + "terms": []map[string]any{{ + "kind": "signal", + "label": "Approval received", + "last_matched_id": secondSignal.ID, + "matched_count": 2, + "name": "approval_received", + "required_count": 1, + "satisfied": true, + "signal_key": "approval", + }, { + "kind": "signal", + "label": "Escalation received", + "last_matched_id": escalationSignal.ID, + "matched_count": 1, + "name": "escalation_received", + 
"required_count": 1, + "satisfied": true, + "signal_key": "escalation", + }}, + "timers": map[string]any{}, + }, + }) + err = bundle.exec.Exec(ctx, ` + UPDATE `+jobsTable+` + SET metadata = jsonb_set(metadata, '{river:workflow_wait_state}', $1::jsonb, true) + WHERE id = $2 + `, metadataFieldRaw(t, metadata, "river:workflow_wait_state"), waitingJobID) + require.NoError(t, err) + + thirdSignal, err := workflow.Signal(ctx, "approval", map[string]any{"approved_by": "director"}, &riverpro.WorkflowSignalOpts{ + Source: map[string]any{"actor": "director", "kind": "ui"}, + }) + require.NoError(t, err) + + return &workflowTaskSignalsFixture{ + escalationSignal: escalationSignal, + firstSignal: firstSignal, + secondSignal: secondSignal, + taskName: taskName, + thirdSignal: thirdSignal, + timerOnlyTaskName: timerOnlyTaskName, + unresolvedTaskName: unresolvedTaskName, + workflowID: workflowID, + } +} + +func signalApprovalOnAttempt(ctx context.Context, t *testing.T, bundle *setupEndpointTestBundle, workflowID string, attempt int) *riverpro.WorkflowSignalResult { + t.Helper() + + workflowTable := pgx.Identifier{bundle.schema, "river_workflow"}.Sanitize() + err := bundle.exec.Exec(ctx, "UPDATE "+workflowTable+" SET current_attempt = $1 WHERE id = $2", attempt, workflowID) + require.NoError(t, err) + + workflow, err := bundle.client.WorkflowFromExistingID(ctx, workflowID, nil) + require.NoError(t, err) + + signal, err := workflow.Signal(ctx, "approval", map[string]any{"approved_by": "retry"}, &riverpro.WorkflowSignalOpts{ + Attempt: &attempt, + Source: map[string]any{"actor": "retry", "kind": "ui"}, + }) + require.NoError(t, err) + + return signal +} + +func metadataFieldRaw(t *testing.T, metadata []byte, key string) []byte { + t.Helper() + + var values map[string]json.RawMessage + require.NoError(t, json.Unmarshal(metadata, &values)) + + value, ok := values[key] + require.True(t, ok) + + return value +} diff --git a/riverproui/pro_handler_test.go b/riverproui/pro_handler_test.go 
index 624e70da..cf6f867a 100644 --- a/riverproui/pro_handler_test.go +++ b/riverproui/pro_handler_test.go @@ -116,7 +116,7 @@ func TestProHandlerIntegration(t *testing.T) { handlertest.RunIntegrationTest(t, createClient, createBundle, createHandler, testRunner) } -func TestProFeaturesEndpointResponse(t *testing.T) { +func TestProMountedEndpointResponses(t *testing.T) { t.Parallel() type testBundle struct { @@ -194,4 +194,19 @@ func TestProFeaturesEndpointResponse(t *testing.T) { "workflow_queries": true, // static } require.Equal(t, expectedExtensions, resp.Extensions) + + recorder = httptest.NewRecorder() + req = httptest.NewRequestWithContext( + ctx, + http.MethodGet, + "/api/pro/workflows/missing-workflow/task-wait-diagnostics?task_name=await_review", + nil, + ) + req.Header.Set("Accept", "*/*") + + bundle.handler.ServeHTTP(recorder, req) + + require.Equal(t, http.StatusNotFound, recorder.Result().StatusCode) + require.Contains(t, recorder.Header().Get("Content-Type"), "application/json") + require.Contains(t, recorder.Body.String(), "Workflow not found") } diff --git a/src/components/TaskStateIcon.tsx b/src/components/TaskStateIcon.tsx index b88d0142..00a924e7 100644 --- a/src/components/TaskStateIcon.tsx +++ b/src/components/TaskStateIcon.tsx @@ -4,10 +4,10 @@ import { ClockIcon, EllipsisHorizontalCircleIcon, ExclamationTriangleIcon, - PauseCircleIcon, + PlayCircleIcon, QuestionMarkCircleIcon, XCircleIcon, -} from "@heroicons/react/20/solid"; +} from "@heroicons/react/24/outline"; import { JobState } from "@services/types"; import { capitalize } from "@utils/string"; import clsx from "clsx"; @@ -24,8 +24,8 @@ export const TaskStateIcon = ({ switch (jobState) { case JobState.Available: return ( - ); @@ -60,14 +60,14 @@ export const TaskStateIcon = ({ case JobState.Retryable: return ( ); case JobState.Running: return ( ); diff --git a/src/components/WorkflowDetail.stories.tsx b/src/components/WorkflowDetail.stories.tsx new file mode 100644 index 
00000000..90becb8a --- /dev/null +++ b/src/components/WorkflowDetail.stories.tsx @@ -0,0 +1,741 @@ +import type { + Workflow, + WorkflowTask, + WorkflowTaskSignal, + WorkflowTaskWait, + WorkflowTaskWaitDiagnostics, +} from "@services/workflows"; +import type { Meta, StoryObj } from "@storybook/react-vite"; + +import { JobState } from "@services/types"; +import { workflowJobFactory } from "@test/factories/workflowJob"; +import { createFeatures } from "@test/utils/features"; +import { add, sub } from "date-fns"; +import { useState } from "react"; + +import type { + TaskSignalLoader, + TaskWaitDiagnosticsLoader, +} from "./WorkflowGateInspector"; + +import WorkflowDetail from "./WorkflowDetail"; + +const storyFeatures = createFeatures({ + hasWorkflows: true, + workflowQueries: true, +}); + +const storyNow = new Date(); + +const storyTimeAgo = (seconds: number): Date => sub(storyNow, { seconds }); + +const pendingTaskTiming = ( + createdSecondsAgo: number, +): Partial => { + const createdAt = storyTimeAgo(createdSecondsAgo); + + return { + createdAt, + scheduledAt: createdAt, + stagedAt: createdAt, + }; +}; + +const buildTask = ( + task: string, + overrides: Partial = {}, +): WorkflowTask => { + return { + ...workflowJobFactory.build({ + deps: overrides.deps ?? [], + ...(overrides.attemptedAt !== undefined + ? { attemptedAt: overrides.attemptedAt } + : {}), + ...(overrides.createdAt !== undefined + ? { createdAt: overrides.createdAt } + : {}), + ...(overrides.finalizedAt !== undefined + ? { finalizedAt: overrides.finalizedAt } + : {}), + id: overrides.id, + ...(overrides.scheduledAt !== undefined + ? { scheduledAt: overrides.scheduledAt } + : {}), + ...(overrides.stagedAt !== undefined + ? 
{ stagedAt: overrides.stagedAt } + : {}), + state: overrides.state, + task, + wait: overrides.wait, + waitReason: overrides.waitReason, + workflowID: overrides.workflowID, + workflowStagedAt: overrides.stagedAt, + }), + ...overrides, + }; +}; + +const buildWait = (overrides: Partial): WorkflowTaskWait => { + return { + exprCel: "", + inputs: { deps: [], signals: [], timers: [] }, + phase: "waiting", + terms: [], + ...overrides, + }; +}; + +const buildWaitDiagnostics = ( + wait: WorkflowTaskWait, + overrides: Partial = {}, +): WorkflowTaskWaitDiagnostics => { + return { + inputs: { + deps: wait.inputs.deps.map((dep) => ({ + available: dep.result?.available ?? false, + finalizedAt: dep.result?.finalizedAt, + state: dep.result?.state, + taskName: dep.taskName, + })), + signals: wait.inputs.signals.map((signal) => ({ + includedCount: signal.result?.includedCount ?? 0, + key: signal.key, + lastID: signal.result?.lastIncludedID, + })), + timers: wait.inputs.timers.map((timer) => ({ + fireAt: timer.result?.fireAt ?? timer.fireAt, + fired: timer.result?.fired ?? false, + name: timer.name, + })), + }, + inspectedAt: storyTimeAgo(5), + phase: wait.phase, + signalScanCount: wait.inputs.signals.length, + signalScanLimit: 10000, + terms: wait.terms.map((term) => ({ + lastMatchedID: term.result?.lastMatchedID, + matchedCount: term.result?.matchedCount ?? 0, + name: term.name, + requiredCount: term.result?.requiredCount ?? 0, + satisfied: term.result?.satisfied ?? 
false, + })), + truncated: false, + workflowAttempt: 1, + ...overrides, + }; +}; + +const buildWorkflow = (id: string, name: string, tasks: WorkflowTask[]) => ({ + id, + name, + tasks, +}); + +type StorySignalFixtures = Record< + string, + Partial> +>; + +const buildSignal = ({ + attempt = 1, + createdAt, + id, + key, + payload, + source, +}: { + attempt?: number; + createdAt: Date; + id: bigint; + key: string; + payload: unknown; + source: unknown; +}): WorkflowTaskSignal => ({ + attempt, + createdAt, + id, + key, + payload, + source, +}); + +const compareSignalsDesc = ( + leftSignal: WorkflowTaskSignal, + rightSignal: WorkflowTaskSignal, +): number => { + if (leftSignal.id > rightSignal.id) return -1; + if (leftSignal.id < rightSignal.id) return 1; + return 0; +}; + +const createStorySignalLoader = ( + fixtures: StorySignalFixtures, +): TaskSignalLoader => { + return async ({ cursorID, key, limit = 20, scope }) => { + const scopeKey = scope ?? "history"; + const signals = key + ? (fixtures[key]?.[scopeKey] ?? []) + : Object.values(fixtures).flatMap((fixture) => fixture[scopeKey] ?? []); + const sortedSignals = [...signals].sort(compareSignalsDesc); + const cursorBigInt = cursorID === undefined ? undefined : BigInt(cursorID); + const cursorIndex = + cursorBigInt === undefined + ? -1 + : sortedSignals.findIndex((signal) => signal.id === cursorBigInt); + const startIndex = cursorIndex >= 0 ? cursorIndex + 1 : 0; + const page = sortedSignals.slice(startIndex, startIndex + limit); + const hasMore = startIndex + page.length < sortedSignals.length; + const lastSignal = page[page.length - 1]; + + return { + hasMore, + nextCursorID: hasMore ? 
lastSignal?.id : undefined, + scope: scopeKey, + signals: page, + }; + }; +}; + +const storySignalLoader = createStorySignalLoader({ + "approval.received": { + evidence: [ + buildSignal({ + createdAt: storyTimeAgo(120), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + history: [ + buildSignal({ + createdAt: storyTimeAgo(120), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + }, +}); + +const storyWaitDiagnostics = new Map(); + +const storyWaitDiagnosticsKey = (workflowID: string, taskName: string) => + `${workflowID}:${taskName}`; + +const registerStoryWaitDiagnostics = ( + task: WorkflowTask, + overrides: Partial = {}, +) => { + if (task.wait === undefined) return; + + storyWaitDiagnostics.set( + storyWaitDiagnosticsKey(task.workflowID, task.name), + buildWaitDiagnostics(task.wait, overrides), + ); +}; + +const storyWaitDiagnosticsLoader: TaskWaitDiagnosticsLoader = async ({ + taskName, + workflowID, +}) => { + const diagnostics = storyWaitDiagnostics.get( + storyWaitDiagnosticsKey(workflowID, taskName), + ); + + if (diagnostics === undefined) { + throw new Error("No Storybook waiting diagnostics fixture."); + } + + return diagnostics; +}; + +const buildWaitingWorkflow = (): Workflow => { + const classify = { + ...buildTask("classify_intake", { + finalizedAt: storyTimeAgo(360), + id: 1001n, + state: JobState.Completed, + waitReason: "none", + }), + }; + + const reviewStartedAt = storyTimeAgo(150); + const review = buildTask("await_review", { + ...pendingTaskTiming(210), + deps: ["classify_intake"], + id: 1002n, + state: JobState.Pending, + wait: buildWait({ + exprCel: "approval_received || review_timeout_reached", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + }, + ], 
+ timers: [ + { + afterSeconds: 300, + anchor: { kind: "wait_started_at" }, + name: "review_timeout", + }, + ], + }, + phase: "waiting", + startedAt: reviewStartedAt, + summary: "Waiting for human approval or review timeout.", + terms: [ + { + kind: "signal", + label: "Human approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Review SLA timeout reached", + name: "review_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }), + waitReason: "wait", + }); + registerStoryWaitDiagnostics(review, { exprResult: false }); + + const send = buildTask("send_response", { + ...pendingTaskTiming(180), + deps: ["await_review"], + id: 1003n, + state: JobState.Pending, + waitReason: "dependencies", + }); + + return buildWorkflow("wf-story-wait-blocked", "Customer Intake Workflow", [ + classify, + review, + send, + ]); +}; + +const buildDependenciesProgressingWorkflow = (): Workflow => { + const fetchAccount = { + ...buildTask("fetch_account_context", { + finalizedAt: storyTimeAgo(180), + id: 3001n, + state: JobState.Completed, + waitReason: "none", + }), + }; + + const fetchEntitlements = buildTask("fetch_entitlements", { + attemptedAt: storyTimeAgo(90), + id: 3002n, + state: JobState.Running, + waitReason: "none", + }); + + const fetchCharges = buildTask("fetch_recent_charges", { + ...pendingTaskTiming(150), + id: 3003n, + state: JobState.Pending, + waitReason: "none", + }); + + const promote = buildTask("promote_global", { + ...pendingTaskTiming(120), + deps: [ + "fetch_account_context", + "fetch_entitlements", + "fetch_recent_charges", + ], + id: 3004n, + state: JobState.Pending, + wait: buildWait({ + exprCel: "approval_received || launch_timeout_reached", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + }, + ], + timers: [ + { + afterSeconds: 600, + anchor: { kind: 
"task_finalized_at", task: "fetch_recent_charges" }, + name: "launch_timeout", + }, + ], + }, + phase: "not_started", + summary: "Waits for approval or timeout after dependency checks finish.", + terms: [ + { + kind: "signal", + label: "Approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Launch timeout reached", + name: "launch_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }), + waitReason: "dependencies_and_wait", + }); + registerStoryWaitDiagnostics(promote); + + return buildWorkflow( + "wf-story-dependencies-progressing", + "Dependencies Still Progressing", + [fetchAccount, fetchEntitlements, fetchCharges, promote], + ); +}; + +const buildResolvedBySignalWorkflow = (): Workflow => { + const collect = { + ...buildTask("collect_inputs", { + finalizedAt: storyTimeAgo(190), + id: 2001n, + state: JobState.Completed, + waitReason: "none", + }), + }; + const safetyReview = { + ...buildTask("safety_review", { + finalizedAt: storyTimeAgo(160), + id: 2002n, + state: JobState.Completed, + waitReason: "none", + }), + }; + const waitStartedAt = storyTimeAgo(120); + const waitResolvedAt = storyTimeAgo(90); + const taskStagedAt = storyTimeAgo(80); + const taskAttemptedAt = storyTimeAgo(70); + const taskFinalizedAt = storyTimeAgo(58); + const approve = { + ...buildTask("await_review", { + attemptedAt: taskAttemptedAt, + deps: ["collect_inputs", "safety_review"], + finalizedAt: taskFinalizedAt, + id: 2003n, + stagedAt: taskStagedAt, + state: JobState.Completed, + wait: buildWait({ + evidence: { + evaluatedAt: waitResolvedAt, + workflowAttempt: 1, + }, + exprCel: "approval_received || review_timeout_reached", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + result: { + includedCount: 1, + lastIncludedID: 9123n, + }, + }, + ], + timers: [ + { + afterSeconds: 300, + anchor: { 
kind: "wait_started_at" }, + fireAt: add(waitStartedAt, { seconds: 300 }), + name: "review_timeout", + }, + ], + }, + phase: "resolved", + resolvedAt: waitResolvedAt, + startedAt: waitStartedAt, + summary: "Human approval received", + terms: [ + { + kind: "signal", + label: "Human approval received", + name: "approval_received", + result: { + lastMatchedID: 9123n, + matchedCount: 1, + requiredCount: 1, + satisfied: true, + }, + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Review SLA timeout reached", + name: "review_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }), + waitReason: "none", + }), + }; + + return buildWorkflow("wf-story-wait-resolved-signal", "Approval Workflow", [ + collect, + safetyReview, + approve, + ]); +}; + +const buildResolvedByTimeoutWorkflow = (): Workflow => { + const canaryChecks = [ + "canary_check_cost", + "canary_check_errors", + "canary_check_latency", + "canary_check_safety", + "canary_check_support_quality", + "deploy_canary", + ].map((taskName, index) => ({ + ...buildTask(taskName, { + finalizedAt: storyTimeAgo(300 - index * 20), + id: BigInt(4001 + index), + state: JobState.Completed, + waitReason: "none", + }), + })); + + const waitStartedAt = storyTimeAgo(180); + const waitResolvedAt = storyTimeAgo(109); + const taskStagedAt = storyTimeAgo(109); + const promote = { + ...buildTask("promote_global", { + deps: canaryChecks.map((task) => task.name), + finalizedAt: storyTimeAgo(103), + id: 4010n, + stagedAt: taskStagedAt, + state: JobState.Completed, + wait: buildWait({ + evidence: { + evaluatedAt: waitResolvedAt, + workflowAttempt: 1, + }, + exprCel: "release_canary_metrics_received || canary_timeout_reached", + inputs: { + deps: [], + signals: [ + { + key: "release_canary_metrics", + }, + ], + timers: [ + { + afterSeconds: 71, + anchor: { kind: "task_finalized_at", task: "deploy_canary" }, + fireAt: waitResolvedAt, + name: "canary_timeout", + }, + ], + }, + 
phase: "resolved", + resolvedAt: waitResolvedAt, + startedAt: waitStartedAt, + summary: "Canary timeout reached", + terms: [ + { + kind: "signal", + label: "Release canary metrics received", + name: "release_canary_metrics_received", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "release_canary_metrics", + }, + { + kind: "timer", + label: "Canary timeout reached", + name: "canary_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + ], + }), + waitReason: "none", + }), + }; + + return buildWorkflow( + "wf-story-wait-resolved-timeout", + "Timeout-Resolved Workflow", + [...canaryChecks, promote], + ); +}; + +const buildDirectWaitWorkflow = (): Workflow => { + const startedAt = sub(storyNow, { minutes: 12 }); + const task = buildTask("await_external_signal", { + id: 5001n, + state: JobState.Pending, + wait: buildWait({ + exprCel: "customer_acknowledged || escalation_timeout_reached", + inputs: { + deps: [], + signals: [ + { + key: "customer.acknowledged", + }, + ], + timers: [ + { + afterSeconds: 900, + anchor: { kind: "wait_started_at" }, + name: "escalation_timeout", + }, + ], + }, + phase: "waiting", + startedAt, + summary: "Waiting for customer acknowledgement or escalation timeout.", + terms: [ + { + kind: "signal", + label: "Customer acknowledged", + name: "customer_acknowledged", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "customer.acknowledged", + }, + { + kind: "timer", + label: "Escalation timeout reached", + name: "escalation_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }), + waitReason: "wait", + }); + registerStoryWaitDiagnostics(task, { exprResult: false }); + + return buildWorkflow("wf-story-direct-wait", "Direct Wait Workflow", [task]); +}; + +const meta: Meta = { + component: WorkflowDetail, + parameters: { + layout: "fullscreen", + }, + title: "Components/WorkflowDetail", +}; + +export default 
meta; + +type Story = StoryObj; + +const StatefulStory = ({ + initialSelectedJobId, + workflow, +}: { + initialSelectedJobId?: bigint; + workflow: undefined | Workflow; +}) => { + const [selectedJobId, setSelectedJobId] = useState( + initialSelectedJobId ?? workflow?.tasks[0]?.id, + ); + + return ( + + ); +}; + +const renderSelectedTask = (workflow: Workflow, taskName: string) => ( + task.name === taskName)?.id + } + workflow={workflow} + /> +); + +export const DependenciesProgressing: Story = { + args: { + workflow: buildDependenciesProgressingWorkflow(), + }, + parameters: { + features: storyFeatures, + }, + render: (args) => renderSelectedTask(args.workflow!, "promote_global"), +}; + +export const Waiting: Story = { + args: { + workflow: buildWaitingWorkflow(), + }, + parameters: { + features: storyFeatures, + }, + render: (args) => renderSelectedTask(args.workflow!, "await_review"), +}; + +export const WaitingWithoutDependencies: Story = { + args: { + workflow: buildDirectWaitWorkflow(), + }, + parameters: { + features: storyFeatures, + }, + render: (args) => renderSelectedTask(args.workflow!, "await_external_signal"), +}; + +export const ResolvedBySignal: Story = { + args: { + workflow: buildResolvedBySignalWorkflow(), + }, + parameters: { + features: storyFeatures, + }, + render: (args) => renderSelectedTask(args.workflow!, "await_review"), +}; + +export const ResolvedByTimeout: Story = { + args: { + workflow: buildResolvedByTimeoutWorkflow(), + }, + parameters: { + features: storyFeatures, + }, + render: (args) => renderSelectedTask(args.workflow!, "promote_global"), +}; + +export const FeatureDisabled: Story = { + args: { + workflow: buildWaitingWorkflow(), + }, + parameters: { + features: createFeatures({ + hasWorkflows: false, + workflowQueries: false, + }), + }, + render: (args) => , +}; diff --git a/src/components/WorkflowDetail.test.tsx b/src/components/WorkflowDetail.test.tsx new file mode 100644 index 00000000..17c93fab --- /dev/null +++ 
b/src/components/WorkflowDetail.test.tsx @@ -0,0 +1,695 @@ +import type { Features } from "@services/features"; +import type { Workflow } from "@services/workflows"; + +import { FeaturesContext } from "@contexts/Features"; +import { JobState } from "@services/types"; +import { + createMemoryHistory, + createRootRoute, + createRoute, + createRouter, + Outlet, + RouterProvider, +} from "@tanstack/react-router"; +import { workflowJobFactory } from "@test/factories/workflowJob"; +import { act, fireEvent, render, screen } from "@testing-library/react"; +import React from "react"; +import { describe, expect, it, vi } from "vitest"; + +import WorkflowDetail from "./WorkflowDetail"; + +vi.mock("@components/workflow-diagram/WorkflowDiagram", () => ({ + default: ({ + setSelectedJobId, + tasks, + }: { + setSelectedJobId: (jobId: bigint | undefined) => void; + tasks: Array<{ id: bigint; name: string }>; + }) => ( +
+ {tasks.map((task) => ( + + ))} +
+ ), +})); + +const features: Features = { + durablePeriodicJobs: false, + hasClientTable: false, + hasProducerTable: true, + hasSequenceTable: false, + hasWorkflows: true, + jobListHideArgsByDefault: false, + producerQueries: true, + workflowQueries: true, +}; + +const renderWorkflowDetail = async ( + workflow: Workflow, + selectedJobId: bigint | undefined, +) => { + const rootRoute = createRootRoute({ + component: () => , + }); + + const jobsRoute = createRoute({ + component: () =>
Job route
, + getParentRoute: () => rootRoute, + path: "/jobs/$jobId", + }); + + const workflowRoute = createRoute({ + component: () => ( + + ({ + hasMore: false, + scope: "history", + signals: [], + })} + loadTaskWaitDiagnostics={async () => ({ + inputs: { + deps: [], + signals: [], + timers: [], + }, + inspectedAt: new Date("2026-04-21T18:00:00Z"), + phase: "waiting", + signalScanCount: 0, + signalScanLimit: 10000, + terms: [], + truncated: false, + workflowAttempt: 1, + })} + selectedJobId={selectedJobId} + setSelectedJobId={vi.fn()} + workflow={workflow} + /> + + ), + getParentRoute: () => rootRoute, + path: "/", + }); + + const routeTree = rootRoute.addChildren([workflowRoute, jobsRoute]); + const router = createRouter({ + history: createMemoryHistory({ initialEntries: ["/"] }), + routeTree, + }); + + let rendered: ReturnType | undefined; + await act(async () => { + await router.load(); + rendered = render(); + }); + + return rendered!; +}; + +describe("WorkflowDetail wait inspector", () => { + it("shows structured wait details in selected task inspector", async () => { + const dependency = workflowJobFactory.build({ + id: 1, + state: JobState.Completed, + task: "classify_intake", + waitReason: "none", + }); + dependency.finalizedAt = new Date("2026-04-21T17:57:00Z"); + const waitingTask = workflowJobFactory.build({ + deps: ["classify_intake"], + id: 2, + state: JobState.Pending, + task: "compose_draft_response", + wait: { + exprCel: "classify_intake_done && approval_received", + inputs: { + deps: [ + { + taskName: "classify_intake", + }, + ], + signals: [ + { + key: "approval.received", + result: undefined, + }, + ], + timers: [ + { + name: "escalation", + }, + ], + }, + phase: "waiting", + startedAt: new Date("2026-04-21T17:58:00Z"), + summary: "Waiting for approval.received.", + terms: [ + { + exprCel: `deps["classify_intake"].output.category == "launch"`, + kind: "generic", + label: "Classify intake done", + name: "classify_intake_done", + }, + { + kind: "signal", + 
label: "Approval received", + name: "approval_received", + signalKey: "approval.received", + }, + ], + }, + waitReason: "wait", + }); + + await renderWorkflowDetail( + { + id: "wf-test-wait", + name: "Workflow Test", + tasks: [dependency, waitingTask], + }, + waitingTask.id, + ); + + expect( + screen.getByRole("heading", { name: "Wait condition" }), + ).toBeInTheDocument(); + expect( + screen.getByRole("heading", { name: "Timeline" }), + ).toBeInTheDocument(); + expect(screen.getAllByText("Pending")).not.toHaveLength(0); + expect(screen.getByText("Not yet staged")).toBeInTheDocument(); + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + expect(screen.getByText("Dependency")).toBeInTheDocument(); + expect(screen.getAllByText("classify_intake")).not.toHaveLength(0); + expect(screen.getAllByText("approval.received")).not.toHaveLength(0); + expect(screen.getAllByText("escalation")).not.toHaveLength(0); + expect(screen.getAllByText("Blocked by wait condition")).not.toHaveLength( + 0, + ); + expect(screen.getByText("Wait condition pending")).toBeInTheDocument(); + expect(screen.queryByText("Wait started")).toBeNull(); + expect(screen.queryByText("Task staged")).toBeNull(); + }); + + it("does not render wait section when selected task has no wait", async () => { + const task = workflowJobFactory.build({ + id: 1, + state: JobState.Completed, + task: "send_response", + waitReason: "none", + }); + + await renderWorkflowDetail( + { id: "wf-test-no-wait", name: "Workflow Test", tasks: [task] }, + task.id, + ); + + expect( + screen.queryByRole("heading", { name: "Wait condition" }), + ).toBeNull(); + expect(screen.getByText("Not waiting")).toBeInTheDocument(); + }); + + it("updates the lower inspector when the selected task changes", async () => { + const firstTask = workflowJobFactory.build({ + id: 1, + state: JobState.Completed, + task: "classify_intake", + waitReason: "none", + }); + const secondTask = 
workflowJobFactory.build({ + deps: ["classify_intake"], + id: 2, + state: JobState.Pending, + task: "send_response", + wait: { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + }, + ], + timers: [], + }, + phase: "waiting", + terms: [ + { + kind: "signal", + label: "Approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }, + waitReason: "wait", + }); + + const rootRoute = createRootRoute({ + component: () => , + }); + + const jobsRoute = createRoute({ + component: () =>
Job route
, + getParentRoute: () => rootRoute, + path: "/jobs/$jobId", + }); + + const SelectionHarness = () => { + const [selectedJobId, setSelectedJobId] = React.useState< + bigint | undefined + >(undefined); + + return ( + + + + ); + }; + + const workflowRoute = createRoute({ + component: SelectionHarness, + getParentRoute: () => rootRoute, + path: "/", + }); + + const routeTree = rootRoute.addChildren([workflowRoute, jobsRoute]); + const router = createRouter({ + history: createMemoryHistory({ initialEntries: ["/"] }), + routeTree, + }); + + await act(async () => { + await router.load(); + render(); + }); + + expect( + screen.queryByRole("heading", { name: "Wait condition" }), + ).toBeNull(); + + await act(async () => { + screen.getByRole("button", { name: "Select send_response" }).click(); + }); + + expect( + screen.getByRole("heading", { name: "Wait condition" }), + ).toBeInTheDocument(); + expect(screen.getAllByText("Pending")).not.toHaveLength(0); + }); + + it("renders grouped timeline milestones instead of a flat event dump", async () => { + const collectInputs = { + ...workflowJobFactory.build({ + id: 11, + state: JobState.Completed, + task: "collect_inputs", + waitReason: "none", + }), + finalizedAt: new Date("2026-04-21T17:58:00Z"), + }; + const safetyReview = { + ...workflowJobFactory.build({ + id: 12, + state: JobState.Completed, + task: "safety_review", + waitReason: "none", + }), + finalizedAt: new Date("2026-04-21T17:59:00Z"), + }; + const releaseTask = { + ...workflowJobFactory.build({ + deps: ["collect_inputs", "safety_review"], + id: 13, + state: JobState.Completed, + task: "launch_release", + wait: { + exprCel: "launch_override_received || release_timeout_reached", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "resolved" as const, + resolvedAt: new Date("2026-04-21T18:01:00Z"), + startedAt: new Date("2026-04-21T18:00:00Z"), + summary: "Launch override received", + terms: [ + { + kind: "signal", + label: "Launch override received", + 
name: "launch_override_received", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + kind: "timer", + label: "Release timeout reached", + name: "release_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + ], + }, + waitReason: "none", + }), + attemptedAt: new Date("2026-04-21T18:02:00Z"), + finalizedAt: new Date("2026-04-21T18:02:12Z"), + stagedAt: new Date("2026-04-21T18:01:30Z"), + }; + + await renderWorkflowDetail( + { + id: "wf-test-timeline", + name: "Workflow Test", + tasks: [collectInputs, safetyReview, releaseTask], + }, + releaseTask.id, + ); + + expect( + screen.getByRole("heading", { name: "Timeline" }), + ).toBeInTheDocument(); + expect(screen.getByText("Dependencies completed")).toBeInTheDocument(); + expect(screen.getAllByText("collect_inputs")).not.toHaveLength(0); + expect(screen.getAllByText("safety_review")).not.toHaveLength(0); + expect(screen.getByText("Wait resolved")).toBeInTheDocument(); + expect(screen.queryByText("Wait started")).toBeNull(); + expect( + screen.getByText( + "2 terms satisfied and the wait expression evaluated true.", + ), + ).toBeInTheDocument(); + expect(screen.getAllByText("Launch override received")).not.toHaveLength(0); + expect(screen.getAllByText("Release timeout reached")).not.toHaveLength(0); + expect(screen.getAllByText("launch_override_received")).not.toHaveLength(0); + expect(screen.getAllByText("release_timeout_reached")).not.toHaveLength(0); + expect(screen.getByRole("button", { name: "Details" })).toHaveAttribute( + "aria-expanded", + "false", + ); + + await act(async () => { + fireEvent.click( + screen.getByRole("button", { + name: "Launch override received (launch_override_received)", + }), + ); + }); + + expect(screen.getByRole("button", { name: "Details" })).toHaveAttribute( + "aria-expanded", + "true", + ); + expect(screen.getByText("2 of 2 conditions satisfied")).toBeInTheDocument(); + expect(screen.getByText("Task 
staged")).toBeInTheDocument(); + expect(screen.getByText("Task started")).toBeInTheDocument(); + expect(screen.getByText("Task completed")).toBeInTheDocument(); + }); + + it("shows dependency tasks once in the timeline without collapsing them", async () => { + const deps = [ + "offline_eval_billing", + "offline_eval_onboarding", + "offline_eval_support", + "safety_scan_core", + "safety_scan_prompting", + ] as const; + + const dependencyTasks = deps.map((taskName, index) => ({ + ...workflowJobFactory.build({ + id: BigInt(100 + index), + state: JobState.Completed, + task: taskName, + waitReason: "none", + }), + finalizedAt: new Date(`2026-04-21T17:${55 + index}:00Z`), + })); + + const waitingTask = workflowJobFactory.build({ + deps: [...deps], + id: 200n, + state: JobState.Pending, + task: "launch_release", + wait: { + exprCel: "launch_timeout_reached", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "waiting", + startedAt: new Date("2026-04-21T18:02:00Z"), + summary: "Waiting for launch timeout.", + terms: [ + { + kind: "timer", + label: "Launch timeout reached", + name: "launch_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }, + waitReason: "wait", + }); + + await renderWorkflowDetail( + { + id: "wf-test-timeline-expand", + name: "Workflow Test", + tasks: [...dependencyTasks, waitingTask], + }, + waitingTask.id, + ); + + expect(screen.getAllByText("safety_scan_prompting")).toHaveLength(1); + expect(screen.queryByText("Show 2 more")).toBeNull(); + expect( + screen.getByRole("link", { name: /offline_eval_billing/i }), + ).toHaveAttribute("href", "/?selected=100"); + }); + + it("keeps dependency timeline tasks alphabetized across mixed states", async () => { + const completedDep = { + ...workflowJobFactory.build({ + id: 501n, + state: JobState.Completed, + task: "alpha_cleanup", + waitReason: "none", + }), + finalizedAt: new Date("2026-04-21T17:58:00Z"), + }; + const pendingDep = 
workflowJobFactory.build({ + id: 502n, + state: JobState.Pending, + task: "beta_collect", + waitReason: "none", + }); + const runningDep = { + ...workflowJobFactory.build({ + id: 503n, + state: JobState.Running, + task: "gamma_check", + waitReason: "none", + }), + attemptedAt: new Date("2026-04-21T17:59:00Z"), + }; + + const blockedTask = workflowJobFactory.build({ + deps: ["gamma_check", "alpha_cleanup", "beta_collect"], + id: 504n, + state: JobState.Pending, + task: "promote_global", + waitReason: "dependencies", + }); + + await renderWorkflowDetail( + { + id: "wf-test-dependency-order", + name: "Workflow Test", + tasks: [runningDep, completedDep, pendingDep, blockedTask], + }, + blockedTask.id, + ); + + const alphaLink = screen.getByRole("link", { name: "alpha_cleanup" }); + const betaLink = screen.getByRole("link", { name: "beta_collect" }); + const gammaLink = screen.getByRole("link", { name: "gamma_check" }); + + expect( + alphaLink.compareDocumentPosition(betaLink) & + Node.DOCUMENT_POSITION_FOLLOWING, + ).toBeTruthy(); + expect( + betaLink.compareDocumentPosition(gammaLink) & + Node.DOCUMENT_POSITION_FOLLOWING, + ).toBeTruthy(); + }); + + it("does not preview a not-started wait from dependency progress", async () => { + const completedDep = { + ...workflowJobFactory.build({ + id: 401n, + state: JobState.Completed, + task: "fetch_account_context", + waitReason: "none", + }), + finalizedAt: new Date("2026-04-21T17:58:00Z"), + }; + + const runningDep = { + ...workflowJobFactory.build({ + id: 402n, + state: JobState.Running, + task: "fetch_entitlements", + waitReason: "none", + }), + attemptedAt: new Date("2026-04-21T17:59:00Z"), + }; + + const pendingDep = workflowJobFactory.build({ + id: 403n, + state: JobState.Pending, + task: "fetch_recent_charges", + waitReason: "none", + }); + + const blockedTask = workflowJobFactory.build({ + deps: [ + "fetch_account_context", + "fetch_entitlements", + "fetch_recent_charges", + ], + id: 404n, + state: JobState.Pending, + 
task: "promote_global", + wait: { + exprCel: "approval_received || launch_timeout_reached", + inputs: { deps: [], signals: [], timers: [] }, + phase: "not_started", + summary: + "Waits for approval or timeout after dependency checks finish.", + terms: [ + { + kind: "signal", + label: "Approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + { + kind: "timer", + label: "Launch timeout reached", + name: "launch_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], + }, + waitReason: "dependencies_and_wait", + }); + + await renderWorkflowDetail( + { + id: "wf-test-not-started-wait", + name: "Workflow Test", + tasks: [completedDep, runningDep, pendingDep, blockedTask], + }, + blockedTask.id, + ); + + expect(screen.getByText("Dependencies progressing")).toBeInTheDocument(); + expect( + screen.queryByText( + "Then waits for approval or timeout after dependency checks finish.", + ), + ).toBeNull(); + expect(screen.queryByText("Wait condition pending")).toBeNull(); + expect(screen.queryByText("Task staged")).toBeNull(); + }); + + it("shows long matched-term lists without collapsing", async () => { + const resolvedTask = { + ...workflowJobFactory.build({ + id: 301n, + state: JobState.Completed, + task: "launch_release", + wait: { + exprCel: + "term_one || term_two || term_three || term_four || term_five", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "resolved" as const, + resolvedAt: new Date("2026-04-21T18:05:00Z"), + startedAt: new Date("2026-04-21T18:04:00Z"), + terms: [ + { + kind: "signal", + label: "Term one", + name: "term_one", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + kind: "signal", + label: "Term two", + name: "term_two", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + kind: "signal", + label: "Term three", + name: "term_three", + result: { matchedCount: 0, requiredCount: 
0, satisfied: true }, + }, + { + kind: "signal", + label: "Term four", + name: "term_four", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + kind: "signal", + label: "Term five", + name: "term_five", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + ], + }, + waitReason: "none", + }), + finalizedAt: new Date("2026-04-21T18:05:10Z"), + }; + + await renderWorkflowDetail( + { + id: "wf-test-term-expand", + name: "Workflow Test", + tasks: [resolvedTask], + }, + resolvedTask.id, + ); + + expect(screen.queryByText("Show 2 more")).toBeNull(); + expect(screen.getAllByText("term_four")).toHaveLength(1); + expect(screen.queryByRole("button", { name: "Show fewer" })).toBeNull(); + }); +}); diff --git a/src/components/WorkflowDetail.tsx b/src/components/WorkflowDetail.tsx index 875304d6..412fd440 100644 --- a/src/components/WorkflowDetail.tsx +++ b/src/components/WorkflowDetail.tsx @@ -1,37 +1,74 @@ import ButtonForGroup from "@components/ButtonForGroup"; +import { DurationCompact } from "@components/DurationCompact"; import { Subheading } from "@components/Heading"; +import { RunningSpinnerIcon } from "@components/icons/jobStateIcons"; import JSONView from "@components/JSONView"; import RelativeTimeFormatter from "@components/RelativeTimeFormatter"; import RetryWorkflowDialog from "@components/RetryWorkflowDialog"; import { TaskStateIcon } from "@components/TaskStateIcon"; import TopNavTitleOnly from "@components/TopNavTitleOnly"; import WorkflowDiagram from "@components/workflow-diagram/WorkflowDiagram"; +import WorkflowGateInspector, { + ConditionKindIcon, + type TaskSignalLoader, + type TaskWaitDiagnosticsLoader, + type WaitFocusRequest, + WaitStatusPill, +} from "@components/WorkflowGateInspector"; import { useFeatures } from "@contexts/Features.hook"; // (Dialog is now encapsulated in RetryWorkflowDialog) import { CheckIcon } from "@heroicons/react/16/solid"; import { ArrowPathIcon, ClipboardIcon, - XCircleIcon, + 
LinkIcon, } from "@heroicons/react/24/outline"; -import { JobWithKnownMetadata } from "@services/jobs"; +import { + CheckCircleIcon, + PlayCircleIcon, + QueueListIcon, + TrashIcon, + XCircleIcon, +} from "@heroicons/react/24/solid"; import { toastSuccess } from "@services/toast"; -import { JobState } from "@services/types"; -import { Workflow, type WorkflowRetryMode } from "@services/workflows"; +import { type Heroicon, JobState } from "@services/types"; +import { + Workflow, + type WorkflowRetryMode, + type WorkflowTask, + type WorkflowTaskWaitReason, +} from "@services/workflows"; import { Link } from "@tanstack/react-router"; import { capitalize } from "@utils/string"; import clsx from "clsx"; -import { useMemo, useState } from "react"; +import { compareAsc } from "date-fns"; +import { type ReactNode, useMemo, useState } from "react"; import WorkflowListEmptyState from "./WorkflowListEmptyState"; type JobsByTask = { - [key: string]: JobWithKnownMetadata; + [key: string]: WorkflowTask; +}; + +const dependencyNameCollator = new Intl.Collator("en", { + numeric: true, + sensitivity: "base", +}); + +const compareDependencyNames = (left: string, right: string): number => { + const collatorResult = dependencyNameCollator.compare(left, right); + if (collatorResult !== 0) return collatorResult; + + if (left < right) return -1; + if (left > right) return 1; + return 0; }; type WorkflowDetailProps = { cancelPending?: boolean; loading: boolean; + loadTaskSignals?: TaskSignalLoader; + loadTaskWaitDiagnostics?: TaskWaitDiagnosticsLoader; onCancel?: () => void; onRetry?: (mode: WorkflowRetryMode, resetHistory: boolean) => void; retryPending?: boolean; @@ -43,6 +80,8 @@ type WorkflowDetailProps = { export default function WorkflowDetail({ cancelPending, loading, + loadTaskSignals, + loadTaskWaitDiagnostics, onCancel, onRetry, retryPending, @@ -59,11 +98,11 @@ export default function WorkflowDetail({ ); const firstTask = workflow?.tasks?.[0]; - const workflowID = 
firstTask?.metadata.workflow_id; + const workflowID = workflow?.id; // TODO: this is being repeated in WorkflowDiagram, dedupe const jobsByTask: JobsByTask = workflow?.tasks ? workflow.tasks.reduce((acc: JobsByTask, job) => { - acc[job.metadata.task] = job; + acc[job.name] = job; return acc; }, {}) : {}; @@ -105,7 +144,8 @@ export default function WorkflowDetail({ return

No tasks available

; } const { tasks } = workflow; - const workflowName = firstTask.metadata.workflow_name || "Unnamed Workflow"; + const workflowName = + workflow.name === "" ? "Unnamed Workflow" : workflow.name; return ( <> @@ -163,13 +203,10 @@ export default function WorkflowDetail({

- ID:{" "} - - {firstTask.metadata.workflow_id} - + ID: {workflow.id}

-
+
{selectedJob && ( - + )}
@@ -219,35 +261,41 @@ export default function WorkflowDetail({ ); } -const dlClasses = "grid grid-cols-[130px_auto] text-base/6 sm:text-sm/6"; -const dtClasses = - "col-start-1 border-zinc-950/5 pt-2 text-zinc-500 first:border-none sm:border-zinc-950/5 sm:py-2 dark:border-white/5 dark:text-zinc-400 sm:dark:border-white/5"; -const ddClasses = - "pb-2 pt-1 text-zinc-950 sm:border-zinc-950/5 sm:py-2 dark:text-white dark:sm:border-white/5 sm:nth-2:border-none"; +const inspectorCardClasses = + "rounded-2xl border border-slate-200 bg-white p-5 shadow-xs dark:border-slate-800 dark:bg-slate-900"; +const inspectorListClasses = "space-y-3"; +const inspectorLabelClasses = "text-sm text-slate-500 dark:text-slate-400"; +const inspectorValueClasses = + "min-w-0 text-sm text-slate-900 dark:text-slate-100"; const SelectedJobDetails = ({ job, jobsByTask, + loadTaskSignals, + loadTaskWaitDiagnostics, }: { - job: JobWithKnownMetadata; + job: WorkflowTask; jobsByTask: JobsByTask; + loadTaskSignals?: TaskSignalLoader; + loadTaskWaitDiagnostics?: TaskWaitDiagnosticsLoader; }) => { - const stagedAt = useMemo( - () => - job.metadata.workflow_staged_at - ? new Date(job.metadata.workflow_staged_at) - : undefined, - [job.metadata.workflow_staged_at], - ); + const stagedAt = useMemo(() => job.stagedAt, [job.stagedAt]); + const [waitFocusRequest, setWaitFocusRequest] = useState(); + const handleSelectWait = (conditionName: string) => { + setWaitFocusRequest((current) => ({ + conditionName, + requestID: (current?.requestID ?? 0) + 1, + })); + }; return ( - <> -
-
- Job Details -
-
ID
-
+
+
+ Job Details +
+ {job.id.toString()} -
- -
State
-
- {capitalize(job.state)} - -
- -
Kind
-
{job.kind}
- -
Attempt
-
- {job.attempt.toString()} / {job.maxAttempts.toString()} -
- -
Queue
-
{job.queue}
+ } + /> + + {capitalize(job.state)} + + + } + /> + {job.kind}} + /> + + {job.queue}} + /> + + } + /> +
-
Priority
-
{job.priority}
+
+ Args + +
-
Created
-
- -
- +
+ Metadata +
+
-
- Workflow Task -
-
Task
-
{job.metadata.task}
-
Dependencies
-
- {job.metadata.deps && - job.metadata.deps.map((dep: string) => ( -
- -
- ))} -
-
Staged
-
- {job.state === JobState.Pending ? ( - Not yet staged, pending dependencies +
+ Workflow Task +
+ + + {job.wait ? ( + } + /> + ) : null} + {getPendingStageLabel(job.waitReason)} ) : ( - )} -
-
-
+ ) + } + /> + -
-
Args
-
- -
-
-
-
Metadata
-
- -
-
+ + + {job.wait ? ( + + ) : null}
- +
+ ); +}; + +const InspectorRow = ({ + label, + value, +}: { + label: string; + value: ReactNode; +}) => { + return ( +
+
{label}
+
{value}
+
); }; @@ -337,12 +419,12 @@ const DependencyItem = ({ depJob, depName, }: { - depJob?: JobWithKnownMetadata; + depJob?: WorkflowTask; depName: string; }) => { if (!depJob) { return ( -
+
{depName}
@@ -350,13 +432,606 @@ const DependencyItem = ({ } return ( - +
- {depName} - + + {depName} + +
); }; + +const formatWaitReason = (waitReason: WorkflowTaskWaitReason): string => { + switch (waitReason) { + case "dependencies": + return "Blocked by dependencies"; + case "dependencies_and_wait": + return "Blocked by dependencies and wait condition"; + case "wait": + return "Blocked by wait condition"; + case "none": + default: + return "Not waiting"; + } +}; + +const getPendingStageLabel = (waitReason: WorkflowTaskWaitReason): string => { + switch (waitReason) { + case "none": + return "Not yet staged"; + default: + return "Not yet staged"; + } +}; + +type TaskTimelineEvent = { + description?: ReactNode; + icon: Heroicon; + items?: TaskTimelineListItem[]; + key: string; + metric?: ReactNode; + status: "active" | "complete" | "failed" | "waiting"; + time: Date; + title: string; +}; + +type TaskTimelineListItem = { + label: string; + mono?: boolean; + rawLabel?: string; + selectedJobId?: bigint; + state?: JobState; + waitTermKind?: string; + waitTermName?: string; +}; + +const TaskTimeline = ({ + job, + jobsByTask, + onSelectWait, +}: { + job: WorkflowTask; + jobsByTask: JobsByTask; + onSelectWait: (conditionName: string) => void; +}) => { + const events = useMemo( + () => getTaskTimelineEvents(job, jobsByTask), + [job, jobsByTask], + ); + const [expandedEventKeys, setExpandedEventKeys] = useState< + Record + >({}); + + if (events.length === 0) return null; + + return ( +
+ Timeline + +
+
    + {events.map((event, eventIdx) => { + const toneClasses = getTaskTimelineToneClasses(event.status); + const collapsibleItems = Boolean( + event.items && + event.key !== "dependencies" && + event.key !== "wait-resolved" && + event.items.length > 3, + ); + const expanded = expandedEventKeys[event.key] ?? false; + const visibleItems = event.items + ? collapsibleItems + ? expanded + ? event.items + : event.items.slice(0, 3) + : event.items + : undefined; + const hiddenItemCount = + collapsibleItems && event.items + ? Math.max(event.items.length - visibleItems!.length, 0) + : 0; + + return ( +
  1. +
    + {eventIdx !== events.length - 1 ? ( +
    +
  2. + ); + })} +
+
+
+ ); +}; + +const getTaskTimelineToneClasses = ( + status: TaskTimelineEvent["status"], +): { + icon: string; + iconBackground: string; + line: string; + row: string; +} => { + switch (status) { + case "active": + return { + icon: "text-blue-700 dark:text-blue-200", + iconBackground: "bg-blue-200 dark:bg-blue-700", + line: "bg-slate-200 dark:bg-slate-700", + row: "", + }; + case "failed": + return { + icon: "text-red-700 dark:text-red-200", + iconBackground: "bg-red-200 dark:bg-red-700", + line: "bg-red-200 dark:bg-red-800", + row: "", + }; + case "waiting": + return { + icon: "text-amber-700 dark:text-amber-200", + iconBackground: "bg-amber-200 dark:bg-amber-700", + line: "bg-slate-200 dark:bg-slate-700", + row: "", + }; + case "complete": + default: + return { + icon: "text-green-800 dark:text-green-200", + iconBackground: "bg-green-300 dark:bg-green-700", + line: "bg-green-300 dark:bg-green-800/80", + row: "", + }; + } +}; + +const getDependencyTimelineTime = ( + job: undefined | WorkflowTask, +): Date | undefined => { + return job?.finalizedAt ?? job?.attemptedAt ?? job?.createdAt; +}; + +const getTimelineDurationMetric = ({ + endTime, + startTime, +}: { + endTime?: Date; + startTime: Date; +}): ReactNode => ( + <> + () + +); + +const getTaskTimelineEvents = ( + job: WorkflowTask, + jobsByTask: JobsByTask, +): TaskTimelineEvent[] => { + const events: TaskTimelineEvent[] = []; + const dependencyItems = [...job.deps] + .sort(compareDependencyNames) + .map((depName) => ({ + depJob: jobsByTask[depName], + label: depName, + })); + + if (dependencyItems.length > 0) { + const finalizedDeps = dependencyItems.filter( + (dep) => dep.depJob?.finalizedAt, + ); + const latestDependencyTime = + dependencyItems + .map((dep) => getDependencyTimelineTime(dep.depJob)) + .filter((time): time is Date => Boolean(time)) + .sort(compareAsc) + .at(-1) ?? 
job.createdAt; + const dependenciesCleared = finalizedDeps.length === job.deps.length; + + events.push({ + description: getDependencyTimelineDescription({ + dependenciesCleared, + finalizedDependencyCount: finalizedDeps.length, + job, + }), + icon: LinkIcon, + items: dependencyItems.map((dep) => ({ + label: dep.label, + mono: true, + selectedJobId: dep.depJob?.id, + state: dep.depJob?.state, + })), + key: "dependencies", + metric: + compareAsc(latestDependencyTime, job.createdAt) > 0 + ? getTimelineDurationMetric({ + endTime: dependenciesCleared ? latestDependencyTime : undefined, + startTime: job.createdAt, + }) + : undefined, + status: dependenciesCleared ? "complete" : "waiting", + time: latestDependencyTime, + title: + finalizedDeps.length === 0 + ? job.deps.length === 1 + ? "Dependency pending" + : "Dependencies pending" + : finalizedDeps.length === job.deps.length + ? "Dependencies completed" + : "Dependencies progressing", + }); + } + + if (job.wait?.phase === "waiting" && job.wait.startedAt) { + events.push({ + description: getWaitPendingTimelineDescription(job.wait), + icon: QueueListIcon, + key: "wait-pending", + metric: getTimelineDurationMetric({ + startTime: job.wait.startedAt, + }), + status: "waiting", + time: job.wait.startedAt, + title: "Wait condition pending", + }); + } + + if (job.wait?.resolvedAt) { + const matchedTerms = job.wait.terms.filter( + (term) => term.result?.satisfied, + ); + + events.push({ + description: getWaitResolvedTimelineDescription(job.wait), + icon: CheckCircleIcon, + items: matchedTerms.map((term) => ({ + label: term.label, + rawLabel: term.label === term.name ? undefined : term.name, + waitTermKind: term.kind, + waitTermName: term.name, + })), + key: "wait-resolved", + metric: job.wait.startedAt + ? 
getTimelineDurationMetric({ + endTime: job.wait.resolvedAt, + startTime: job.wait.startedAt, + }) + : undefined, + status: "complete", + time: job.wait.resolvedAt, + title: "Wait resolved", + }); + } + + const stagedTime = getTaskTimelineStagedTime(job); + if (stagedTime) { + events.push({ + icon: QueueListIcon, + key: "task-staged", + metric: + job.attemptedAt && compareAsc(job.attemptedAt, stagedTime) >= 0 + ? getTimelineDurationMetric({ + endTime: job.attemptedAt, + startTime: stagedTime, + }) + : undefined, + status: job.attemptedAt ? "complete" : "waiting", + time: stagedTime, + title: "Task staged", + }); + } + + if (job.attemptedAt) { + events.push({ + icon: + job.state === JobState.Running ? RunningSpinnerIcon : PlayCircleIcon, + key: "task-started", + metric: + job.state === JobState.Running + ? getTimelineDurationMetric({ + startTime: job.attemptedAt, + }) + : undefined, + status: job.state === JobState.Running ? "active" : "complete", + time: job.attemptedAt, + title: "Task started", + }); + } + + if (job.finalizedAt) { + events.push({ + icon: getFinalizedTimelineIcon(job.state), + key: "task-finalized", + metric: job.attemptedAt + ? getTimelineDurationMetric({ + endTime: job.finalizedAt, + startTime: job.attemptedAt, + }) + : undefined, + status: getFinalizedTimelineStatus(job.state), + time: job.finalizedAt, + title: getFinalizedNarrativeLabel(job.state), + }); + } + + return events.sort((leftEvent, rightEvent) => + compareAsc(leftEvent.time, rightEvent.time), + ); +}; + +const getTaskTimelineStagedTime = (job: WorkflowTask): Date | undefined => { + if (job.state === JobState.Pending) return undefined; + + return job.stagedAt ?? job.createdAt; +}; + +const getDependencyTimelineDescription = ({ + dependenciesCleared, + finalizedDependencyCount, + job, +}: { + dependenciesCleared: boolean; + finalizedDependencyCount: number; + job: WorkflowTask; +}): ReactNode => { + const statusDescription = + finalizedDependencyCount === 0 + ? 
`${job.deps.length} required dependency task${job.deps.length === 1 ? "" : "s"} pending.` + : dependenciesCleared + ? undefined + : `${finalizedDependencyCount} of ${job.deps.length} required dependency tasks finished.`; + + if (!statusDescription) { + return undefined; + } + + return

{statusDescription}

; +}; + +const getWaitResolvedTimelineDescription = ( + wait: NonNullable, +): ReactNode => { + const matchedTerms = wait.terms.filter((term) => term.result?.satisfied); + + if (matchedTerms.length > 1) { + return `${matchedTerms.length} terms satisfied and the wait expression evaluated true.`; + } + + if (wait.summary) { + return `Resolved by ${trimTrailingPeriod(wait.summary)}.`; + } + + if (matchedTerms.length === 1) { + return `Resolved by ${trimTrailingPeriod(matchedTerms[0].label)}.`; + } + + return "The wait no longer blocks this task."; +}; + +const getWaitPendingTimelineDescription = ( + wait: NonNullable, +): ReactNode => { + if (wait.summary) { + return `${trimTrailingPeriod(wait.summary)}.`; + } + + return "This task is still blocked by its wait."; +}; + +const trimTrailingPeriod = (value: string): string => value.replace(/\.+$/, ""); + +const getFinalizedTimelineIcon = (state: JobState): Heroicon => { + switch (state) { + case JobState.Cancelled: + return XCircleIcon; + case JobState.Discarded: + return TrashIcon; + case JobState.Completed: + default: + return CheckCircleIcon; + } +}; + +const getFinalizedTimelineStatus = ( + state: JobState, +): TaskTimelineEvent["status"] => { + switch (state) { + case JobState.Cancelled: + case JobState.Discarded: + return "failed"; + case JobState.Completed: + default: + return "complete"; + } +}; + +const getFinalizedNarrativeLabel = (state: JobState): string => { + switch (state) { + case JobState.Cancelled: + return "Task cancelled"; + case JobState.Completed: + return "Task completed"; + case JobState.Discarded: + return "Task discarded"; + default: + return "Task finalized"; + } +}; diff --git a/src/components/WorkflowGateInspector.model.ts b/src/components/WorkflowGateInspector.model.ts new file mode 100644 index 00000000..6cafd3a2 --- /dev/null +++ b/src/components/WorkflowGateInspector.model.ts @@ -0,0 +1,542 @@ +import { + type WorkflowTask, + type WorkflowTaskSignalList, + type WorkflowTaskWait, + type 
WorkflowTaskWaitDiagnostics, + type WorkflowTaskWaitTimer, +} from "@services/workflows"; +import { formatDurationShort } from "@utils/time"; + +import { + type SignalHistorySurface, + type SignalInspectorState, + type WaitSignalInput, + type WaitTermResult, + type WaitTermView, + type WaitTimerInput, +} from "./WorkflowGateInspector.types"; + +export const getSignalSurfaceKey = ( + surface: SignalHistorySurface, +): string | undefined => + surface.kind === "condition" && !surface.termName + ? surface.signalKey + : undefined; + +export const getSignalSurfaceTermName = ( + surface: SignalHistorySurface, +): string | undefined => + surface.kind === "condition" ? surface.termName : undefined; + +export const getSignalSurfaceStateKey = ( + surface: SignalHistorySurface, +): string => + surface.kind === "condition" + ? (surface.termName ?? surface.signalKey) + : surface.kind; + +export const waitDiagnosticsErrorMessage = (error: unknown): string => { + if (error instanceof Error && error.message) { + return `Unable to load waiting diagnostics: ${error.message}`; + } + + return "Unable to load waiting diagnostics."; +}; + +export const getConditionSignalStateKey = (condition: WaitTermView): string => + condition.signalTermName ?? condition.signal?.key ?? condition.technicalName; + +export const signalSurfaceForCondition = ( + condition: WaitTermView, +): SignalHistorySurface => ({ + kind: "condition", + signalKey: condition.signal?.key ?? 
condition.technicalName, + termName: condition.signalTermName, +}); + +export const hasWaitDetails = (wait: WorkflowTaskWait): boolean => { + return ( + wait.terms.length > 0 || + wait.inputs.signals.length > 0 || + wait.inputs.timers.length > 0 || + wait.inputs.deps.length > 0 + ); +}; + +export const buildWaitTermViews = ( + wait: WorkflowTaskWait, + dependencyTasks?: Record, + diagnostics?: WorkflowTaskWaitDiagnostics, +): WaitTermView[] => { + const inputs = wait.inputs; + const usedSignalKeys = new Set(); + const usedTimerNames = new Set(); + const usedDepTasks = new Set(); + const diagnosticsByTerm = new Map( + diagnostics?.terms.map((term) => [term.name, term]) ?? [], + ); + const signalDiagnosticsByKey = new Map( + diagnostics?.inputs.signals.map((signal) => [signal.key, signal]) ?? [], + ); + const timerDiagnosticsByName = new Map( + diagnostics?.inputs.timers.map((timer) => [timer.name, timer]) ?? [], + ); + const conditions: WaitTermView[] = wait.terms.map((term, index) => { + const diagnostic = diagnosticsByTerm.get(term.name); + const signal = term.signalKey + ? inputs.signals.find((input) => input.key === term.signalKey) + : undefined; + const timer = term.timerName + ? inputs.timers.find((input) => input.name === term.timerName) + : undefined; + const result = mergeWaitTermResult(term.result, diagnostic); + const mergedSignal = signal + ? mergeSignalInputDiagnostics( + signal, + signalDiagnosticsByKey.get(signal.key), + ) + : undefined; + const mergedTimer = timer + ? mergeTimerInputDiagnostics( + timer, + timerDiagnosticsByName.get(timer.name), + ) + : undefined; + + if (signal) { + usedSignalKeys.add(signal.key); + } + if (timer) { + usedTimerNames.add(timer.name); + } + + return { + exprCel: term.exprCel, + kind: term.kind, + label: getWaitTermDisplayLabel(term), + matched: result?.satisfied ?? false, + result, + signal: mergedSignal, + signalTermName: signal ? 
term.name : undefined, + sortIndex: index, + technicalName: term.name, + timer: mergedTimer, + }; + }); + + inputs.signals.forEach((signal, index) => { + if (usedSignalKeys.has(signal.key)) return; + + const diagnostic = signalDiagnosticsByKey.get(signal.key); + const mergedSignal = mergeSignalInputDiagnostics(signal, diagnostic); + conditions.push({ + kind: "signal_input", + label: signal.key, + matched: (mergedSignal.result?.includedCount ?? 0) > 0, + signal: mergedSignal, + sortIndex: wait.terms.length + index, + technicalName: signal.key, + }); + }); + + inputs.timers.forEach((timer, index) => { + if (usedTimerNames.has(timer.name)) return; + + const diagnostic = timerDiagnosticsByName.get(timer.name); + const mergedTimer = mergeTimerInputDiagnostics(timer, diagnostic); + conditions.push({ + kind: "timer_input", + label: humanizeIdentifier(timer.name), + matched: mergedTimer.result?.fired ?? false, + sortIndex: wait.terms.length + inputs.signals.length + index, + technicalName: timer.name, + timer: mergedTimer, + }); + }); + + inputs.deps.forEach((dep, index) => { + if (usedDepTasks.has(dep.taskName)) return; + usedDepTasks.add(dep.taskName); + const diagnostic = diagnostics?.inputs.deps.find( + (input) => input.taskName === dep.taskName, + ); + conditions.push({ + dependencyTask: dependencyTasks?.[dep.taskName], + kind: "dep_input", + label: dep.taskName, + matched: dep.result?.available ?? diagnostic?.available ?? 
false, + sortIndex: + wait.terms.length + + inputs.signals.length + + inputs.timers.length + + index, + technicalName: dep.taskName, + }); + }); + + return conditions.sort((leftCondition, rightCondition) => + compareConditions(leftCondition, rightCondition, wait.phase), + ); +}; + +const mergeWaitTermResult = ( + result: WorkflowTaskWait["terms"][number]["result"], + diagnostic?: WorkflowTaskWaitDiagnostics["terms"][number], +): undefined | WaitTermResult => { + if (!diagnostic) return result; + + return { + lastMatchedID: diagnostic.lastMatchedID ?? result?.lastMatchedID, + matchedCount: diagnostic.matchedCount, + requiredCount: diagnostic.requiredCount, + satisfied: diagnostic.satisfied, + }; +}; + +const mergeSignalInputDiagnostics = ( + signal: WaitSignalInput, + diagnostic?: WorkflowTaskWaitDiagnostics["inputs"]["signals"][number], +): WaitSignalInput => { + if (!diagnostic) return signal; + + return { + ...signal, + result: { + includedCount: diagnostic.includedCount, + lastIncludedID: diagnostic.lastID ?? signal.result?.lastIncludedID, + }, + }; +}; + +const mergeTimerInputDiagnostics = ( + timer: WaitTimerInput, + diagnostic?: WorkflowTaskWaitDiagnostics["inputs"]["timers"][number], +): WaitTimerInput => { + if (!diagnostic) return timer; + + return { + ...timer, + fireAt: diagnostic.fireAt ?? timer.fireAt, + result: { + fireAt: diagnostic.fireAt ?? 
timer.result?.fireAt, + fired: diagnostic.fired, + }, + }; +}; + +const compareConditions = ( + leftCondition: WaitTermView, + rightCondition: WaitTermView, + phase: WorkflowTaskWait["phase"], +): number => { + const leftRank = getConditionSortRank(leftCondition, phase); + const rightRank = getConditionSortRank(rightCondition, phase); + + if (leftRank !== rightRank) return leftRank - rightRank; + + const leftFireAt = leftCondition.timer?.fireAt?.getTime(); + const rightFireAt = rightCondition.timer?.fireAt?.getTime(); + if (leftFireAt !== undefined && rightFireAt !== undefined) { + return leftFireAt - rightFireAt; + } + if (leftFireAt !== undefined) return -1; + if (rightFireAt !== undefined) return 1; + + return leftCondition.sortIndex - rightCondition.sortIndex; +}; + +const getConditionSortRank = ( + condition: WaitTermView, + phase: WorkflowTaskWait["phase"], +): number => { + if (condition.matched) return 0; + + if (phase === "resolved") return 1; + if (condition.timer?.fireAt) return 1; + if (condition.timer) return 2; + if (condition.signal) return 3; + + return 4; +}; + +export const getConditionStateLabel = ( + condition: WaitTermView, + phase: WorkflowTaskWait["phase"], +): string => { + if (condition.matched) return "Satisfied"; + if (condition.timer?.result?.fired) return "Fired"; + if (phase === "not_started") { + return condition.timer?.fireAt ? "Scheduled" : "Pending"; + } + if (phase !== "resolved") { + return condition.timer?.fireAt ? 
"Scheduled" : "Waiting"; + } + + return "Not satisfied"; +}; + +export const getConditionStateTone = ( + condition: WaitTermView, + phase: WorkflowTaskWait["phase"], +): { + borderClassName: string; + dotClassName: string; + labelClassName: string; + rowClassName: string; +} => { + if (condition.matched) { + return { + borderClassName: "border-l-green-400", + dotClassName: "bg-green-500", + labelClassName: "text-green-700 dark:text-green-400", + rowClassName: "bg-green-50/30 dark:bg-green-950/10", + }; + } + + if (condition.timer?.result?.fired) { + return { + borderClassName: "border-l-amber-400", + dotClassName: "bg-amber-500", + labelClassName: "text-amber-700 dark:text-amber-400", + rowClassName: "bg-amber-50/30 dark:bg-amber-950/10", + }; + } + + if (phase !== "resolved" && condition.timer?.fireAt) { + return { + borderClassName: "border-l-blue-400", + dotClassName: "bg-blue-500", + labelClassName: "text-blue-700 dark:text-blue-400", + rowClassName: "bg-blue-50/30 dark:bg-blue-950/10", + }; + } + + if (phase === "waiting") { + return { + borderClassName: "border-l-amber-300", + dotClassName: "bg-amber-400", + labelClassName: "text-amber-700 dark:text-amber-400", + rowClassName: "bg-amber-50/20 dark:bg-amber-950/10", + }; + } + + return { + borderClassName: "border-l-slate-200 dark:border-l-slate-700", + dotClassName: "bg-slate-300 dark:bg-slate-600", + labelClassName: "text-slate-500 dark:text-slate-400", + rowClassName: "bg-white dark:bg-slate-950/20", + }; +}; + +export const getConditionFocusKey = (condition: WaitTermView): string => { + return `${condition.kind}:${condition.technicalName}`; +}; + +export const conditionMatchesName = ( + condition: WaitTermView, + conditionName: string, +): boolean => { + const normalizedConditionName = normalizeConditionName(conditionName); + const possibleNames = [ + condition.technicalName, + condition.label, + condition.signal?.key, + condition.timer?.name, + ].filter((name): name is string => Boolean(name)); + + return 
possibleNames.some( + (possibleName) => + normalizeConditionName(possibleName) === normalizedConditionName, + ); +}; + +export const orderConditionsForSummary = ( + summary: string | undefined, + conditions: WaitTermView[], +): WaitTermView[] => { + if (!summary) return conditions; + + const normalizedSummary = summary.toLowerCase(); + + return [...conditions].sort((leftCondition, rightCondition) => { + const leftIndex = normalizedSummary.indexOf( + leftCondition.label.toLowerCase(), + ); + const rightIndex = normalizedSummary.indexOf( + rightCondition.label.toLowerCase(), + ); + + if (leftIndex >= 0 && rightIndex >= 0) return leftIndex - rightIndex; + if (leftIndex >= 0) return -1; + if (rightIndex >= 0) return 1; + + return 0; + }); +}; + +const autoOpenSignalEvidenceLimit = 3; + +export const getAutoOpenSignalEvidenceSurface = ( + conditions: WaitTermView[], +): SignalHistorySurface | undefined => { + const condition = conditions.find( + (condition) => + condition.signal && + (condition.signal.result?.includedCount ?? 0) > 0 && + (condition.signal.result?.includedCount ?? 0) <= + autoOpenSignalEvidenceLimit && + condition.matched, + ); + return condition ? signalSurfaceForCondition(condition) : undefined; +}; + +export const getSignalEvidenceSummary = ( + signal: WorkflowTaskWait["inputs"]["signals"][number], +): string => { + const includedCount = signal.result?.includedCount ?? 0; + return includedCount === 1 + ? "1 signal included" + : `${includedCount.toString()} signals included`; +}; + +export const getLoadedSignalHistorySummary = ( + signalListState: SignalInspectorState, +): string | undefined => { + const signalCount = signalListState.signals.length; + if (signalCount === 0) return undefined; + + return `${signalCount.toString()} shown${ + signalListState.hasMore ? 
" · older signals available" : "" + }`; +}; + +export const signalInspectorStateFromSignalList = ( + signalList: WorkflowTaskSignalList, +): SignalInspectorState => ({ + error: undefined, + hasMore: signalList.hasMore, + isLoading: false, + isLoadingMore: false, + nextCursorID: signalList.nextCursorID, + signals: signalList.signals, +}); + +export const getConditionSignalScope = ( + wait: WorkflowTaskWait, +): WorkflowTaskSignalList["scope"] => + wait.phase === "resolved" ? "evidence" : "history"; + +export const getWaitStatusLabel = ( + phase: WorkflowTaskWait["phase"], +): string => { + switch (phase) { + case "not_started": + return "Not started"; + case "resolved": + return "Resolved"; + case "waiting": + return "Pending"; + default: + return "Unknown"; + } +}; + +export const getWaitSummary = (wait: WorkflowTaskWait): string => { + if (wait.summary) { + return wait.phase === "resolved" + ? `Resolved by: ${wait.summary}.` + : wait.summary; + } + + switch (wait.phase) { + case "not_started": + return "Wait has not started because dependencies are still incomplete."; + case "resolved": + return "Wait resolved."; + case "waiting": + return "Waiting diagnostics are available for this task."; + default: + return "Wait state is unavailable."; + } +}; + +export const getWaitTermKindLabel = (kind: string): string => { + switch (kind) { + case "dep_input": + return "Dependency"; + case "generic": + return "Generic CEL"; + case "signal": + case "signal_input": + return "Signal"; + case "timer": + case "timer_input": + return "Timer"; + default: + return kind.replaceAll("_", " "); + } +}; + +const getWaitTermLabel = (term: WorkflowTaskWait["terms"][number]): string => { + if (!term.label || term.label === term.name) { + return "—"; + } + + return term.label; +}; + +const getWaitTermDisplayLabel = ( + term: WorkflowTaskWait["terms"][number], +): string => { + const label = getWaitTermLabel(term); + return label === "—" ? 
humanizeIdentifier(term.name) : label; +}; + +const normalizeConditionName = (value: string): string => { + return value + .toLowerCase() + .replaceAll(/[^a-z0-9]+/g, "_") + .replaceAll(/^_+|_+$/g, ""); +}; + +const humanizeIdentifier = (value: string): string => { + return normalizeConditionName(value).replaceAll("_", " "); +}; + +export const formatTimerAnchorWait = ( + anchor: WorkflowTaskWaitTimer["anchor"], +): string => { + if (!anchor) return "Waiting to schedule"; + + switch (anchor.kind) { + case "task_finalized_at": + return anchor.task ? `Waiting for ${anchor.task}` : "Waiting for task"; + case "wait_started_at": + return "Waiting for wait to start"; + case "workflow_created_at": + return "Waiting for workflow start"; + default: + return "Waiting to schedule"; + } +}; + +export const getTimerDelayLabel = ( + timer: WorkflowTaskWait["inputs"]["timers"][number], +): string | undefined => { + if (typeof timer.afterSeconds !== "number") return undefined; + + if (Number.isInteger(timer.afterSeconds)) { + if (timer.afterSeconds % 3600 === 0) { + return `${(timer.afterSeconds / 3600).toString()}h`; + } + if (timer.afterSeconds % 60 === 0) { + return `${(timer.afterSeconds / 60).toString()}m`; + } + } + + return formatDurationShort( + new Date(timer.afterSeconds * 1000), + new Date(0), + false, + ); +}; diff --git a/src/components/WorkflowGateInspector.stories.tsx b/src/components/WorkflowGateInspector.stories.tsx new file mode 100644 index 00000000..d56905e1 --- /dev/null +++ b/src/components/WorkflowGateInspector.stories.tsx @@ -0,0 +1,563 @@ +import type { WorkflowTaskSignal, WorkflowTaskWait } from "@services/workflows"; +import type { Meta, StoryObj } from "@storybook/react-vite"; + +import { add, sub } from "date-fns"; +import { userEvent, within } from "storybook/test"; + +import WorkflowGateInspector, { + type TaskSignalLoader, +} from "./WorkflowGateInspector"; + +const now = new Date(); + +type StorySignalFixtures = Record< + string, + Partial> +>; + 
+const buildSignal = ({ + attempt = 1, + createdAt, + id, + key, + payload, + source, +}: { + attempt?: number; + createdAt: Date; + id: bigint; + key: string; + payload: unknown; + source: unknown; +}): WorkflowTaskSignal => ({ + attempt, + createdAt, + id, + key, + payload, + source, +}); + +const compareSignalsDesc = ( + leftSignal: WorkflowTaskSignal, + rightSignal: WorkflowTaskSignal, +): number => { + if (leftSignal.id > rightSignal.id) return -1; + if (leftSignal.id < rightSignal.id) return 1; + return 0; +}; + +const createStorySignalLoader = ( + fixtures: StorySignalFixtures, +): TaskSignalLoader => { + return async ({ cursorID, key, limit = 20, scope }) => { + const scopeKey = scope ?? "history"; + const signals = key + ? (fixtures[key]?.[scopeKey] ?? []) + : Object.values(fixtures).flatMap((fixture) => fixture[scopeKey] ?? []); + const sortedSignals = [...signals].sort(compareSignalsDesc); + const cursorBigInt = cursorID === undefined ? undefined : BigInt(cursorID); + const cursorIndex = + cursorBigInt === undefined + ? -1 + : sortedSignals.findIndex((signal) => signal.id === cursorBigInt); + const startIndex = cursorIndex >= 0 ? cursorIndex + 1 : 0; + const page = sortedSignals.slice(startIndex, startIndex + limit); + const hasMore = startIndex + page.length < sortedSignals.length; + const lastSignal = page[page.length - 1]; + + return { + hasMore, + nextCursorID: hasMore ? 
lastSignal?.id : undefined, + scope: scopeKey, + signals: page, + }; + }; +}; + +const resolvedSignalLoader = createStorySignalLoader({ + "approval.received": { + evidence: [ + buildSignal({ + createdAt: sub(now, { minutes: 2 }), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + history: [ + buildSignal({ + createdAt: sub(now, { minutes: 1 }), + id: 9124n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + timing: "after-resolution-duplicate", + }, + }), + buildSignal({ + createdAt: sub(now, { minutes: 2 }), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + }, +}); + +const allTaskSignalsLoader = createStorySignalLoader({ + "approval.override": { + history: [ + buildSignal({ + createdAt: sub(now, { seconds: 30 }), + id: 9125n, + key: "approval.override", + payload: { + override: true, + reviewer: "director", + ticket_id: "approval-9125", + }, + source: { + source: "admin-console", + }, + }), + ], + }, + "approval.received": { + evidence: [ + buildSignal({ + createdAt: sub(now, { minutes: 2 }), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + history: [ + buildSignal({ + createdAt: sub(now, { minutes: 1 }), + id: 9124n, + key: "approval.received", + payload: { + approved: true, + reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + timing: "after-resolution-duplicate", + }, + }), + buildSignal({ + createdAt: sub(now, { minutes: 2 }), + id: 9123n, + key: "approval.received", + payload: { + approved: true, + 
reviewer: "manager", + ticket_id: "approval-9123", + }, + source: { + source: "review-console", + }, + }), + ], + }, +}); + +const waitingOnSignals: WorkflowTaskWait = { + exprCel: "approval_received || manager_override || review_timeout_reached", + inputs: { + deps: [], + signals: [{ key: "approval.received" }, { key: "manager.override" }], + timers: [ + { + afterSeconds: 900, + anchor: { kind: "wait_started_at" }, + fireAt: add(now, { minutes: 12 }), + name: "review_timeout", + }, + ], + }, + phase: "waiting", + startedAt: sub(now, { minutes: 3 }), + summary: "Waiting for human approval, manager override, or review timeout.", + terms: [ + { + exprCel: `payload.approved == true`, + kind: "signal", + label: "Human approval received", + name: "approval_received", + signalKey: "approval.received", + }, + { + exprCel: `payload.manager_override == true`, + kind: "signal", + label: "Manager override received", + name: "manager_override", + signalKey: "manager.override", + }, + { + kind: "timer", + label: "Review timeout reached", + name: "review_timeout_reached", + timerName: "review_timeout", + }, + ], +}; + +const timerHeavyWait: WorkflowTaskWait = { + exprCel: + "soft_timeout_reached || hard_timeout_reached || customer_follow_up_reached", + inputs: { + deps: [], + signals: [], + timers: [ + { + afterSeconds: 300, + anchor: { kind: "wait_started_at" }, + fireAt: sub(now, { minutes: 13 }), + name: "soft_timeout", + }, + { + afterSeconds: 900, + anchor: { kind: "wait_started_at" }, + fireAt: add(now, { minutes: 2 }), + name: "hard_timeout", + }, + { + afterSeconds: 1800, + anchor: { kind: "task_finalized_at", task: "send_response" }, + name: "customer_follow_up", + }, + ], + }, + phase: "waiting", + startedAt: sub(now, { minutes: 18 }), + terms: [ + { + kind: "timer", + label: "Soft timeout reached", + name: "soft_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + timerName: "soft_timeout", + }, + { + kind: "timer", + label: "Hard 
timeout reached", + name: "hard_timeout_reached", + timerName: "hard_timeout", + }, + { + kind: "timer", + label: "Customer follow-up reached", + name: "customer_follow_up_reached", + timerName: "customer_follow_up", + }, + ], +}; + +const resolvedWait: WorkflowTaskWait = { + evidence: { + evaluatedAt: sub(now, { minutes: 2 }), + workflowAttempt: 1, + }, + exprCel: + "(risk_checks_clear && approval_received) || approval_override || approval_timeout_reached", + inputs: { + deps: [{ taskName: "risk_checks" }], + signals: [ + { + key: "approval.received", + result: { includedCount: 2, lastIncludedID: 9123n }, + }, + { + key: "approval.override", + result: { includedCount: 0 }, + }, + ], + timers: [ + { + afterSeconds: 1800, + anchor: { kind: "wait_started_at" }, + fireAt: add(now, { minutes: 4 }), + name: "approval_timeout", + result: { fireAt: add(now, { minutes: 4 }), fired: false }, + }, + ], + }, + phase: "resolved", + resolvedAt: sub(now, { minutes: 2 }), + startedAt: sub(now, { minutes: 26 }), + summary: "Risk checks clear and human approval received", + terms: [ + { + exprCel: `deps["risk_checks"].output.risk == "clear"`, + kind: "generic", + label: "Risk checks clear", + name: "risk_checks_clear", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + exprCel: `payload.approved == true`, + kind: "signal", + label: "Human approval received", + name: "approval_received", + result: { + lastMatchedID: 9123n, + matchedCount: 2, + requiredCount: 1, + satisfied: true, + }, + signalKey: "approval.received", + }, + { + exprCel: `payload.override == true`, + kind: "signal", + label: "Approval override received", + name: "approval_override", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "approval.override", + }, + { + kind: "timer", + label: "Approval timeout reached", + name: "approval_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + timerName: "approval_timeout", + }, + ], +}; 
+ +const manySignals = Array.from({ length: 43 }, (_, index) => { + const id = 9300n - BigInt(index); + const approved = index === 0; + + return buildSignal({ + createdAt: sub(now, { seconds: index * 20 }), + id, + key: "approval.received", + payload: { + approved, + request_id: `review-${id.toString()}`, + reviewer: approved ? "review_lead" : "review_queue", + }, + source: { + channel: index % 2 === 0 ? "webhook" : "admin-console", + sequence: Number(id), + }, + }); +}); + +const manySignalsLoader = createStorySignalLoader({ + "approval.received": { + evidence: manySignals, + history: manySignals, + }, +}); + +const manySignalsWait: WorkflowTaskWait = { + ...resolvedWait, + exprCel: "approval_received || approval_timeout_reached", + inputs: { + ...resolvedWait.inputs, + signals: [ + { + key: "approval.received", + result: { includedCount: manySignals.length, lastIncludedID: 9300n }, + }, + ], + }, + summary: "Approval received after many signal deliveries", + terms: [ + { + exprCel: `payload.approved == true`, + kind: "signal", + label: "Approval received", + name: "approval_received", + result: { + lastMatchedID: 9300n, + matchedCount: 1, + requiredCount: 1, + satisfied: true, + }, + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Approval timeout reached", + name: "approval_timeout_reached", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + timerName: "approval_timeout", + }, + ], +}; + +const declaredSignalWithoutTermsWait: WorkflowTaskWait = { + exprCel: `signals.exists(s, s.key == "approval.received" && s.payload.approved == true)`, + inputs: { + deps: [], + signals: [{ key: "approval.received" }], + timers: [], + }, + phase: "waiting", + startedAt: sub(now, { minutes: 6 }), + summary: "Waiting for approval signal.", + terms: [], +}; + +const longCelExpressionsWait: WorkflowTaskWait = { + exprCel: "approval_payload_ready && draft_quality_gate", + inputs: { + deps: [{ taskName: "verify_draft" }], + signals: [{ key: 
"approval.received" }], + timers: [], + }, + phase: "waiting", + startedAt: sub(now, { minutes: 8 }), + summary: "Waiting for approval payload and draft quality checks.", + terms: [ + { + exprCel: `payload.approved == true && payload.reviewer != "" && payload.review_score >= 90 && payload.source in ["console", "webhook"]`, + kind: "signal", + label: "Approval payload ready", + name: "approval_payload_ready", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "approval.received", + }, + { + exprCel: `deps["verify_draft"].output.quality.score >= 0.95 && deps["verify_draft"].output.policy.status == "clear" && deps["verify_draft"].output.needs_human_review == false`, + kind: "generic", + label: "Draft passed quality gate", + name: "draft_quality_gate", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + }, + ], +}; + +const meta: Meta = { + component: WorkflowGateInspector, + parameters: { + layout: "padded", + }, + title: "Components/WorkflowGateInspector", +}; + +export default meta; + +type Story = StoryObj; + +const openSignalHistory = async (canvasElement: HTMLElement) => { + const canvas = within(canvasElement); + + await userEvent.click(await canvas.findByRole("button", { name: "Details" })); + await userEvent.click( + await canvas.findByRole("button", { name: /Signal evidence/ }), + ); +}; + +const openAllTaskSignals = async (canvasElement: HTMLElement) => { + const canvas = within(canvasElement); + + await userEvent.click(await canvas.findByRole("button", { name: "Details" })); + await userEvent.click( + await canvas.findByRole("button", { name: "All task signals" }), + ); +}; + +export const WaitingOnSignals: Story = { + args: { + taskName: "await/review", + wait: waitingOnSignals, + workflowID: "wf-story", + }, +}; + +export const TimerHeavy: Story = { + args: { + taskName: "queue/follow-up", + wait: timerHeavyWait, + workflowID: "wf-story", + }, +}; + +export const ResolvedResult: Story = { + args: { + 
loadTaskSignals: resolvedSignalLoader, + taskName: "await/review", + wait: resolvedWait, + workflowID: "wf-story", + }, + play: async ({ canvasElement }) => openSignalHistory(canvasElement), +}; + +export const ManySignalsReceived: Story = { + args: { + loadTaskSignals: manySignalsLoader, + taskName: "await/review", + wait: manySignalsWait, + workflowID: "wf-story", + }, + play: async ({ canvasElement }) => openSignalHistory(canvasElement), +}; + +export const DeclaredSignalWithoutTerms: Story = { + args: { + loadTaskSignals: manySignalsLoader, + taskName: "await/review", + wait: declaredSignalWithoutTermsWait, + workflowID: "wf-story", + }, + play: async ({ canvasElement }) => openSignalHistory(canvasElement), +}; + +export const LongCelExpressions: Story = { + args: { + loadTaskSignals: manySignalsLoader, + taskName: "await/review", + wait: longCelExpressionsWait, + workflowID: "wf-story", + }, + play: async ({ canvasElement }) => { + const canvas = within(canvasElement); + + await userEvent.click( + await canvas.findByRole("button", { name: "Details" }), + ); + }, +}; + +export const AllTaskSignals: Story = { + args: { + loadTaskSignals: allTaskSignalsLoader, + taskName: "await/review", + wait: resolvedWait, + workflowID: "wf-story", + }, + play: async ({ canvasElement }) => openAllTaskSignals(canvasElement), +}; diff --git a/src/components/WorkflowGateInspector.test.tsx b/src/components/WorkflowGateInspector.test.tsx new file mode 100644 index 00000000..447cc662 --- /dev/null +++ b/src/components/WorkflowGateInspector.test.tsx @@ -0,0 +1,780 @@ +import type { WorkflowTaskWait } from "@services/workflows"; + +import { + act, + fireEvent, + render, + screen, + waitFor, +} from "@testing-library/react"; +import { add } from "date-fns"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +import WorkflowGateInspector, { + type TaskWaitDiagnosticsLoader, + type WaitFocusRequest, +} from "./WorkflowGateInspector"; + 
+describe("WorkflowGateInspector", () => { + beforeEach(() => { + vi.useFakeTimers(); + vi.setSystemTime(new Date("2026-04-21T18:00:00Z")); + document.body.innerHTML = + ''; + }); + + afterEach(() => { + vi.useRealTimers(); + vi.restoreAllMocks(); + }); + + it("renders summary, terms, signals, and timers from wait data", async () => { + const wait: WorkflowTaskWait = { + evidence: { + evaluatedAt: new Date("2026-04-21T17:59:00Z"), + workflowAttempt: 2, + }, + exprCel: "approval_received || review_sla_timeout", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + result: { + includedCount: 1, + lastIncludedID: 9001n, + }, + }, + ], + timers: [ + { + afterSeconds: 1200, + anchor: { kind: "wait_started_at" }, + fireAt: add(new Date("2026-04-21T17:50:00Z"), { minutes: 20 }), + name: "review_sla_timeout", + result: { + fireAt: add(new Date("2026-04-21T17:50:00Z"), { minutes: 20 }), + fired: false, + }, + }, + ], + }, + phase: "resolved", + resolvedAt: new Date("2026-04-21T17:59:00Z"), + startedAt: new Date("2026-04-21T17:50:00Z"), + summary: "Human approval received", + terms: [ + { + exprCel: `payload.approved == true`, + kind: "signal", + label: "Human approval received", + name: "approval_received", + result: { + lastMatchedID: 9001n, + matchedCount: 1, + requiredCount: 1, + satisfied: true, + }, + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Review SLA timeout reached", + name: "review_sla_timeout", + result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + timerName: "review_sla_timeout", + }, + ], + }; + + renderInspector(wait); + + expect( + screen.getByText((_, element) => + Boolean( + element?.textContent === "Resolved by: Human approval received.", + ), + ), + ).toBeInTheDocument(); + expect( + screen.getByRole("button", { name: "Human approval received" }), + ).toBeInTheDocument(); + expect(screen.getByText("Evaluated")).toBeInTheDocument(); + await act(async () => { + fireEvent.click( + 
screen.getByRole("button", { name: "Human approval received" }), + ); + }); + expect(screen.getByRole("button", { name: "Details" })).toHaveAttribute( + "aria-expanded", + "true", + ); + expect(screen.getByText("1 of 2 conditions satisfied")).toBeInTheDocument(); + expect(screen.getByText("approval_received")).toBeInTheDocument(); + expect(screen.getByText("review_sla_timeout")).toBeInTheDocument(); + expect(screen.getAllByText("Human approval received")).not.toHaveLength(0); + expect(screen.getAllByText("Review SLA timeout reached")).not.toHaveLength( + 0, + ); + expect(screen.getAllByText("Signal")).not.toHaveLength(0); + expect(screen.getAllByText("Timer")).not.toHaveLength(0); + expect(screen.getByText("payload.approved == true")).toBeInTheDocument(); + expect(screen.getByText("Included")).toBeInTheDocument(); + expect(screen.getByText("Last included")).toBeInTheDocument(); + expect(screen.getAllByText("#9001")).not.toHaveLength(0); + expect( + screen.getAllByText((_, element) => + Boolean(element?.textContent?.includes("Satisfied by resolution")), + ), + ).not.toHaveLength(0); + expect(screen.getByText("Fires")).toBeInTheDocument(); + expect( + screen.getAllByText((_, element) => + Boolean(element?.textContent?.includes("20m after wait starts")), + ), + ).not.toHaveLength(0); + expect( + screen.getByRole("button", { name: /Resolution evidence/ }), + ).toBeInTheDocument(); + }); + + it("renders dependency and signal CEL terms", async () => { + const wait: WorkflowTaskWait = { + exprCel: "classify_intake_done && approval_received", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "waiting", + terms: [ + { + exprCel: `deps["classify_intake"].output.category == "launch"`, + kind: "generic", + label: "Classify intake done", + name: "classify_intake_done", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + { + exprCel: `payload.approved == true`, + kind: "signal", + label: "Approval received", + name: "approval_received", + 
result: { matchedCount: 0, requiredCount: 0, satisfied: false }, + signalKey: "approval.received", + }, + ], + }; + + renderInspector(wait); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect(screen.queryByText("Definition")).not.toBeInTheDocument(); + expect( + screen.getByText(`deps["classify_intake"].output.category == "launch"`), + ).toBeInTheDocument(); + expect(screen.getByText("payload.approved == true")).toBeInTheDocument(); + }); + + it("truncates long CEL terms until expanded", async () => { + const longCel = + 'payload.approved == true && payload.reviewer != "" && payload.review_score >= 90 && payload.region in ["us", "ca"]'; + const wait: WorkflowTaskWait = { + exprCel: "manual_approval_received", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "waiting", + terms: [ + { + exprCel: longCel, + kind: "signal", + label: "Manual approval", + name: "manual_approval_received", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "manual.approval", + }, + ], + }; + + renderInspector(wait); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect(screen.queryByText(longCel)).not.toBeInTheDocument(); + expect( + screen.getByText( + /payload\.approved == true && payload\.reviewer != "".*\.\.\./, + ), + ).toBeInTheDocument(); + + await act(async () => { + fireEvent.click( + screen.getByRole("button", { + name: "Show full CEL expression for Manual approval", + }), + ); + }); + + expect(screen.getByText(longCel)).toBeInTheDocument(); + expect( + screen.getByRole("button", { + name: "Hide full CEL expression for Manual approval", + }), + ).toHaveAttribute("aria-expanded", "true"); + }); + + it("keeps signal evidence controls visible when long CEL expands", async () => { + const longCel = + 'payload.action == "approved" && payload.reviewer.email.endsWith("@example.com") && payload.source == "console"'; + 
const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [{ key: "approval.received" }], + timers: [], + }, + phase: "waiting", + terms: [ + { + exprCel: longCel, + kind: "signal", + label: "Approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 1, satisfied: false }, + signalKey: "approval.received", + }, + ], + }; + + renderInspector(wait); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect( + screen.getByRole("button", { name: /Signal history/ }), + ).toBeInTheDocument(); + + await act(async () => { + fireEvent.click( + screen.getByRole("button", { + name: "Show full CEL expression for Approval received", + }), + ); + }); + + expect(screen.getByText(longCel)).toBeInTheDocument(); + expect( + screen.getByRole("button", { name: /Signal history/ }), + ).toBeInTheDocument(); + }); + + it("uses phase-aware fallback copy when no summary is available", () => { + const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "not_started", + terms: [], + }; + + renderInspector(wait); + + expect( + screen.getByText( + "Wait has not started because dependencies are still incomplete.", + ), + ).toBeInTheDocument(); + }); + + it("scrolls to a focused condition only once per focus request", async () => { + vi.useRealTimers(); + + const scrollIntoView = vi.fn(); + const focus = vi.fn(); + Object.defineProperty(HTMLElement.prototype, "scrollIntoView", { + configurable: true, + value: scrollIntoView, + }); + vi.spyOn(HTMLElement.prototype, "focus").mockImplementation(focus); + + const wait: WorkflowTaskWait = { + evidence: { + evaluatedAt: new Date("2026-04-21T17:59:00Z"), + workflowAttempt: 1, + }, + exprCel: "approval_received", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "resolved", + resolvedAt: new Date("2026-04-21T17:59:00Z"), + summary: 
"Human approval received", + terms: [ + { + kind: "signal", + label: "Human approval received", + name: "approval_received", + result: { matchedCount: 0, requiredCount: 0, satisfied: true }, + }, + ], + }; + const focusRequest: WaitFocusRequest = { + conditionName: "approval_received", + requestID: 1, + }; + + const { rerender } = renderInspector(wait, { focusRequest }); + + await waitFor(() => expect(scrollIntoView).toHaveBeenCalledTimes(1)); + + await act(async () => { + rerender( + , + ); + }); + + expect(scrollIntoView).toHaveBeenCalledTimes(1); + + await act(async () => { + rerender( + , + ); + }); + + await waitFor(() => expect(scrollIntoView).toHaveBeenCalledTimes(2)); + expect(focus).toHaveBeenCalledTimes(2); + }); + + it("fetches task signal signals lazily using the evidence scope when resolved", async () => { + vi.useRealTimers(); + + const fetchMock = mockTaskSignalsFetch([ + { + body: { + evidence: { + evaluated_at: "2026-04-21T17:59:00Z", + workflow_attempt: 2, + }, + has_more: false, + scope: "evidence", + signals: [ + { + attempt: 2, + created_at: "2026-04-21T17:58:00Z", + id: "9001", + key: "approval.received", + payload: { decision: "approve" }, + source: { actor: "manager" }, + }, + ], + }, + }, + ]); + + const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + }, + ], + timers: [], + }, + phase: "resolved", + resolvedAt: new Date("2026-04-21T17:59:00Z"), + summary: "Human approval received", + terms: [], + }; + + renderInspector(wait); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + await act(async () => { + fireEvent.click( + screen.getByRole("button", { name: /Resolution evidence/ }), + ); + }); + + await waitFor(() => expect(fetchMock).toHaveBeenCalledTimes(1)); + expect( + screen.getByRole("button", { name: /Resolution evidence/ }), + ).toBeInTheDocument(); + 
expect(fetchMock.mock.calls[0]?.[0]).toContain("scope=evidence"); + expect( + await screen.findByText("Signals included when this wait resolved."), + ).toBeInTheDocument(); + expect(screen.getAllByText(/decision/)).not.toHaveLength(0); + expect(screen.getAllByText(/manager/)).not.toHaveLength(0); + }); + + it("uses history scope by default when still waiting", async () => { + vi.useRealTimers(); + + const fetchMock = mockTaskSignalsFetch([ + { + body: { + has_more: false, + scope: "history", + signals: [], + }, + }, + ]); + + const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [ + { + key: "approval.received", + }, + ], + timers: [], + }, + phase: "waiting", + terms: [], + }; + + renderInspector(wait); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect(screen.getByText("0 of 1 conditions satisfied")).toBeInTheDocument(); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: /Signal history/ })); + }); + + await waitFor(() => expect(fetchMock).toHaveBeenCalledTimes(1)); + expect(fetchMock.mock.calls[0]?.[0]).toContain("scope=history"); + expect( + await screen.findByText( + "No signals found in the current workflow attempt.", + ), + ).toBeInTheDocument(); + }); + + it("shows diagnostics load failures instead of masking them", async () => { + vi.useRealTimers(); + + const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [], + timers: [], + }, + phase: "waiting", + terms: [], + }; + + renderInspector(wait, { + loadTaskWaitDiagnostics: async () => { + throw new Error( + "Expected JSON response from /api/pro/workflows/wf-123/task-wait-diagnostics, received text/html; charset=utf-8.", + ); + }, + }); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect( + await screen.findByText( + "Unable to load waiting diagnostics: Expected JSON 
response from /api/pro/workflows/wf-123/task-wait-diagnostics, received text/html; charset=utf-8.", + ), + ).toBeInTheDocument(); + }); + + it("renders active wait evidence from diagnostics", async () => { + vi.useRealTimers(); + + const wait: WorkflowTaskWait = { + exprCel: "approval_received || review_timeout", + inputs: { + deps: [], + signals: [{ key: "approval.received" }], + timers: [ + { + afterSeconds: 300, + anchor: { kind: "wait_started_at" }, + name: "review_timeout", + }, + ], + }, + phase: "waiting", + terms: [ + { + exprCel: "payload.approved == true", + kind: "signal", + label: "Approval received", + name: "approval_received", + signalKey: "approval.received", + }, + { + kind: "timer", + label: "Review timeout", + name: "review_timeout", + timerName: "review_timeout", + }, + ], + }; + + renderInspector(wait, { + loadTaskWaitDiagnostics: async () => ({ + exprResult: true, + inputs: { + deps: [], + signals: [ + { + includedCount: 3, + key: "approval.received", + lastID: 9003n, + }, + ], + timers: [ + { + fireAt: new Date("2026-04-21T17:55:00Z"), + fired: true, + name: "review_timeout", + }, + ], + }, + inspectedAt: new Date("2026-04-21T18:00:00Z"), + phase: "waiting", + signalScanCount: 3, + signalScanLimit: 10000, + terms: [ + { + lastMatchedID: 9002n, + matchedCount: 2, + name: "approval_received", + requiredCount: 2, + satisfied: true, + }, + { + matchedCount: 0, + name: "review_timeout", + requiredCount: 0, + satisfied: true, + }, + ], + truncated: false, + workflowAttempt: 1, + }), + }); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect( + await screen.findByText("2 of 2 conditions satisfied"), + ).toBeInTheDocument(); + expect(screen.getByText("Included")).toBeInTheDocument(); + expect(screen.getByText("Matched")).toBeInTheDocument(); + expect(screen.getByText("3")).toBeInTheDocument(); + expect(screen.getByText("#9003")).toBeInTheDocument(); + 
expect(screen.getByText("#9002")).toBeInTheDocument(); + expect(screen.getByText("Fired")).toBeInTheDocument(); + }); + + it("shows when signal diagnostics are truncated", async () => { + vi.useRealTimers(); + + const wait: WorkflowTaskWait = { + exprCel: "approval_received", + inputs: { + deps: [], + signals: [{ key: "approval.received" }], + timers: [], + }, + phase: "waiting", + terms: [], + }; + + renderInspector(wait, { + loadTaskWaitDiagnostics: async () => ({ + exprResult: false, + inputs: { + deps: [], + signals: [ + { + includedCount: 10000, + key: "approval.received", + lastID: 9001n, + }, + ], + timers: [], + }, + inspectedAt: new Date("2026-04-21T18:00:00Z"), + phase: "waiting", + signalScanCount: 10000, + signalScanLimit: 10000, + terms: [], + truncated: true, + workflowAttempt: 1, + }), + }); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect(await screen.findByText("10,000 / 10,000")).toBeInTheDocument(); + expect( + screen.getByText( + "Signal diagnostics reached the scan limit, so expression and match counts are best effort.", + ), + ).toBeInTheDocument(); + }); + + it("explains unavailable dependency outputs before a wait starts", async () => { + vi.useRealTimers(); + + const wait: WorkflowTaskWait = { + exprCel: "draft_ready_to_send", + inputs: { + deps: [{ taskName: "verify_draft" }], + signals: [], + timers: [], + }, + phase: "not_started", + terms: [ + { + exprCel: `deps["verify_draft"].output.needs_human_review == false`, + kind: "generic", + label: "draft ready to send", + name: "draft_ready_to_send", + }, + ], + }; + + renderInspector(wait, { + loadTaskWaitDiagnostics: async () => ({ + evalError: "no such key: needs_human_review", + inputs: { + deps: [ + { + available: false, + state: "pending", + taskName: "verify_draft", + }, + ], + signals: [], + timers: [], + }, + inspectedAt: new Date("2026-04-21T18:00:00Z"), + phase: "not_started", + signalScanCount: 0, + signalScanLimit: 
10000, + terms: [], + truncated: false, + workflowAttempt: 1, + }), + }); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Details" })); + }); + + expect( + await screen.findByText("Waiting for dependency output."), + ).toBeInTheDocument(); + expect( + screen.queryByText("no such key: needs_human_review"), + ).not.toBeInTheDocument(); + }); +}); + +const renderInspector = ( + wait: WorkflowTaskWait, + props: { + focusRequest?: WaitFocusRequest; + loadTaskWaitDiagnostics?: TaskWaitDiagnosticsLoader; + } = {}, +) => { + return render( + ({ + inputs: { + deps: [], + signals: [], + timers: [], + }, + inspectedAt: new Date("2026-04-21T18:00:00Z"), + phase: wait.phase, + signalScanCount: 0, + signalScanLimit: 10000, + terms: [], + truncated: false, + workflowAttempt: 1, + })) + } + taskName="task/alpha" + wait={wait} + workflowID="wf-123" + />, + ); +}; + +const mockTaskSignalsFetch = ( + responses: Array<{ + body: unknown; + status?: number; + }>, +) => { + return vi.spyOn(globalThis, "fetch").mockImplementation(async () => { + const nextResponse = responses.shift(); + if (!nextResponse) { + throw new Error("Unexpected fetch call"); + } + + return new Response(JSON.stringify(nextResponse.body), { + headers: { "Content-Type": "application/json" }, + status: nextResponse.status ?? 
200, + }); + }); +}; diff --git a/src/components/WorkflowGateInspector.tsx b/src/components/WorkflowGateInspector.tsx new file mode 100644 index 00000000..e09ad20a --- /dev/null +++ b/src/components/WorkflowGateInspector.tsx @@ -0,0 +1,458 @@ +import { Subheading } from "@components/Heading"; +import PlaintextPanel from "@components/PlaintextPanel"; +import { ChevronDownIcon, ChevronRightIcon } from "@heroicons/react/24/outline"; +import { + getWorkflowTaskSignals, + getWorkflowTaskWaitDiagnostics, +} from "@services/workflows"; +import { useEffect, useMemo, useState } from "react"; + +import { + buildWaitTermViews, + getAutoOpenSignalEvidenceSurface, + getConditionSignalScope, + getSignalSurfaceKey, + getSignalSurfaceStateKey, + getSignalSurfaceTermName, + hasWaitDetails, + orderConditionsForSummary, + signalInspectorStateFromSignalList, + waitDiagnosticsErrorMessage, +} from "./WorkflowGateInspector.model"; +import { + emptySignalInspectorState, + emptyWaitDiagnosticsState, + type SignalHistorySurface, + type SignalInspectorState, + type WaitFocusRequest, + type WorkflowWaitInspectorProps, +} from "./WorkflowGateInspector.types"; +import { WaitTermViews } from "./WorkflowGateInspectorConditions"; +import { WaitDiagnosticsPanel } from "./WorkflowGateInspectorDiagnostics"; +import { AllTaskSignalsPanel } from "./WorkflowGateInspectorSignals"; +import { + WaitFacts, + WaitSection, + WaitStatusPill, + WaitSummary, +} from "./WorkflowGateInspectorSummary"; + +export type { + TaskSignalLoader, + TaskWaitDiagnosticsLoader, + WaitFocusRequest, +} from "./WorkflowGateInspector.types"; +export { + ConditionKindIcon, + WaitTermViews, +} from "./WorkflowGateInspectorConditions"; +export { WaitStatusPill } from "./WorkflowGateInspectorSummary"; + +export default function WorkflowWaitInspector({ + dependencyTasks, + focusRequest, + loadTaskSignals = getWorkflowTaskSignals, + loadTaskWaitDiagnostics = getWorkflowTaskWaitDiagnostics, + onSelectCondition, + taskName, + wait, + 
workflowID, +}: WorkflowWaitInspectorProps) { + const [detailsOpen, setDetailsOpen] = useState(false); + const [closedFocusRequestID, setClosedFocusRequestID] = useState(); + const [conditionFocusRequest, setConditionFocusRequest] = + useState(); + const [diagnosticsState, setDiagnosticsState] = useState( + emptyWaitDiagnosticsState, + ); + const [openSignalSurfaceContextKey, setOpenSignalSurfaceContextKey] = + useState(); + const [openSignalSurface, setOpenSignalSurface] = + useState(); + const [dismissedAutoOpenSignalKey, setDismissedAutoOpenSignalKey] = + useState(); + const [conditionSignalStates, setConditionSignalStates] = useState< + Record + >({}); + const [allSignalListState, setAllSignalListState] = useState( + emptySignalInspectorState, + ); + + const conditions = useMemo( + () => buildWaitTermViews(wait, dependencyTasks, diagnosticsState.value), + [dependencyTasks, diagnosticsState.value, wait], + ); + const matchedConditions = useMemo( + () => conditions.filter((condition) => condition.matched), + [conditions], + ); + const summaryConditions = useMemo( + () => orderConditionsForSummary(wait.summary, matchedConditions), + [matchedConditions, wait.summary], + ); + const autoOpenSignalSurfaceCandidate = useMemo( + () => getAutoOpenSignalEvidenceSurface(conditions), + [conditions], + ); + const conditionSignalScope = getConditionSignalScope(wait); + const allTaskSignalsScope = "history"; + const hasSignals = wait.inputs.signals.length > 0; + const signalHistoryKey = `${workflowID}:${taskName}:${wait.evidence?.workflowAttempt.toString() ?? ""}:${wait.phase}`; + const focusDetailsOpen = + focusRequest !== undefined && + closedFocusRequestID !== focusRequest.requestID; + const detailsVisible = detailsOpen || focusDetailsOpen; + const activeFocusRequest = focusRequest ?? conditionFocusRequest; + const storedOpenSignalSurface = + openSignalSurfaceContextKey === signalHistoryKey + ? 
openSignalSurface + : undefined; + const autoOpenSignalSurface = useMemo< + SignalHistorySurface | undefined + >(() => { + if ( + !detailsVisible || + !autoOpenSignalSurfaceCandidate || + dismissedAutoOpenSignalKey === + getSignalSurfaceStateKey(autoOpenSignalSurfaceCandidate) || + openSignalSurfaceContextKey === signalHistoryKey + ) { + return undefined; + } + + return autoOpenSignalSurfaceCandidate; + }, [ + autoOpenSignalSurfaceCandidate, + detailsVisible, + dismissedAutoOpenSignalKey, + openSignalSurfaceContextKey, + signalHistoryKey, + ]); + const currentOpenSignalSurface = + storedOpenSignalSurface ?? autoOpenSignalSurface; + const currentAllSignalListState = + openSignalSurfaceContextKey === signalHistoryKey + ? allSignalListState + : emptySignalInspectorState; + + useEffect(() => { + if (!detailsVisible || wait.phase === "resolved") return; + + const abortController = new AbortController(); + queueMicrotask(() => { + if (abortController.signal.aborted) return; + setDiagnosticsState({ isLoading: true }); + }); + void loadTaskWaitDiagnostics({ + signal: abortController.signal, + taskName, + workflowID, + }).then( + (diagnostics) => { + if (abortController.signal.aborted) return; + setDiagnosticsState({ isLoading: false, value: diagnostics }); + }, + (error) => { + if (abortController.signal.aborted) return; + setDiagnosticsState({ + error: waitDiagnosticsErrorMessage(error), + isLoading: false, + }); + }, + ); + + return () => abortController.abort(); + }, [ + detailsVisible, + loadTaskWaitDiagnostics, + taskName, + wait.phase, + workflowID, + ]); + + useEffect(() => { + if (!currentOpenSignalSurface) return; + + const abortController = new AbortController(); + const signalKey = getSignalSurfaceKey(currentOpenSignalSurface); + const termName = getSignalSurfaceTermName(currentOpenSignalSurface); + const scope = + currentOpenSignalSurface.kind === "condition" + ? 
conditionSignalScope + : allTaskSignalsScope; + + void loadTaskSignals({ + desc: true, + key: signalKey, + limit: 20, + scope, + signal: abortController.signal, + taskName, + termName, + workflowID, + }).then( + (signalList) => { + if (abortController.signal.aborted) return; + const nextState = signalInspectorStateFromSignalList(signalList); + if (currentOpenSignalSurface.kind === "condition") { + setConditionSignalStates((current) => ({ + ...current, + [getSignalSurfaceStateKey(currentOpenSignalSurface)]: nextState, + })); + return; + } + + setAllSignalListState(nextState); + }, + () => { + if (abortController.signal.aborted) return; + const nextState = { + ...emptySignalInspectorState, + error: "Unable to load signal history.", + }; + if (currentOpenSignalSurface.kind === "condition") { + setConditionSignalStates((current) => ({ + ...current, + [getSignalSurfaceStateKey(currentOpenSignalSurface)]: nextState, + })); + return; + } + + setAllSignalListState(nextState); + }, + ); + + return () => abortController.abort(); + }, [ + allTaskSignalsScope, + conditionSignalScope, + currentOpenSignalSurface, + loadTaskSignals, + taskName, + workflowID, + ]); + + const handleSelectCondition = (conditionName: string) => { + if (onSelectCondition) { + onSelectCondition(conditionName); + return; + } + + setDetailsOpen(true); + setConditionFocusRequest((current) => ({ + conditionName, + requestID: (current?.requestID ?? 
0) + 1, + })); + }; + + const handleToggleDetails = () => { + if (detailsVisible) { + setDetailsOpen(false); + setClosedFocusRequestID(focusRequest?.requestID); + return; + } + + setDetailsOpen(true); + setClosedFocusRequestID(undefined); + }; + + const handleToggleConditionSignals = (surface: SignalHistorySurface) => { + if (surface.kind !== "condition") return; + const stateKey = getSignalSurfaceStateKey(surface); + if ( + currentOpenSignalSurface?.kind === "condition" && + getSignalSurfaceStateKey(currentOpenSignalSurface) === stateKey + ) { + setOpenSignalSurface(undefined); + setDismissedAutoOpenSignalKey(stateKey); + return; + } + + setOpenSignalSurfaceContextKey(signalHistoryKey); + setOpenSignalSurface(surface); + setConditionSignalStates((current) => ({ + ...current, + [stateKey]: { + ...emptySignalInspectorState, + isLoading: true, + }, + })); + }; + + const handleToggleAllTaskSignals = () => { + if (currentOpenSignalSurface?.kind === "all") { + setOpenSignalSurface(undefined); + return; + } + + setOpenSignalSurfaceContextKey(signalHistoryKey); + setOpenSignalSurface({ kind: "all" }); + setAllSignalListState({ + ...emptySignalInspectorState, + isLoading: true, + }); + }; + + const handleLoadMoreSignals = async (surface: SignalHistorySurface) => { + const currentState = + surface.kind === "condition" + ? (conditionSignalStates[getSignalSurfaceStateKey(surface)] ?? + emptySignalInspectorState) + : currentAllSignalListState; + + if ( + !currentState.hasMore || + !currentState.nextCursorID || + currentState.isLoadingMore + ) { + return; + } + + if (surface.kind === "condition") { + const stateKey = getSignalSurfaceStateKey(surface); + setConditionSignalStates((current) => ({ + ...current, + [stateKey]: { + ...(current[stateKey] ?? 
emptySignalInspectorState), + error: undefined, + isLoadingMore: true, + }, + })); + } else { + setAllSignalListState((current) => ({ + ...current, + error: undefined, + isLoadingMore: true, + })); + } + + try { + const nextPage = await loadTaskSignals({ + cursorID: currentState.nextCursorID, + desc: true, + key: getSignalSurfaceKey(surface), + limit: 20, + scope: + surface.kind === "condition" + ? conditionSignalScope + : allTaskSignalsScope, + taskName, + termName: getSignalSurfaceTermName(surface), + workflowID, + }); + + const buildNextState = (current: SignalInspectorState) => ({ + error: undefined, + hasMore: nextPage.hasMore, + isLoading: false, + isLoadingMore: false, + nextCursorID: nextPage.nextCursorID, + scope: nextPage.scope, + signals: [...current.signals, ...nextPage.signals], + }); + + if (surface.kind === "condition") { + const stateKey = getSignalSurfaceStateKey(surface); + setConditionSignalStates((current) => ({ + ...current, + [stateKey]: buildNextState( + current[stateKey] ?? emptySignalInspectorState, + ), + })); + } else { + setAllSignalListState(buildNextState); + } + } catch { + if (surface.kind === "condition") { + const stateKey = getSignalSurfaceStateKey(surface); + setConditionSignalStates((current) => ({ + ...current, + [stateKey]: { + ...(current[stateKey] ?? emptySignalInspectorState), + error: "Unable to load more signal history.", + isLoadingMore: false, + }, + })); + } else { + setAllSignalListState((current) => ({ + ...current, + error: "Unable to load more signal history.", + isLoadingMore: false, + })); + } + } + }; + + return ( +
+
+
+ Wait condition + +
+ + + +
+ + {detailsVisible ? ( +
+ {hasWaitDetails(wait) ? ( + + + + ) : null} + + {wait.phase !== "resolved" ? ( + + ) : null} + + + + +
+ ) : null} + + {hasSignals ? ( +
+ +
+ ) : null} +
+ ); +} diff --git a/src/components/WorkflowGateInspector.types.ts b/src/components/WorkflowGateInspector.types.ts new file mode 100644 index 00000000..6d6db968 --- /dev/null +++ b/src/components/WorkflowGateInspector.types.ts @@ -0,0 +1,89 @@ +import { + getWorkflowTaskSignals, + getWorkflowTaskWaitDiagnostics, + type WorkflowTask, + type WorkflowTaskSignal, + type WorkflowTaskWait, + type WorkflowTaskWaitDiagnostics, +} from "@services/workflows"; + +export type SignalHistorySurface = + | { + kind: "all"; + } + | { + kind: "condition"; + signalKey: string; + termName?: string; + }; + +export type SignalInspectorState = { + error?: string; + hasMore: boolean; + isLoading: boolean; + isLoadingMore: boolean; + nextCursorID?: bigint; + signals: WorkflowTaskSignal[]; +}; + +export type TaskSignalLoader = typeof getWorkflowTaskSignals; +export type TaskWaitDiagnosticsLoader = typeof getWorkflowTaskWaitDiagnostics; + +export type WaitDiagnosticsState = { + error?: string; + isLoading: boolean; + value?: WorkflowTaskWaitDiagnostics; +}; + +export type WaitFocusRequest = { + conditionName: string; + requestID: number; +}; + +export type WaitSignalInput = WorkflowTaskWait["inputs"]["signals"][number]; +export type WaitTermResult = NonNullable< + WorkflowTaskWait["terms"][number]["result"] +>; + +export type WaitTermView = { + dependencyTask?: WorkflowTask; + exprCel?: string; + kind: string; + label: string; + matched: boolean; + result?: WorkflowTaskWait["terms"][number]["result"]; + signal?: WorkflowTaskWait["inputs"]["signals"][number]; + signalTermName?: string; + sortIndex: number; + technicalName: string; + timer?: WorkflowTaskWait["inputs"]["timers"][number]; +}; + +export type WaitTimerInput = WorkflowTaskWait["inputs"]["timers"][number]; + +export type WorkflowWaitInspectorProps = { + dependencyTasks?: Record; + focusRequest?: undefined | WaitFocusRequest; + loadTaskSignals?: TaskSignalLoader; + loadTaskWaitDiagnostics?: TaskWaitDiagnosticsLoader; + 
onSelectCondition?: (conditionName: string) => void; + taskName: string; + wait: WorkflowTaskWait; + workflowID: string; +}; + +export const emptyWaitDiagnosticsState: WaitDiagnosticsState = { + isLoading: false, +}; + +export const emptySignalInspectorState: SignalInspectorState = { + hasMore: false, + isLoading: false, + isLoadingMore: false, + signals: [], +}; + +export const loadingSignalInspectorState: SignalInspectorState = { + ...emptySignalInspectorState, + isLoading: true, +}; diff --git a/src/components/WorkflowGateInspectorConditions.tsx b/src/components/WorkflowGateInspectorConditions.tsx new file mode 100644 index 00000000..bb700d52 --- /dev/null +++ b/src/components/WorkflowGateInspectorConditions.tsx @@ -0,0 +1,620 @@ +import RelativeTimeFormatter from "@components/RelativeTimeFormatter"; +import { + ChevronDownIcon, + ChevronRightIcon, + ClockIcon, + InboxIcon, + LinkIcon, +} from "@heroicons/react/24/outline"; +import { type WorkflowTask, type WorkflowTaskWait } from "@services/workflows"; +import clsx from "clsx"; +import { type ReactNode, useEffect, useId, useRef, useState } from "react"; + +import { + conditionMatchesName, + formatTimerAnchorWait, + getConditionFocusKey, + getConditionSignalStateKey, + getConditionStateLabel, + getConditionStateTone, + getSignalSurfaceStateKey, + getTimerDelayLabel, + getWaitTermKindLabel, + signalSurfaceForCondition, +} from "./WorkflowGateInspector.model"; +import { + emptySignalInspectorState, + loadingSignalInspectorState, + type SignalHistorySurface, + type SignalInspectorState, + type WaitFocusRequest, + type WaitTermView, +} from "./WorkflowGateInspector.types"; +import { ConditionSignalEvidenceDisclosure } from "./WorkflowGateInspectorSignals"; + +const INLINE_CEL_MAX_LENGTH = 72; + +export const WaitTermViews = ({ + conditions, + focusRequest, + onLoadMore, + onToggleConditionSignals, + openSignalSurface, + signalListStates, + wait, +}: { + conditions: WaitTermView[]; + focusRequest: undefined | 
 WaitFocusRequest;
+  onLoadMore: (surface: SignalHistorySurface) => void;
+  onToggleConditionSignals: (surface: SignalHistorySurface) => void;
+  openSignalSurface: SignalHistorySurface | undefined;
+  signalListStates: Record;
+  wait: WorkflowTaskWait;
+}) => {
+  // Summary counter shown above the conditions table.
+  const matchedConditions = conditions.filter((condition) => condition.matched);
+  // Snapshot of the latest conditions so the focus effect below does not
+  // need `conditions` in its dependency list.
+  const latestConditionsRef = useRef(conditions);
+  // Row DOM nodes keyed by condition focus key, for scroll-into-view.
+  const conditionRowRefs = useRef(new Map());
+  // Last requestID already handled, so the same request is not replayed.
+  const handledFocusRequestIDRef = useRef(undefined);
+
+  useEffect(() => {
+    latestConditionsRef.current = conditions;
+  }, [conditions]);
+
+  // Scroll to and focus the requested condition row, exactly once per
+  // requestID; bail out silently when the condition is not found.
+  useEffect(() => {
+    if (!focusRequest) return;
+    if (handledFocusRequestIDRef.current === focusRequest.requestID) return;
+
+    const focusedCondition = latestConditionsRef.current.find((condition) =>
+      conditionMatchesName(condition, focusRequest.conditionName),
+    );
+    if (!focusedCondition) return;
+
+    const row = conditionRowRefs.current.get(
+      getConditionFocusKey(focusedCondition),
+    );
+    // scrollIntoView is optional-chained for environments without it.
+    row?.scrollIntoView?.({ behavior: "smooth", block: "center" });
+    row?.focus({ preventScroll: true });
+    handledFocusRequestIDRef.current = focusRequest.requestID;
+  }, [focusRequest]);
+
+  // Ref callback: register the node on mount, unregister on unmount.
+  const registerConditionRow = (
+    condition: WaitTermView,
+    node: HTMLDivElement | null,
+  ) => {
+    const key = getConditionFocusKey(condition);
+    if (node) {
+      conditionRowRefs.current.set(key, node);
+      return;
+    }
+
+    conditionRowRefs.current.delete(key);
+  };
+
+  // NOTE(review): the JSX element tags/attributes below were truncated in
+  // this patch view; confirm against the committed file before editing.
+  return (
+
+
+ {matchedConditions.length.toString()} of {conditions.length.toString()}{" "}
+ conditions satisfied
+
+ +
+
+ Status
+ Condition
+
+
+ {conditions.map((condition) => {
+ const conditionSignalState = condition.signal
+ ? signalListStates[getConditionSignalStateKey(condition)]
+ : undefined;
+ const conditionSignalsOpen =
+ condition.signal !== undefined &&
+ openSignalSurface?.kind === "condition" &&
+ getSignalSurfaceStateKey(openSignalSurface) ===
+ getConditionSignalStateKey(condition);
+
+ return (
+
+ );
+ })}
+
+
+  );
+};
+
+// One condition row: state badge, label, metadata, and (when present) the
+// evidence panel and signal-history disclosure.
+const ConditionRow = ({
+  condition,
+  focused,
+  onLoadMore,
+  onRegisterRow,
+  onToggleConditionSignals,
+  openSignalSurface,
+  signalListState,
+  wait,
+}: {
+  condition: WaitTermView;
+  focused: boolean;
+  onLoadMore: (surface: SignalHistorySurface) => void;
+  onRegisterRow: (condition: WaitTermView, node: HTMLDivElement | null) => void;
+  onToggleConditionSignals: (surface: SignalHistorySurface) => void;
+  openSignalSurface: SignalHistorySurface | undefined;
+  signalListState: SignalInspectorState;
+  wait: WorkflowTaskWait;
+}) => {
+  // Tone (styling) reflects the condition state within the wait's phase.
+  const stateTone = getConditionStateTone(condition, wait.phase);
+  const signal = condition.signal;
+  const timer = condition.timer;
+  // Evidence section renders only when there is something to show.
+  const hasEvidence =
+    condition.dependencyTask !== undefined ||
+    signal !== undefined ||
+    timer !== undefined;
+  const showRawTechnicalName = Boolean(condition.exprCel || timer);
+  const metadataContent: ReactNode = timer ? (
+
+  ) : (
+    condition.technicalName
+  );
+
+  return (
+
 onRegisterRow(condition, node)}
+      tabIndex={-1}
+    >
+      {/* NOTE(review): element tags/attributes truncated in this patch
+          view; structure below is header, metadata, evidence, signals. */}
+
+
+
+ +
+
+ {condition.label}
+
+ {showRawTechnicalName ? (
+
+ {condition.technicalName}
+
+ ) : null}
+
+ +
+ {timer ? (
+ {metadataContent}
+ ) : condition.exprCel ? (
+
+ ) : (
+ {metadataContent}
+ )}
+
+
+ +
+ {hasEvidence ? (
+
+ +
+ ) : null}
+
+ +
+ {signal ? (
+
+ onToggleConditionSignals(signalSurfaceForCondition(condition))
+ }
+ open={
+ openSignalSurface?.kind === "condition" &&
+ getSignalSurfaceStateKey(openSignalSurface) ===
+ getConditionSignalStateKey(condition)
+ }
+ phase={wait.phase}
+ signal={signal}
+ signalListState={signalListState}
+ surface={signalSurfaceForCondition(condition)}
+ />
+ ) : null}
+
+  );
+};
+
+// Inline CEL expression display with a show/hide toggle for long or
+// multi-line expressions; short ones render inline unchanged.
+const ConditionExpression = ({
+  conditionLabel,
+  expression,
+}: {
+  conditionLabel: string;
+  expression: string;
+}) => {
+  const [expanded, setExpanded] = useState(false);
+  // Stable id links the toggle control to the expanded text for a11y.
+  const expressionID = useId();
+  const isLongExpression =
+    expression.length > INLINE_CEL_MAX_LENGTH || /[\r\n]/.test(expression);
+
+  if (!isLongExpression) {
+    return (
+
+ {expression}
+
+    );
+  }
+
+  const previewExpression = getExpressionPreview(expression);
+  const buttonLabel = `${expanded ? "Hide" : "Show"} full CEL expression for ${conditionLabel}`;
+
+  return (
+    <>
+
+ {expanded ? (
+          {expression}
+        
+ ) : null}
+
+  );
+};
+
+// Collapse whitespace and truncate for the inline preview. An ellipsis is
+// appended even when the collapsed text fits — presumably deliberate,
+// since newlines were removed so the preview is never verbatim; TODO
+// confirm this is intended rather than an off-by-one in the guard.
+const getExpressionPreview = (expression: string): string => {
+  const oneLineExpression = expression.replace(/\s+/g, " ").trim();
+  if (oneLineExpression.length <= INLINE_CEL_MAX_LENGTH) {
+    return `${oneLineExpression}...`;
+  }
+
+  return `${oneLineExpression.slice(0, INLINE_CEL_MAX_LENGTH)}...`;
+};
+
+// Dispatch to the evidence renderer for the condition's input: timer,
+// then signal, then dependency task (first match wins), else nothing.
+const ConditionEvidence = ({
+  condition,
+  wait,
+}: {
+  condition: WaitTermView;
+  wait: WorkflowTaskWait;
+}) => {
+  const timer = condition.timer;
+  if (timer) {
+    return ;
+  }
+
+  const signal = condition.signal;
+  if (signal) {
+    return (
+
+    );
+  }
+
+  const dependencyTask = condition.dependencyTask;
+  if (dependencyTask) {
+    return (
+
+    );
+  }
+
+  return null;
+};
+
+// Human-readable timer definition: an optional delay relative to an
+// anchor event (task finalized, wait started, workflow created). Unknown
+// anchor kinds fall back to the raw kind string with underscores spaced.
+const TimerConditionDefinition = ({
+  timer,
+}: {
+  timer: WorkflowTaskWait["inputs"]["timers"][number];
+}) => {
+  const delay = getTimerDelayLabel(timer);
+  const anchor = timer.anchor;
+
+  // No delay: the timer fires at the anchor moment itself.
+  if (!delay) {
+    if (!anchor) return <>Immediate;
+    switch (anchor.kind) {
+      case "task_finalized_at":
+        return anchor.task ? (
+          <>
+            When finalizes
+
+        ) : (
+          <>When dependency finalizes
+        );
+      case "wait_started_at":
+        return <>When wait starts;
+      case "workflow_created_at":
+        return <>When workflow starts;
+      default:
+        return anchor.task ? (
+          <>
+            {anchor.kind.replaceAll("_", " ")} (
+            )
+
+        ) : (
+          <>{anchor.kind.replaceAll("_", " ")}
+        );
+    }
+  }
+
+  if (!anchor) return <>After {delay};
+  switch (anchor.kind) {
+    case "task_finalized_at":
+      return anchor.task ? (
+        <>
+          {delay} after finalizes
+
+      ) : (
+        <>{delay} after dependency finalizes
+      );
+    case "wait_started_at":
+      return <>{delay} after wait starts;
+    case "workflow_created_at":
+      return <>{delay} after workflow starts;
+    default:
+      return anchor.task ? (
+        <>
+          {delay} after {anchor.kind.replaceAll("_", " ")} (
+          )
+
+      ) : (
+        <>
+          {delay} after {anchor.kind.replaceAll("_", " ")}
+
+      );
+  }
+};
+
+// Task-name chip used inside timer descriptions.
+const TimerTaskName = ({ taskName }: { taskName: string }) => (
+
+ {taskName}
+
+);
+
+// Evidence row for a timer condition: fired/fires plus the target time.
+// `fired` defaults to false when no result has been recorded yet.
+const TimerConditionEvidence = ({
+  timer,
+}: {
+  timer: WorkflowTaskWait["inputs"]["timers"][number];
+}) => {
+  const fired = timer.result?.fired ?? false;
+  return (
+
+
+      {fired ? "Fired" : "Fires"}
+      {" "}
+
+
+
+  );
+};
+
+// Evidence for a dependency-task condition: the finalized timestamp when
+// available, otherwise a snapshot-timing line once the condition matched.
+const DependencyConditionEvidence = ({
+  condition,
+  wait,
+}: {
+  condition: {
+    dependencyTask: WorkflowTask;
+  } & WaitTermView;
+  wait: WorkflowTaskWait;
+}) => {
+  if (condition.dependencyTask.finalizedAt) {
+    return (
+
+
+      Finalized
+      {" "}
+
+
+    );
+  }
+
+  return condition.matched ? (
+
+  ) : null;
+};
+
+// Evidence for a signal condition: term/signal counters plus snapshot
+// timing when the condition matched.
+const SignalConditionEvidence = ({
+  condition,
+  wait,
+}: {
+  condition: {
+    signal: WorkflowTaskWait["inputs"]["signals"][number];
+  } & WaitTermView;
+  wait: WorkflowTaskWait;
+}) => {
+  const signalResult = condition.signal.result;
+  // `?? undefined` normalizes a null result so optional chaining below
+  // behaves uniformly.
+  const termResult = condition.result ?? undefined;
+  return (
+
+
+      {/* NOTE(review): field markup truncated in this patch view. */}
+
+
+ {termResult ? (
+
+ ) : null}
+ {signalResult?.lastIncludedID ? (
+
+ ) : null}
+ {termResult?.lastMatchedID ? (
+
+ ) : null}
+
+ {condition.matched ? (
+
+ ) : null}
+
+  );
+};
+
+// "Matched/resolved at …" line from the wait snapshot. Prefers the
+// resolution time and falls back to the evidence evaluation time; renders
+// nothing when neither is available.
+const ConditionSnapshotTiming = ({
+  label,
+  resolvedLabel,
+  wait,
+}: {
+  label: string;
+  resolvedLabel: string;
+  wait: WorkflowTaskWait;
+}) => {
+  const time = wait.resolvedAt ?? wait.evidence?.evaluatedAt;
+  if (!time) return null;
+
+  return (
+
+
+      {label}
+      {" "}
+      {wait.resolvedAt ? resolvedLabel : "by evaluation"}{" "}
+
+
+  );
+};
+
+// Small label/value pair used inside evidence panels.
+const CompactEvidenceField = ({
+  label,
+  value,
+}: {
+  label: string;
+  value: ReactNode;
+}) => {
+  return (
+
+ {label}
+
+ {value}
+
+
+  );
+};
+
+// Pill showing the condition kind's display label with its icon.
+const ConditionKindLabel = ({ kind }: { kind: string }) => {
+  return (
+
+
+ {getWaitTermKindLabel(kind)}
+
+  );
+};
+
+// Icon for a condition kind. NOTE(review): this definition continues
+// beyond the visible portion of the patch hunk.
+export const ConditionKindIcon = ({
+  className,
+  kind,
+}: {
+  className?: string;
+  kind: string;
+}) => {
+  switch (kind) {
+    case "dep_input":
+    case "generic":
+      return (
+