From 7c737c7c6f697d9d1cce5ab910ef7d68c2655489 Mon Sep 17 00:00:00 2001 From: dimmur-brw Date: Fri, 28 Nov 2025 11:06:02 +0100 Subject: [PATCH 1/2] feat: show all generation jobs in AIField in modal (#4265) * Show jobs for AIField in modal * Use list_job endpoint * Remove console.log --------- Co-authored-by: Davide Silvestri --- backend/src/baserow/api/jobs/serializers.py | 118 +++++++ backend/src/baserow/api/jobs/views.py | 53 ++- .../baserow/contrib/database/table/handler.py | 6 +- backend/src/baserow/core/jobs/handler.py | 15 +- backend/src/baserow/core/jobs/registries.py | 16 +- .../src/baserow/test_utils/fixtures/job.py | 17 + .../test_export_applications_views.py | 2 + .../tests/baserow/api/jobs/test_jobs_views.py | 304 +++++++++++------- .../api/workflows/test_workflow_views.py | 27 +- .../api/airtable/test_airtable_views.py | 81 +++-- backend/tests/baserow/ws/test_ws_signals.py | 6 +- .../4264_show_jobs_for_ai_field_in_modal.json | 9 + .../src/baserow_premium/fields/job_types.py | 23 ++ .../api/fields/test_ai_field_views.py | 95 ++++++ .../assets/scss/components/all.scss | 1 + .../components/generate_ai_values_modal.scss | 75 +++++ .../field/GenerateAIValuesFormFooter.vue | 2 +- .../field/GenerateAIValuesJobListItem.vue | 96 ++++++ .../field/GenerateAIValuesModal.vue | 106 +++++- .../modules/baserow_premium/jobTypes.js | 38 +++ .../modules/baserow_premium/locales/en.json | 13 +- .../modules/baserow_premium/services/field.js | 16 + web-frontend/modules/core/jobTypes.js | 21 +- web-frontend/modules/core/locales/en.json | 2 +- web-frontend/modules/core/mixins/job.js | 32 +- web-frontend/modules/core/store/job.js | 8 +- 26 files changed, 952 insertions(+), 230 deletions(-) create mode 100644 changelog/entries/unreleased/feature/4264_show_jobs_for_ai_field_in_modal.json create mode 100644 premium/web-frontend/modules/baserow_premium/assets/scss/components/generate_ai_values_modal.scss create mode 100644 premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue diff --git a/backend/src/baserow/api/jobs/serializers.py b/backend/src/baserow/api/jobs/serializers.py index 78fda3e494..2e0159670f 100644 --- a/backend/src/baserow/api/jobs/serializers.py +++ b/backend/src/baserow/api/jobs/serializers.py @@ -1,5 +1,7 @@ from django.utils.functional import lazy +from drf_spectacular.extensions import OpenApiSerializerExtension +from drf_spectacular.plumbing import force_instance from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import extend_schema_field from rest_framework import serializers @@ -42,6 +44,8 @@ class Meta: "progress_percentage", "state", "human_readable_error", + "created_on", + "updated_on", ) extra_kwargs = { "id": {"read_only": True}, @@ -63,9 +67,31 @@ class Meta: fields = ("user_id", "type") +class JobTypeFiltersSerializer(serializers.Serializer): + """ + Base serializer for job type-specific filters. This serves as the base class + for all job type filter serializers and uses 'type' as a discriminator field. + """ + + type = serializers.ChoiceField( + choices=lazy(job_type_registry.get_types, list)(), + required=True, + help_text="The type of job to filter for. 
Determines which additional filter fields are available.", + ) + + class ListJobQuerySerializer(serializers.Serializer): states = serializers.CharField(required=False) job_ids = serializers.CharField(required=False) + type = serializers.ChoiceField( + choices=lazy(job_type_registry.get_types, list)(), + required=False, + help_text="The type of job to filter for. Determines which additional filter fields are available.", + ) + offset = serializers.IntegerField(required=False, min_value=0) + limit = serializers.IntegerField( + required=False, min_value=1, max_value=100, default=20 + ) def validate_states(self, value): if not value: @@ -95,3 +121,95 @@ def validate_job_ids(self, value): f"Job id {job_id} is not a valid integer." ) return validated_job_ids + + def validate(self, attrs): + job_type_name = attrs.get("type") + + # Collect type-specific filters in a separate dict + type_filters = {} + + if job_type_name: + job_type = job_type_registry.get(job_type_name) + filters_serializer_class = job_type.get_filters_serializer() + + if filters_serializer_class: + filters_data = {} + + # Add any type-specific fields from initial_data + filters_serializer = filters_serializer_class() + + for field_name in filters_serializer.fields.keys(): + if field_name in self.initial_data: + filters_data[field_name] = self.initial_data[field_name] + + # Validate using the type-specific serializer + filters_serializer = filters_serializer_class(data=filters_data) + if filters_serializer.is_valid(): + for field_name, value in filters_serializer.validated_data.items(): + # if the field starts with the job_type name to disambiguate + # the query parameter, remove it + field_key = field_name + if field_name.startswith(f"{job_type.type}_"): + field_key = field_name[len(job_type.type) + 1 :] + type_filters[field_key] = value + else: + raise serializers.ValidationError(filters_serializer.errors) + + # Add type_filters dict to attrs for easy access in the view + attrs["type_filters"] = type_filters + attrs["job_type_name"] = job_type_name + + return attrs + + +class ListJobQuerySerializerExtension(OpenApiSerializerExtension): + """ + Custom OpenAPI serializer extension that dynamically adds type-specific filter + fields to the ListJobQuerySerializer based on the job registry. This creates a flat + parameter list where type-specific fields appear when the corresponding type is + selected, since it's not possible to use a discriminator in query parameters. + """ + + target_class = "baserow.api.jobs.serializers.ListJobQuerySerializer" + + def map_serializer(self, auto_schema, direction): + """ + Generate the schema by adding all type-specific fields from job filters + serializers to the base ListJobQuerySerializer properties. 
+ """ + + schema = auto_schema._map_serializer( + self.target, direction, bypass_extensions=True + ) + + properties = schema.get("properties", {}) + base_field_names = set(ListJobQuerySerializer().fields.keys()) + + # Collect all type-specific fields from job registry + for job_type in job_type_registry.get_all(): + filters_serializer_class = job_type.get_filters_serializer() + if ( + not filters_serializer_class + or filters_serializer_class == JobTypeFiltersSerializer + ): + continue + + serializer = force_instance(filters_serializer_class) + + for field_name, field in serializer.fields.items(): + # Skip base fields and the type field + if field_name in base_field_names or field_name == "type": + continue + + field_schema = auto_schema._map_serializer_field(field, direction) + + help_text = field_schema.get("description", "") + field_schema[ + "description" + ] = f"**[Only for type='{job_type.type}']** {help_text}" + + if field_name not in properties: + properties[field_name] = field_schema + + schema["properties"] = properties + return schema diff --git a/backend/src/baserow/api/jobs/views.py b/backend/src/baserow/api/jobs/views.py index cc84281797..eaa0993303 100644 --- a/backend/src/baserow/api/jobs/views.py +++ b/backend/src/baserow/api/jobs/views.py @@ -13,6 +13,7 @@ ) from baserow.api.schemas import get_error_schema from baserow.api.utils import DiscriminatorCustomFieldsMappingSerializer +from baserow.core.db import specific_iterator from baserow.core.jobs.exceptions import ( JobDoesNotExist, JobNotCancellable, @@ -33,30 +34,15 @@ class JobsView(APIView): permission_classes = (IsAuthenticated,) @extend_schema( - parameters=[ - OpenApiParameter( - name="states", - location=OpenApiParameter.QUERY, - type=OpenApiTypes.STR, - description="A comma separated list of jobs state to look for. " - "The only possible values are: `pending`, `finished`, `failed` and `cancelled`. " - "It's possible to exclude a state by prefixing it with a `!`. ", - ), - OpenApiParameter( - name="job_ids", - location=OpenApiParameter.QUERY, - type=OpenApiTypes.STR, - description="A comma separated list of job ids in the desired order." - "The jobs will be returned in the same order as the ids." - "If a job id is not found it will be ignored.", - ), - ], + parameters=[ListJobQuerySerializer], tags=["Jobs"], operation_id="list_job", description=( "List all existing jobs. Jobs are task executed asynchronously in the " - "background. You can use the `get_job` endpoint to read the current" - "progress of a the job." + "background. You can use the `get_job` endpoint to read the current " + "progress of the job. The available query parameters depend on the job type " + "selected via the `type` parameter. Each job type may support additional " + "type-specific filter parameters." 
), responses={ 200: DiscriminatorCustomFieldsMappingSerializer( @@ -68,22 +54,33 @@ class JobsView(APIView): def get(self, request, query_params): states = query_params.get("states", None) job_ids = query_params.get("job_ids", None) + offset = query_params.get("offset", 0) + limit = query_params.get("limit", 20) + + # Get job type and filters from the validated data + job_type_name = query_params.get("job_type_name", None) + type_filters = query_params.get("type_filters", {}) + + base_model = None + if job_type_name: + job_type = job_type_registry.get(job_type_name) + base_model = job_type.model_class jobs = JobHandler.get_jobs_for_user( - request.user, filter_states=states, filter_ids=job_ids - ) + request.user, + filter_states=states, + filter_ids=job_ids, + base_model=base_model, + type_filters=type_filters if type_filters else None, + )[offset : offset + limit] - # FIXME: job.specific makes a query for each job to get the specific instance. - # As long as we have max_count=1 for each job type, there's not much we can do, - # but this should be optimized in the future if we allow multiple jobs of the - # same type. serialized_jobs = [ job_type_registry.get_serializer( - job.specific, + job, JobSerializer, context={"request": request}, ).data - for job in jobs + for job in specific_iterator(jobs) ] return Response({"jobs": serialized_jobs}) diff --git a/backend/src/baserow/contrib/database/table/handler.py b/backend/src/baserow/contrib/database/table/handler.py index f8b0c42158..0dbea657fb 100644 --- a/backend/src/baserow/contrib/database/table/handler.py +++ b/backend/src/baserow/contrib/database/table/handler.py @@ -279,8 +279,10 @@ def list_workspace_tables( table_qs = base_queryset if base_queryset else Table.objects.all() - table_qs = table_qs.filter(database__workspace=workspace).select_related( - "database__workspace", "data_sync" + table_qs = ( + table_qs.filter(database__workspace=workspace) + .select_related("database__workspace", "data_sync") + .order_by("database_id", "order", "id") ) if not include_trashed: diff --git a/backend/src/baserow/core/jobs/handler.py b/backend/src/baserow/core/jobs/handler.py index b28fce3eb4..97eaa47535 100644 --- a/backend/src/baserow/core/jobs/handler.py +++ b/backend/src/baserow/core/jobs/handler.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta, timezone -from typing import List, Optional, Type +from typing import Any, Dict, List, Optional, Type from django.conf import settings from django.contrib.auth.models import AbstractUser @@ -112,6 +112,8 @@ def get_jobs_for_user( user: AbstractUser, filter_states: Optional[List[str]], filter_ids: Optional[List[int]], + base_model: Optional[Type[AnyJob]] = None, + type_filters: Optional[Dict[str, Any]] = None, ) -> QuerySet: """ Returns all jobs belonging to the specified user. @@ -120,9 +122,15 @@ def get_jobs_for_user( :param filter_states: A list of states that the jobs should have, or not have if prefixed with a !. :param filter_ids: A list of specific job ids to return. + :param base_model: An optional Job model. + :param type_filters: Optional type-specific filters (e.g., field_id for + GenerateAIValuesJob). :return: A QuerySet with the filtered jobs for the user. 
""" + if base_model is None: + base_model = Job + def get_job_states_filter(states): states_q = Q() for state in states: @@ -132,7 +140,7 @@ def get_job_states_filter(states): states_q |= Q(state=state) return states_q - queryset = Job.objects.filter(user=user).order_by("-updated_on") + queryset = base_model.objects.filter(user=user).order_by("-id") if filter_states: queryset = queryset.filter(get_job_states_filter(filter_states)) @@ -140,6 +148,9 @@ def get_job_states_filter(states): if filter_ids: queryset = queryset.filter(id__in=filter_ids) + if type_filters: + queryset = queryset.filter(**type_filters) + return queryset.select_related("content_type") @classmethod diff --git a/backend/src/baserow/core/jobs/registries.py b/backend/src/baserow/core/jobs/registries.py index 97ffd28c15..63dbb67188 100644 --- a/backend/src/baserow/core/jobs/registries.py +++ b/backend/src/baserow/core/jobs/registries.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any, Dict, Type from django.contrib.auth.models import AbstractUser @@ -164,6 +164,20 @@ def response_serializer_class(self): meta_ref_name=f"{self.__class__.__name__}ResponseSerializer", ) + def get_filters_serializer(self) -> Type[serializers.Serializer] | None: + """ + This method enables job types to define custom filters for job listing + operations. Since query parameters cannot utilize Discriminator fields and must + be flattened, all filter field names should be prefixed with the job type name + followed by an underscore to prevent naming conflicts between different job + types. + + :return: A serializer class extending JobTypeFiltersSerializer, or None if no + type-specific filters are needed. + """ + + return None + class JobTypeRegistry( CustomFieldsRegistryMixin, diff --git a/backend/src/baserow/test_utils/fixtures/job.py b/backend/src/baserow/test_utils/fixtures/job.py index c9547ea2ed..fb7fa9811e 100644 --- a/backend/src/baserow/test_utils/fixtures/job.py +++ b/backend/src/baserow/test_utils/fixtures/job.py @@ -1,3 +1,5 @@ +from typing import Type + from rest_framework import serializers from rest_framework.status import HTTP_404_NOT_FOUND @@ -16,6 +18,16 @@ class TestException(Exception): ... 
+class TmpJobType1FiltersSerializer(serializers.Serializer): + """Just for testing: expose a filter on progress_percentage""" + + tmp_job_type_1_progress_percentage = serializers.IntegerField( + min_value=0, + required=False, + help_text="Filter by the progress percentage.", + ) + + class TmpJobType1(JobType): type = "tmp_job_type_1" @@ -51,6 +63,11 @@ def prepare_values(self, values, user): def run(self, job, progress): pass + def get_filters_serializer(self) -> Type[serializers.Serializer] | None: + """Returns the filters serializer for this job type.""" + + return TmpJobType1FiltersSerializer + class TmpJobType2(JobType): type = "tmp_job_type_2" diff --git a/backend/tests/baserow/api/import_export/test_export_applications_views.py b/backend/tests/baserow/api/import_export/test_export_applications_views.py index caa18dcf3a..e515150ba8 100644 --- a/backend/tests/baserow/api/import_export/test_export_applications_views.py +++ b/backend/tests/baserow/api/import_export/test_export_applications_views.py @@ -121,6 +121,7 @@ def test_exporting_empty_workspace( job_id = response_json["id"] assert response_json == { "created_on": run_time, + "updated_on": run_time, "exported_file_name": None, "human_readable_error": "", "id": job_id, @@ -200,6 +201,7 @@ def test_exporting_workspace_with_single_empty_database( job_id = response_json["id"] assert response_json == { "created_on": run_time, + "updated_on": run_time, "exported_file_name": None, "human_readable_error": "", "id": job_id, diff --git a/backend/tests/baserow/api/jobs/test_jobs_views.py b/backend/tests/baserow/api/jobs/test_jobs_views.py index 79d4eaa246..3bbdcedc96 100644 --- a/backend/tests/baserow/api/jobs/test_jobs_views.py +++ b/backend/tests/baserow/api/jobs/test_jobs_views.py @@ -4,6 +4,7 @@ from django.urls import reverse import pytest +from freezegun import freeze_time from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND from baserow.core.jobs.constants import JOB_CANCELLED @@ -18,115 +19,125 @@ def test_create_job(mock_run_async, data_fixture, api_client): data_fixture.register_temp_job_types() - user, token = data_fixture.create_user_and_token() - - response = api_client.post( - reverse("api:jobs:list"), - {}, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" - assert response.json() == { - "error": "ERROR_REQUEST_BODY_VALIDATION", - "detail": { - "type": [{"error": "This field is required.", "code": "required"}], - }, - } + user = data_fixture.create_user() - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "tmp_job_type_1", - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" - assert response.json() == { - "error": "ERROR_REQUEST_BODY_VALIDATION", - "detail": { - "test_request_field": [ - {"error": "This field is required.", "code": "required"} - ], - }, - } + run_time = "2023-01-01T12:00:00Z" + with freeze_time("2023-01-01 12:00:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + reverse("api:jobs:list"), + {}, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" + assert response.json() == { + "error": "ERROR_REQUEST_BODY_VALIDATION", + "detail": { + "type": [{"error": "This field is required.", "code": 
"required"}], + }, + } - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "tmp_job_type_1", - "test_request_field": "test", - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" - assert response.json() == { - "error": "ERROR_REQUEST_BODY_VALIDATION", - "detail": { - "test_request_field": [ - {"error": "A valid integer is required.", "code": "invalid"} - ], - }, - } + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_1", + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" + assert response.json() == { + "error": "ERROR_REQUEST_BODY_VALIDATION", + "detail": { + "test_request_field": [ + {"error": "This field is required.", "code": "required"} + ], + }, + } - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "tmp_job_type_3", - "test_request_field": 1, - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_404_NOT_FOUND - assert response.json()["error"] == "TEST_EXCEPTION" + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_1", + "test_request_field": "test", + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION" + assert response.json() == { + "error": "ERROR_REQUEST_BODY_VALIDATION", + "detail": { + "test_request_field": [ + {"error": "A valid integer is required.", "code": "invalid"} + ], + }, + } - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "tmp_job_type_1", - "test_request_field": 1, - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_200_OK - job = Job.objects.all().first() + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_3", + "test_request_field": 1, + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_404_NOT_FOUND + assert response.json()["error"] == "TEST_EXCEPTION" - assert response.json() == { - "id": job.id, - "type": "tmp_job_type_1", - "test_field": 42, - "state": "pending", - "progress_percentage": 0, - "human_readable_error": "", - } - mock_run_async.delay.assert_called() + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_1", + "test_request_field": 1, + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_200_OK + job = Job.objects.all().first() - response = api_client.post( - reverse("api:jobs:list"), - { + assert response.json() == { + "id": job.id, + "created_on": run_time, + "updated_on": run_time, "type": "tmp_job_type_1", - "test_request_field": 1, - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) - assert response.status_code == HTTP_400_BAD_REQUEST - assert response.json()["error"] == "ERROR_MAX_JOB_COUNT_EXCEEDED" + "test_field": 42, + "state": "pending", + "progress_percentage": 0, + "human_readable_error": "", + } + mock_run_async.delay.assert_called() + + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_1", + "test_request_field": 1, + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_400_BAD_REQUEST + assert response.json()["error"] == "ERROR_MAX_JOB_COUNT_EXCEEDED" @pytest.mark.django_db def test_list_jobs(data_fixture, api_client): 
user, token = data_fixture.create_user_and_token() - job_1 = data_fixture.create_fake_job(user=user) - job_2 = data_fixture.create_fake_job(user=user, state="failed") - job_3 = data_fixture.create_fake_job() + + with freeze_time("2023-01-01 12:00:00"): + job_1 = data_fixture.create_fake_job(user=user) + job_2 = data_fixture.create_fake_job(user=user, state="failed") + job_3 = data_fixture.create_fake_job() + url = reverse("api:jobs:list") response = api_client.get(url, HTTP_AUTHORIZATION=f"JWT {token}") job_1_json = { "id": job_1.id, + "created_on": "2023-01-01T12:00:00Z", + "updated_on": "2023-01-01T12:00:00Z", "type": "tmp_job_type_1", "progress_percentage": 0, "state": "pending", @@ -137,6 +148,8 @@ def test_list_jobs(data_fixture, api_client): job_2_json = { "id": job_2.id, "type": "tmp_job_type_1", + "created_on": "2023-01-01T12:00:00Z", + "updated_on": "2023-01-01T12:00:00Z", "progress_percentage": 0, "state": "failed", "human_readable_error": "", @@ -211,11 +224,51 @@ def test_list_jobs(data_fixture, api_client): } +@pytest.mark.django_db +@patch("baserow.core.jobs.handler.run_async_job") +def test_list_jobs_with_type_specific_filters(mock_run_async, data_fixture, api_client): + """Test that type-specific filters work correctly and are isolated per job type.""" + + data_fixture.register_temp_job_types() + user, token = data_fixture.create_user_and_token() + job = data_fixture.create_fake_job(user=user, type="tmp_job_type_1") + + response = api_client.get( + reverse("api:jobs:list"), + { + "non_existing_field": "will_be_ignored", + "type": "tmp_job_type_1", + "test_request_field": 1, + "tmp_job_type_1_progress_percentage": 100, + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response.status_code == HTTP_200_OK + assert len(response.json()["jobs"]) == 0 + + response = api_client.get( + reverse("api:jobs:list"), + { + "type": "tmp_job_type_1", + "test_request_field": 1, + "tmp_job_type_1_progress_percentage": 0, + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) + + assert response.status_code == HTTP_200_OK + assert len(response.json()["jobs"]) == 1 + assert response.json()["jobs"][0]["id"] == job.id + + @pytest.mark.django_db def test_get_job(data_fixture, api_client): user, token = data_fixture.create_user_and_token() - job_1 = data_fixture.create_fake_job(user=user) - job_2 = data_fixture.create_fake_job() + + run_time = "2023-01-01T12:00:00Z" + with freeze_time("2023-01-01 12:00:00"): + job_1 = data_fixture.create_fake_job(user=user) + job_2 = data_fixture.create_fake_job() response = api_client.get( reverse( @@ -239,6 +292,8 @@ def test_get_job(data_fixture, api_client): assert json == { "id": job_1.id, + "created_on": run_time, + "updated_on": run_time, "type": "tmp_job_type_1", "progress_percentage": 0, "state": "pending", @@ -246,10 +301,12 @@ def test_get_job(data_fixture, api_client): "test_field": 42, } - job_1.progress_percentage = 50 - job_1.state = "failed" - job_1.human_readable_error = "Wrong" - job_1.save() + updated_time = "2023-01-01T13:00:00Z" + with freeze_time("2023-01-01 13:00:00"): + job_1.progress_percentage = 50 + job_1.state = "failed" + job_1.human_readable_error = "Wrong" + job_1.save() response = api_client.get( reverse( @@ -263,6 +320,8 @@ def test_get_job(data_fixture, api_client): assert json == { "id": job_1.id, + "created_on": run_time, + "updated_on": updated_time, "type": "tmp_job_type_1", "progress_percentage": 50, "state": "failed", @@ -320,13 +379,16 @@ def run(self, job, progress): assert job assert job.id assert job.specific - assert resp == 
{ - "id": job.id, - "type": IdlingJobType.type, - "state": "pending", - "progress_percentage": 0, - "human_readable_error": "", - } + assert is_dict_subset( + { + "id": job.id, + "type": IdlingJobType.type, + "state": "pending", + "progress_percentage": 0, + "human_readable_error": "", + }, + resp, + ) with test_thread(run_async_job.apply, args=(job.id,)) as t: assert job.pending, job.get_cached_state() @@ -402,13 +464,16 @@ def run(self, job, progress): assert job assert job.id assert job.specific - assert resp == { - "id": job.id, - "type": IdlingJobType.type, - "state": "pending", - "progress_percentage": 0, - "human_readable_error": "", - } + assert is_dict_subset( + { + "id": job.id, + "type": IdlingJobType.type, + "state": "pending", + "progress_percentage": 0, + "human_readable_error": "", + }, + resp, + ) with test_thread(run_async_job.apply, args=(job.id,)) as t: assert job.pending, job.get_cached_state() @@ -472,13 +537,16 @@ def run(self, job, progress): assert job assert job.id assert job.specific - assert resp == { - "id": job.id, - "type": IdlingJobType.type, - "state": "pending", - "progress_percentage": 0, - "human_readable_error": "", - } + assert is_dict_subset( + { + "id": job.id, + "type": IdlingJobType.type, + "state": "pending", + "progress_percentage": 0, + "human_readable_error": "", + }, + resp, + ) with test_thread(run_async_job.apply, args=(job.id,)) as t: assert job.pending, job.get_cached_state() diff --git a/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py b/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py index 0930093ed4..b33a08ce9f 100644 --- a/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py +++ b/backend/tests/baserow/contrib/automation/api/workflows/test_workflow_views.py @@ -337,7 +337,7 @@ def test_delete_workflow_does_not_exist(api_client, data_fixture): @pytest.mark.django_db def test_duplicate_workflow(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() + user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( user, automation=automation, name="test" @@ -345,7 +345,11 @@ def test_duplicate_workflow(api_client, data_fixture): trigger = workflow.get_trigger() url = reverse(API_URL_WORKFLOW_DUPLICATE, kwargs={"workflow_id": workflow.id}) - response = api_client.post(url, format="json", HTTP_AUTHORIZATION=f"JWT {token}") + with freeze_time("2025-06-04 11:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + url, format="json", HTTP_AUTHORIZATION=f"JWT {token}" + ) assert response.status_code == HTTP_202_ACCEPTED @@ -367,6 +371,8 @@ def test_duplicate_workflow(api_client, data_fixture): "progress_percentage": 0, "state": "pending", "type": "duplicate_automation_workflow", + "created_on": "2025-06-04T11:00:00Z", + "updated_on": "2025-06-04T11:00:00Z", } @@ -490,19 +496,20 @@ def test_run_workflow_in_test_mode(api_client, data_fixture): @pytest.mark.django_db def test_publish_workflow(api_client, data_fixture): - user, token = data_fixture.create_user_and_token() + user = data_fixture.create_user() automation = data_fixture.create_automation_application(user) workflow = data_fixture.create_automation_workflow( user, automation=automation, name="test" ) url = reverse(API_URL_WORKFLOW_PUBLISH, kwargs={"workflow_id": workflow.id}) - response = api_client.post( - url, - format="json", - HTTP_AUTHORIZATION=f"JWT {token}", - ) 
- + with freeze_time("2025-06-04 11:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + url, + format="json", + HTTP_AUTHORIZATION=f"JWT {token}", + ) assert response.status_code == HTTP_202_ACCEPTED assert response.json() == { "human_readable_error": "", @@ -510,6 +517,8 @@ def test_publish_workflow(api_client, data_fixture): "progress_percentage": AnyInt(), "state": "pending", "type": "publish_automation_workflow", + "created_on": "2025-06-04T11:00:00Z", + "updated_on": "2025-06-04T11:00:00Z", } diff --git a/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py b/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py index b8b23fbe98..13c472a790 100644 --- a/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py +++ b/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py @@ -3,6 +3,7 @@ from django.urls import reverse import pytest +from freezegun import freeze_time from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND from baserow.contrib.database.airtable.models import AirtableImportJob @@ -85,21 +86,25 @@ def test_create_airtable_import_job( }, } - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "airtable", - "workspace_id": workspace.id, - "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx", - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) + with freeze_time("2025-01-01 12:00:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "airtable", + "workspace_id": workspace.id, + "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx", + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) assert response.status_code == HTTP_200_OK airtable_import_job = AirtableImportJob.objects.all().first() assert airtable_import_job.workspace_id == workspace.id assert airtable_import_job.airtable_share_id == "shrxxxxxxxxxxxxxx" assert response.json() == { "id": airtable_import_job.id, + "created_on": "2025-01-01T12:00:00Z", + "updated_on": "2025-01-01T12:00:00Z", "type": "airtable", "workspace_id": workspace.id, "airtable_share_id": "shrxxxxxxxxxxxxxx", @@ -112,21 +117,25 @@ def test_create_airtable_import_job( mock_run_import_from_airtable.delay.assert_called() airtable_import_job.delete() - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "airtable", - "workspace_id": workspace.id, - "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx", - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) + with freeze_time("2025-01-01 12:00:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "airtable", + "workspace_id": workspace.id, + "airtable_share_url": "https://airtable.com/shrxxxxxxxxxxxxxx", + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) assert response.status_code == HTTP_200_OK airtable_import_job = AirtableImportJob.objects.all().first() assert airtable_import_job.workspace_id == workspace.id assert airtable_import_job.airtable_share_id == "shrxxxxxxxxxxxxxx" assert response.json() == { "id": airtable_import_job.id, + "created_on": "2025-01-01T12:00:00Z", + "updated_on": "2025-01-01T12:00:00Z", "type": "airtable", "workspace_id": workspace.id, "airtable_share_id": "shrxxxxxxxxxxxxxx", @@ -137,6 +146,7 @@ def test_create_airtable_import_job( "database": None, } + token = data_fixture.generate_token(user) response = api_client.post( reverse("api:jobs:list"), { @@ -162,15 +172,17 
@@ def test_create_airtable_import_job_long_share_id( "viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" ) - response = api_client.post( - reverse("api:jobs:list"), - { - "type": "airtable", - "workspace_id": workspace.id, - "airtable_share_url": f"https://airtable.com/{long_share_id}", - }, - HTTP_AUTHORIZATION=f"JWT {token}", - ) + with freeze_time("2025-01-01 12:00:00"): + token = data_fixture.generate_token(user) + response = api_client.post( + reverse("api:jobs:list"), + { + "type": "airtable", + "workspace_id": workspace.id, + "airtable_share_url": f"https://airtable.com/{long_share_id}", + }, + HTTP_AUTHORIZATION=f"JWT {token}", + ) assert response.status_code == HTTP_200_OK airtable_import_job = AirtableImportJob.objects.all().first() @@ -178,6 +190,8 @@ def test_create_airtable_import_job_long_share_id( assert airtable_import_job.airtable_share_id == long_share_id assert response.json() == { "id": airtable_import_job.id, + "created_on": "2025-01-01T12:00:00Z", + "updated_on": "2025-01-01T12:00:00Z", "type": "airtable", "workspace_id": workspace.id, "airtable_share_id": long_share_id, @@ -274,8 +288,10 @@ def test_create_airtable_import_job_with_session( @pytest.mark.django_db def test_get_airtable_import_job(data_fixture, api_client): user, token = data_fixture.create_user_and_token() - airtable_job_1 = data_fixture.create_airtable_import_job(user=user) - airtable_job_2 = data_fixture.create_airtable_import_job() + + with freeze_time("2025-01-01 12:00:00"): + airtable_job_1 = data_fixture.create_airtable_import_job(user=user) + airtable_job_2 = data_fixture.create_airtable_import_job() response = api_client.get( reverse( @@ -299,6 +315,8 @@ def test_get_airtable_import_job(data_fixture, api_client): assert json == { "id": airtable_job_1.id, "type": "airtable", + "created_on": "2025-01-01T12:00:00Z", + "updated_on": "2025-01-01T12:00:00Z", "workspace_id": airtable_job_1.workspace_id, "airtable_share_id": "test", "skip_files": False, @@ -312,7 +330,8 @@ def test_get_airtable_import_job(data_fixture, api_client): airtable_job_1.state = "failed" airtable_job_1.human_readable_error = "Wrong" airtable_job_1.database = data_fixture.create_database_application() - airtable_job_1.save() + with freeze_time("2025-01-01 12:00:00"): + airtable_job_1.save() response = api_client.get( reverse( @@ -325,6 +344,8 @@ def test_get_airtable_import_job(data_fixture, api_client): json = response.json() assert json == { "id": airtable_job_1.id, + "created_on": "2025-01-01T12:00:00Z", + "updated_on": "2025-01-01T12:00:00Z", "type": "airtable", "workspace_id": airtable_job_1.workspace_id, "airtable_share_id": "test", diff --git a/backend/tests/baserow/ws/test_ws_signals.py b/backend/tests/baserow/ws/test_ws_signals.py index 2aafba32ed..cb2091de7d 100644 --- a/backend/tests/baserow/ws/test_ws_signals.py +++ b/backend/tests/baserow/ws/test_ws_signals.py @@ -4,6 +4,7 @@ from django.db import transaction import pytest +from freezegun import freeze_time from pytest_unordered import unordered from baserow.core.handler import CoreHandler @@ -484,7 +485,8 @@ def test_job_started(mock_broadcast_to_users, data_fixture): data_fixture.register_temp_job_types() user = data_fixture.create_user() - JobHandler().create_and_start_job(user, "tmp_job_type_1") + with freeze_time("2024-01-01 12:00:00"): + JobHandler().create_and_start_job(user, "tmp_job_type_1") mock_broadcast_to_users.delay.assert_called_once() args = mock_broadcast_to_users.delay.call_args @@ -498,5 +500,7 @@ def test_job_started(mock_broadcast_to_users, 
data_fixture): "state": "started", "human_readable_error": "", "test_field": 42, + "created_on": "2024-01-01T12:00:00Z", + "updated_on": "2024-01-01T12:00:00Z", }, } diff --git a/changelog/entries/unreleased/feature/4264_show_jobs_for_ai_field_in_modal.json b/changelog/entries/unreleased/feature/4264_show_jobs_for_ai_field_in_modal.json new file mode 100644 index 0000000000..8683f015e7 --- /dev/null +++ b/changelog/entries/unreleased/feature/4264_show_jobs_for_ai_field_in_modal.json @@ -0,0 +1,9 @@ +{ + "type": "feature", + "message": "Show jobs for AI Field in modal", + "issue_origin": "github", + "issue_number": 4264, + "domain": "database", + "bullet_points": [], + "created_at": "2025-11-14" +} \ No newline at end of file diff --git a/premium/backend/src/baserow_premium/fields/job_types.py b/premium/backend/src/baserow_premium/fields/job_types.py index b9327bc360..77c16ca715 100644 --- a/premium/backend/src/baserow_premium/fields/job_types.py +++ b/premium/backend/src/baserow_premium/fields/job_types.py @@ -1,3 +1,5 @@ +from typing import Type + from django.db.models import QuerySet from baserow_premium.generative_ai.managers import AIFileManager @@ -36,6 +38,18 @@ from .registries import ai_field_output_registry +class GenerateAIValuesJobFiltersSerializer(serializers.Serializer): + """ + Adds the ability to filter GenerateAIValuesJob by AI field ID. + """ + + generate_ai_values_field_id = serializers.IntegerField( + min_value=1, + required=False, + help_text="Filter by the AI field ID.", + ) + + class GenerateAIValuesJobType(JobType): type = "generate_ai_values" model_class = GenerateAIValuesJob @@ -209,6 +223,15 @@ def prepare_values(self, values, user): return values + def get_filters_serializer(self) -> Type[serializers.Serializer] | None: + """ + Adds the ability to filter GenerateAIValuesJob by AI field ID. + + :return: A serializer class extending JobTypeFiltersSerializer. 
+ """ + + return GenerateAIValuesJobFiltersSerializer + def run(self, job: GenerateAIValuesJob, progress): user = job.user ai_field = self._get_field(job.field_id) diff --git a/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py b/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py index 59770df2cd..2ef11ac725 100644 --- a/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/fields/test_ai_field_views.py @@ -4,6 +4,7 @@ import pytest from rest_framework.status import ( + HTTP_200_OK, HTTP_202_ACCEPTED, HTTP_400_BAD_REQUEST, HTTP_402_PAYMENT_REQUIRED, @@ -384,3 +385,97 @@ def test_batch_generate_ai_field_value_limit(api_client, premium_data_fixture): }, ], } + + +@pytest.mark.django_db +@pytest.mark.field_ai +@override_settings(DEBUG=True) +def test_list_jobs_filter_by_type_and_field_id(premium_data_fixture, api_client): + """Test that generate_ai_values jobs can be filtered by type and field_id.""" + + premium_data_fixture.register_fake_generate_ai_type() + user, token = premium_data_fixture.create_user_and_token( + has_active_premium_license=True + ) + + database = premium_data_fixture.create_database_application( + user=user, name="database" + ) + table = premium_data_fixture.create_database_table(name="table", database=database) + + # Create multiple AI fields + field_1 = premium_data_fixture.create_ai_field( + table=table, name="ai_1", ai_prompt="'Hello'" + ) + field_2 = premium_data_fixture.create_ai_field( + table=table, name="ai_2", ai_prompt="'World'" + ) + + rows = RowHandler().create_rows(user, table, rows_values=[{}, {}]).created_rows + + # Create jobs for field_1 + response_1 = api_client.post( + reverse( + "api:premium:fields:async_generate_ai_field_values", + kwargs={"field_id": field_1.id}, + ), + {"row_ids": [rows[0].id]}, + format="json", + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response_1.status_code == HTTP_202_ACCEPTED + job_1_id = response_1.json()["id"] + + # Create jobs for field_2 + response_2 = api_client.post( + reverse( + "api:premium:fields:async_generate_ai_field_values", + kwargs={"field_id": field_2.id}, + ), + {"row_ids": [rows[1].id]}, + format="json", + HTTP_AUTHORIZATION=f"JWT {token}", + ) + assert response_2.status_code == HTTP_202_ACCEPTED + job_2_id = response_2.json()["id"] + + # Test filtering by type only + jobs_url = reverse("api:jobs:list") + response = api_client.get( + f"{jobs_url}?type=generate_ai_values", + HTTP_AUTHORIZATION=f"JWT {token}", + ) + + assert response.status_code == HTTP_200_OK + response_data = response.json() + job_ids = [job["id"] for job in response_data["jobs"]] + assert job_1_id in job_ids + assert job_2_id in job_ids + # All returned jobs should be of type generate_ai_values + assert all(job["type"] == "generate_ai_values" for job in response_data["jobs"]) + + # Test filtering by type and field_id for field_1 + response = api_client.get( + f"{jobs_url}?type=generate_ai_values&generate_ai_values_field_id={field_1.id}", + HTTP_AUTHORIZATION=f"JWT {token}", + ) + + assert response.status_code == HTTP_200_OK + response_data = response.json() + assert len(response_data["jobs"]) == 1 + assert response_data["jobs"][0]["id"] == job_1_id + assert response_data["jobs"][0]["type"] == "generate_ai_values" + assert response_data["jobs"][0]["field_id"] == field_1.id + + # Test filtering by type and field_id for field_2 + response = api_client.get( + 
f"{jobs_url}?type=generate_ai_values&generate_ai_values_field_id={field_2.id}", + HTTP_AUTHORIZATION=f"JWT {token}", + ) + + assert response.status_code == HTTP_200_OK + response_data = response.json() + assert len(response_data["jobs"]) == 1 + assert response_data["jobs"][0]["id"] == job_2_id + assert response_data["jobs"][0]["type"] == "generate_ai_values" + assert response_data["jobs"][0]["field_id"] == field_2.id diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/all.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/all.scss index d43ea27463..4ba9c2db78 100644 --- a/premium/web-frontend/modules/baserow_premium/assets/scss/components/all.scss +++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/all.scss @@ -14,3 +14,4 @@ @import 'dashboard_chart_widget'; @import 'aggregation_series_form'; @import 'redirect_modal'; +@import 'generate_ai_values_modal'; diff --git a/premium/web-frontend/modules/baserow_premium/assets/scss/components/generate_ai_values_modal.scss b/premium/web-frontend/modules/baserow_premium/assets/scss/components/generate_ai_values_modal.scss new file mode 100644 index 0000000000..f2570e3ca3 --- /dev/null +++ b/premium/web-frontend/modules/baserow_premium/assets/scss/components/generate_ai_values_modal.scss @@ -0,0 +1,75 @@ +.generate-ai-values__generate-button { + margin-top: 4px; +} + +.generate-ai-values__list { + border-top: 1px solid $color-neutral-200; + padding: 6px; + margin-top: 20px; +} + +.generate-ai-values-job { + padding: 10px 0; + border-bottom: 1px solid $color-neutral-200; + + &:last-child { + border-bottom: none; + } + + &__header { + display: flex; + justify-content: space-between; + align-items: baseline; + gap: 8px; + margin-bottom: 7px; + } + + &__name { + font-size: 13px; + font-weight: 600; + color: $color-neutral-900; + line-height: 1.35; + word-break: break-word; + flex: 1; + min-width: 0; + } + + &__started { + font-size: 12px; + font-weight: 400; + color: $color-neutral-600; + margin-left: 6px; + } + + &__detail { + font-size: 12px; + color: $color-neutral-600; + line-height: 1.35; + } + + &__progress-row { + display: flex; + align-items: center; + gap: 50px; + } + + &__progress-bar { + flex: 1; + min-width: 0; + } + + &__progress-value { + flex-shrink: 0; + font-size: 13px; + font-weight: 600; + color: $color-neutral-700; + min-width: 35px; + text-align: right; + margin-left: auto; + } + + &__cancel { + color: $color-error-500; + flex-shrink: 0; + } +} diff --git a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesFormFooter.vue b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesFormFooter.vue index ab6ef278e5..879345d2e9 100644 --- a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesFormFooter.vue +++ b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesFormFooter.vue @@ -21,7 +21,7 @@ :loading="loading" :disabled="disabled || loading" full-width - class="modal-progress__primary-button" + class="modal-progress__primary-button generate-ai-values__generate-button" > {{ $t('generateAIValuesFormFooter.generate') }} diff --git a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue new file mode 100644 index 0000000000..39ab9cf829 --- /dev/null +++ 
b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue @@ -0,0 +1,96 @@ + + + diff --git a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue index 1b4e0dbeb1..b216db8171 100644 --- a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue +++ b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue @@ -1,5 +1,5 @@ @@ -37,12 +59,17 @@ import FieldService from '@baserow_premium/services/field' import { populateView } from '@baserow/modules/database/store/view' import GenerateAIValuesForm from '@baserow_premium/components/field/GenerateAIValuesForm' import GenerateAIValuesFormFooter from '@baserow_premium/components/field/GenerateAIValuesFormFooter' +import GenerateAIValuesJobListItem from '@baserow_premium/components/field/GenerateAIValuesJobListItem' import job from '@baserow/modules/core/mixins/job' import { GenerateAIValuesJobType } from '@baserow_premium/jobTypes' export default { name: 'GenerateAIValuesModal', - components: { GenerateAIValuesForm, GenerateAIValuesFormFooter }, + components: { + GenerateAIValuesForm, + GenerateAIValuesFormFooter, + GenerateAIValuesJobListItem, + }, mixins: [modal, error, job], props: { database: { @@ -65,13 +92,23 @@ export default { }, data() { return { - views: [], - loadingViews: false, loading: false, - cancelLoading: false, isValid: false, + views: [], + loadingViews: false, + previousJobs: [], + loadingPreviousJobs: false, } }, + computed: { + unfinishedJobsFromStore() { + return this.$store.getters['job/getUnfinishedJobs'].filter( + (job) => + job.type === GenerateAIValuesJobType.getType() && + job.field_id === this.field.id + ) + }, + }, methods: { loadRunningJob() { const runningJob = this.$store.getters['job/getUnfinishedJobs'].find( @@ -88,18 +125,53 @@ export default { this.loading = true } }, - async show(...args) { + show(...args) { const show = modal.methods.show.call(this, ...args) - this.loading = false - await this.fetchViews() + // Don't await to avoid blocking the modal display this.loadRunningJob() + this.fetchViews() + this.loadPreviousJobs() this.$nextTick(() => { this.valuesChanged() }) return show }, + async loadPreviousJobs() { + this.loadingPreviousJobs = true + + try { + const { data } = await FieldService( + this.$client + ).listGenerateAIValuesJobs(this.field.id) + const jobs = data.jobs + const storeJobs = this.unfinishedJobsFromStore + let addedRunningJobs = false + + jobs.forEach((job, index) => { + const storeJob = storeJobs.find((sj) => sj.id === job.id) + if (storeJob) { + jobs[index] = storeJob + } else if (job.state === 'pending' || job.state === 'started') { + this.$store.dispatch('job/forceCreate', job) + addedRunningJobs = true + } + }) + + if (addedRunningJobs) { + this.$store.dispatch('job/tryScheduleNextUpdate') + } + + // Filter out the current job being shown in the form to avoid duplication + this.previousJobs = jobs.filter((job) => job.id !== this.job?.id) + } catch (error) { + this.handleError(error) + } finally { + this.loadingPreviousJobs = false + } + }, async fetchViews() { this.loadingViews = true + try { const { data: viewsData } = await ViewService(this.$client).fetchAll( this.table.id @@ -108,8 +180,9 @@ export default { this.views = viewsData } catch (error) { this.handleError(error, 'views') + } finally { + this.loadingViews = false } - this.loadingViews = false }, 
async submitted(values) { if (!this.$refs.form.isFormValid()) { @@ -133,16 +206,23 @@ export default { this.handleError(error) } }, - onJobFinished() { + // eslint-disable-next-line require-await + async onJobFinished() { + this.previousJobs.unshift(this.job) this.job = null this.loading = false }, - onJobFailed() { + // eslint-disable-next-line require-await + async onJobFailed() { + this.previousJobs.unshift(this.job) + this.job = null this.loading = false }, - onJobCancelled() { + // eslint-disable-next-line require-await + async onJobCancelled() { + this.previousJobs.unshift(this.job) + this.job = null this.loading = false - this.cancelLoading = false }, valuesChanged() { this.isValid = this.$refs.form.isFormValid() diff --git a/premium/web-frontend/modules/baserow_premium/jobTypes.js b/premium/web-frontend/modules/baserow_premium/jobTypes.js index 1837e74ad1..040cb27019 100644 --- a/premium/web-frontend/modules/baserow_premium/jobTypes.js +++ b/premium/web-frontend/modules/baserow_premium/jobTypes.js @@ -13,4 +13,42 @@ export class GenerateAIValuesJobType extends JobType { const { i18n } = this.app return i18n.t('jobType.generateAIValues') } + + /** + * Clear pending field operations when job completes (success or failure). + * This ensures spinners are removed from cells even if job was cancelled. + */ + async _clearPendingOperations(job) { + const { store } = this.app + // If the job has row_ids, clear pending state for those specific rows + if (job.row_ids && job.row_ids.length > 0 && job.field_id) { + // We need to find all store prefixes that might have this pending state + // Grid views typically use 'page/view/grid' as the store prefix + const storePrefix = 'page/' + try { + await store.dispatch( + `${storePrefix}view/grid/setPendingFieldOperations`, + { + fieldId: job.field_id, + rowIds: job.row_ids, + value: false, + } + ) + } catch (error) { + // Silently fail if the store action doesn't exist (e.g., not in grid view) + } + } + } + + async onJobDone(job) { + await this._clearPendingOperations(job) + } + + async onJobFailed(job) { + await this._clearPendingOperations(job) + } + + async onJobCancelled(job) { + await this._clearPendingOperations(job) + } } diff --git a/premium/web-frontend/modules/baserow_premium/locales/en.json b/premium/web-frontend/modules/baserow_premium/locales/en.json index c82b0e15d9..1f6bdb2144 100644 --- a/premium/web-frontend/modules/baserow_premium/locales/en.json +++ b/premium/web-frontend/modules/baserow_premium/locales/en.json @@ -374,7 +374,18 @@ "other": "Other" }, "generateAIValuesModal": { - "title": "Generate all AI values" + "title": "Generate all AI values", + "noPreviousJobs": "No previous jobs", + "started": "Started", + "finished": "Completed", + "running": "Running", + "failed": "Failed", + "cancelled": "Cancelled", + "pending": "Pending", + "view": "View: {name}", + "table": "Table: All rows", + "deletedView": "View: ID {viewId} (deleted)", + "rows": "Rows: {count} rows" }, "generateAIValuesForm": { "scopeLabel": "Scope", diff --git a/premium/web-frontend/modules/baserow_premium/services/field.js b/premium/web-frontend/modules/baserow_premium/services/field.js index 0a964a03e6..6299e43d42 100644 --- a/premium/web-frontend/modules/baserow_premium/services/field.js +++ b/premium/web-frontend/modules/baserow_premium/services/field.js @@ -1,3 +1,5 @@ +import { GenerateAIValuesJobType } from '@baserow_premium/jobTypes' + export default (client) => { return { generateAIFieldValues(fieldId, rowIds) { @@ -36,5 +38,19 @@ export default (client) 
=> { return client.post(`/jobs/`, payload) }, + listGenerateAIValuesJobs(fieldId, { offset, limit = 10 } = {}) { + const params = new URLSearchParams() + params.append('type', GenerateAIValuesJobType.getType()) + params.append(GenerateAIValuesJobType.getType() + '_field_id', fieldId) + if (offset !== undefined) { + params.append('offset', offset) + } + if (limit !== undefined) { + params.append('limit', limit) + } + + const config = { params } + return client.get(`/jobs/`, config) + }, } } diff --git a/web-frontend/modules/core/jobTypes.js b/web-frontend/modules/core/jobTypes.js index 1a35f48feb..0c2690620b 100644 --- a/web-frontend/modules/core/jobTypes.js +++ b/web-frontend/modules/core/jobTypes.js @@ -101,15 +101,24 @@ export class JobType extends Registerable { async beforeUpdate(job, data) {} async afterUpdate(job, data) { - if (job.state === 'finished') { - await this.onJobDone(job, data) - } else if (job.state === 'failed') { - await this.onJobFailed(job, data) + switch (job.state) { + case 'cancelled': + await this.onJobCancelled(job, data) + break + case 'failed': + await this.onJobFailed(job, data) + break + case 'finished': + await this.onJobDone(job, data) + break + default: + break } } - async onJobDone(job) {} - async onJobFailed(job) {} + async onJobDone(job, data) {} + async onJobFailed(job, data) {} + async onJobCancelled(job, data) {} } export class DuplicateApplicationJobType extends JobType { diff --git a/web-frontend/modules/core/locales/en.json b/web-frontend/modules/core/locales/en.json index 213e85c11f..7921f29a50 100644 --- a/web-frontend/modules/core/locales/en.json +++ b/web-frontend/modules/core/locales/en.json @@ -689,7 +689,7 @@ "daysAgo": "0 days ago | 1 day ago | {n} days ago", "monthsAgo": "0 months ago | 1 month ago | {n} months ago", "yearsAgo": "0 years ago | 1 year ago | {n} years ago", - "lessThanMinuteAgo": "less than minute ago", + "lessThanMinuteAgo": "less than a minute ago", "justNow": "just now" }, "crudTableSearch": { diff --git a/web-frontend/modules/core/mixins/job.js b/web-frontend/modules/core/mixins/job.js index 8fe13ce25a..64c7ff4cda 100644 --- a/web-frontend/modules/core/mixins/job.js +++ b/web-frontend/modules/core/mixins/job.js @@ -113,21 +113,23 @@ export default { }) } catch (error) { const errMsg = error.response?.data - if (errMsg?.error === 'ERROR_JOB_NOT_CANCELLABLE') { - this.showError( - this.$t('job.errorJobCannotBeCancelledTitle'), - this.$t('job.errorJobCannotBeCancelledDescription') - ) - } else { - this.showError({ - title: this.$t('clientHandler.notCompletedTitle'), - message: - (error ? error?.detail || error?.message : null) || - this.$t('unknown error'), - }) - } - if (typeof this.onJobCancelFailed === 'function') { - this.onJobCancelFailed() + if (typeof this.showError === 'function') { + if (errMsg?.error === 'ERROR_JOB_NOT_CANCELLABLE') { + this.showError( + this.$t('job.errorJobCannotBeCancelledTitle'), + this.$t('job.errorJobCannotBeCancelledDescription') + ) + } else { + this.showError({ + title: this.$t('clientHandler.notCompletedTitle'), + message: + (error ? 
error?.detail || error?.message : null) ||
+              this.$t('unknown error'),
+          })
+        }
+        if (typeof this.onJobCancelFailed === 'function') {
+          this.onJobCancelFailed()
+        }
       }
     } finally {
       this.cancelLoading = false
diff --git a/web-frontend/modules/core/store/job.js b/web-frontend/modules/core/store/job.js
index 410607264d..b8991811cb 100644
--- a/web-frontend/modules/core/store/job.js
+++ b/web-frontend/modules/core/store/job.js
@@ -203,10 +203,14 @@ export const actions = {
   /**
    * Cancels a scheduled or running job.
    */
-  async cancel({ dispatch, commit }, job) {
+  async cancel({ dispatch }, { id }) {
+    const job = this.getters['job/get'](id)
+    if (!job) {
+      throw new Error('Job not found in store.')
+    }
     try {
       const { data } = await JobService(this.$client).cancel(job.id)
-      commit('UPDATE_ITEM', { id: data.id, values: data })
+      await dispatch('forceUpdate', { job, data })
     } finally {
       await dispatch('tryScheduleNextUpdate')
     }

From 39e77221d585baaff10df0506529ac9fffa68f36 Mon Sep 17 00:00:00 2001
From: Bram
Date: Fri, 28 Nov 2025 14:42:28 +0100
Subject: [PATCH 2/2] Keep websocket connection open longer by sending ping
 (#4327)

---
 .../bug/change_websocket_connection_error.json | 9 +++++++++
 web-frontend/modules/core/locales/en.json      | 4 ++--
 2 files changed, 11 insertions(+), 2 deletions(-)
 create mode 100644 changelog/entries/unreleased/bug/change_websocket_connection_error.json

diff --git a/changelog/entries/unreleased/bug/change_websocket_connection_error.json b/changelog/entries/unreleased/bug/change_websocket_connection_error.json
new file mode 100644
index 0000000000..aa0eaccdf5
--- /dev/null
+++ b/changelog/entries/unreleased/bug/change_websocket_connection_error.json
@@ -0,0 +1,9 @@
+{
+  "type": "bug",
+  "message": "Change WebSocket connection closed error message.",
+  "issue_origin": "github",
+  "issue_number": null,
+  "domain": "core",
+  "bullet_points": [],
+  "created_at": "2025-11-24"
+}
diff --git a/web-frontend/modules/core/locales/en.json b/web-frontend/modules/core/locales/en.json
index 7921f29a50..11ed5371b4 100644
--- a/web-frontend/modules/core/locales/en.json
+++ b/web-frontend/modules/core/locales/en.json
@@ -427,11 +427,11 @@
   },
   "connectingToast": {
     "title": "Reconnecting",
-    "content": "Reconnecting with server."
+    "content": "Trying to reestablish real-time updates."
   },
   "failedConnectingToast": {
     "title": "Failed",
-    "content": "Connection to the server has failed. Please refresh the page.",
+    "content": "Real-time updates could not be reestablished. Please refresh to continue.",
    "action": "Refresh page"
   },
   "authorizationErrorToast": {
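
Usage note for patch 1/2: the extended `list_job` endpoint accepts a `type` discriminator, per-type filter parameters prefixed with the job type name, and `offset`/`limit` pagination. A minimal sketch of how a client could list the generation jobs of a single AI field; the base URL, token, and field id 123 are assumptions for illustration, not part of the patches:

import requests

BASE_URL = "https://baserow.example.com"  # assumption: your Baserow instance
TOKEN = "<jwt>"  # assumption: a valid JWT access token

# Use the filters added in patch 1/2: the `type` discriminator plus the
# type-prefixed `generate_ai_values_field_id` filter, with pagination.
response = requests.get(
    f"{BASE_URL}/api/jobs/",
    params={
        "type": "generate_ai_values",
        "generate_ai_values_field_id": 123,  # hypothetical AI field id
        "offset": 0,
        "limit": 20,
    },
    headers={"Authorization": f"JWT {TOKEN}"},
)
response.raise_for_status()

# The response body is {"jobs": [...]}, newest jobs first (ordered by -id).
for job in response.json()["jobs"]:
    print(job["id"], job["state"], job["progress_percentage"], job["updated_on"])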