From 8d094b51dc962d989440d5c41a992045f28ded56 Mon Sep 17 00:00:00 2001 From: Melvin Stans Date: Fri, 14 Nov 2025 15:34:06 +0100 Subject: [PATCH 1/6] Feature/helm chart embeddings service (#4257) --- deploy/helm/baserow/Chart.lock | 21 +- deploy/helm/baserow/Chart.yaml | 22 +- deploy/helm/baserow/README.md | 53 +++ .../baserow/charts/baserow-common/Chart.yaml | 4 +- .../baserow-common/templates/_helpers.tpl | 4 +- .../baserow/templates/backend-configmap.yaml | 6 + deploy/helm/baserow/values.yaml | 59 +++ docs/installation/install-on-aws.md | 8 +- docs/installation/install-with-helm.md | 429 ++++++++++++++++-- docs/installation/install-with-k8s.md | 4 +- 10 files changed, 537 insertions(+), 73 deletions(-) diff --git a/deploy/helm/baserow/Chart.lock b/deploy/helm/baserow/Chart.lock index 6f2f4fe528..5b311c2450 100644 --- a/deploy/helm/baserow/Chart.lock +++ b/deploy/helm/baserow/Chart.lock @@ -1,25 +1,28 @@ dependencies: - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 - name: baserow repository: file://charts/baserow-common - version: 1.0.35 + version: 1.0.36 +- name: baserow + repository: file://charts/baserow-common + version: 1.0.36 - name: redis repository: https://charts.bitnami.com/bitnami version: 19.5.5 @@ -32,5 +35,5 @@ dependencies: - name: caddy-ingress-controller repository: https://caddyserver.github.io/ingress version: 1.1.0 -digest: sha256:d0bd922613ee89c3472f6c3bae1c8195b470af2590d2d83307cbd15fb36a5dca -generated: "2025-10-08T14:18:40.864014+02:00" +digest: sha256:482eebfe24d9ce5182fe40b83b0251a14316f1f64e0199e6ae87ce724a7d29ed +generated: "2025-11-04T11:22:44.737069+01:00" diff --git a/deploy/helm/baserow/Chart.yaml b/deploy/helm/baserow/Chart.yaml index cd14e4a900..483bad2ab7 100644 --- a/deploy/helm/baserow/Chart.yaml +++ b/deploy/helm/baserow/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: baserow description: The open platform to create scalable databases and applications—without coding. 
 type: application
-version: 1.0.35
+version: 1.0.36
 appVersion: "1.35.3"
 home: https://github.com/baserow/baserow/blob/develop/deploy/helm/baserow?ref_type=heads
 icon: https://baserow.io/img/favicon_192.png
 sources:
 dependencies:
   - name: baserow
     alias: baserow-backend-asgi
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-backend-wsgi
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-frontend
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-celery-beat-worker
-    version: "1.0.35"
+    version: "1.0.36"
    repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-celery-export-worker
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-celery-worker
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
   - name: baserow
     alias: baserow-celery-flower
-    version: "1.0.35"
+    version: "1.0.36"
     repository: "file://charts/baserow-common"
     condition: baserow-celery-flower.enabled
+  - name: baserow
+    alias: baserow-embeddings
+    version: "1.0.36"
+    repository: "file://charts/baserow-common"
+    condition: baserow-embeddings.enabled
+
   - name: redis
     version: 19.5.x
     repository: https://charts.bitnami.com/bitnami
diff --git a/deploy/helm/baserow/README.md b/deploy/helm/baserow/README.md
index 48d3e792da..cb8620d485 100644
--- a/deploy/helm/baserow/README.md
+++ b/deploy/helm/baserow/README.md
@@ -92,6 +92,36 @@ baserow-backend-wsgi:
       onDemandAsk: "http://my-baserow-baserow-backend-wsgi/api/builder/domains/ask-public-domain-exists/"
 ```
 
+## AI and Embeddings Configuration
+
+Baserow supports multiple AI providers for generative AI features and the AI assistant. The embeddings service powers semantic search for the AI assistant's documentation lookup feature. For more details, see the [Baserow AI documentation](/docs/installation/ai-assistant.md).
+
+### Enable AI Assistant
+
+To enable the AI assistant, configure the LLM model and provide the necessary API keys for the chosen provider.
+
+```yaml
+global:
+  baserow:
+    assistantLLMModel: "groq/openai/gpt-oss-120b"
+
+backendSecrets:
+  GROQ_API_KEY: "your-groq-api-key"
+```
+
+More information about the available providers can be found here: https://baserow.io/docs/installation/ai-assistant
+
+### Enable Embeddings Service
+
+The AI assistant depends on the embeddings service, and the LLM model must be configured for it. Enable the service in addition to the global AI configuration above.
+
+#### Basic Configuration
+
+```yaml
+baserow-embeddings:
+  enabled: true
+```
+
 ## Different Cloud Providers
 
 On different cloud providers, you may need to configure the Object storage, ingress and Load Balancer differently. Below are some examples of how to configure them.
@@ -425,6 +455,29 @@ caddy:
 | `baserow-celery-flower.args`         | Arguments passed to the Celery Flower monitoring tool.    | `["celery-flower"]` |
 | `baserow-celery-flower.replicaCount` | Number of replicas for the Celery Flower monitoring tool. | `1`                 |
 
+### Baserow Embeddings Configuration
+
+| Name | Description | Value |
+| --------------------------------------------------------------- | --------------------------------------------------------------- | -------------------------- |
+| `baserow-embeddings.enabled` | Set to true to enable the Baserow Embeddings service.
| `false` | +| `baserow-embeddings.assistantLLMModel` | The LLM model to use for the Embeddings service. | `groq/openai/gpt-oss-120b` | +| `baserow-embeddings.image.repository` | Docker image repository for the Embeddings service. | `embeddings` | +| `baserow-embeddings.resources` | Resource requests and limits for the Embeddings service. | | +| `baserow-embeddings.autoscaling.enabled` | Enable autoscaling for the Embeddings service. | `false` | +| `baserow-embeddings.autoscaling.minReplicas` | Minimum number of replicas for autoscaling. | `1` | +| `baserow-embeddings.autoscaling.maxReplicas` | Maximum number of replicas for autoscaling. | `3` | +| `baserow-embeddings.autoscaling.targetCPUUtilizationPercentage` | Target CPU utilization percentage for autoscaling. | `80` | +| `baserow-embeddings.service.port` | Service port for the Embeddings service. | `80` | +| `baserow-embeddings.service.targetPort` | Target port for the Embeddings service. | `80` | +| `baserow-embeddings.readinessProbe.initialDelaySeconds` | Initial delay for readiness probe. | `10` | +| `baserow-embeddings.readinessProbe.periodSeconds` | Period for readiness probe. | `10` | +| `baserow-embeddings.readinessProbe.timeoutSeconds` | Timeout for readiness probe. | `5` | +| `baserow-embeddings.livenessProbe.initialDelaySeconds` | Initial delay for liveness probe. | `10` | +| `baserow-embeddings.livenessProbe.periodSeconds` | Period for liveness probe. | `10` | +| `baserow-embeddings.livenessProbe.timeoutSeconds` | Timeout for liveness probe. | `5` | +| `baserow-embeddings.pdb.create` | Enable/disable a Pod Disruption Budget creation. | `false` | +| `baserow-embeddings.pdb.minAvailable` | Minimum number/percentage of pods that should remain scheduled. | `75%` | + ### Ingress Configuration | Name | Description | Value | diff --git a/deploy/helm/baserow/charts/baserow-common/Chart.yaml b/deploy/helm/baserow/charts/baserow-common/Chart.yaml index a1c2a1304a..a6c5ace7f2 100644 --- a/deploy/helm/baserow/charts/baserow-common/Chart.yaml +++ b/deploy/helm/baserow/charts/baserow-common/Chart.yaml @@ -1,6 +1,6 @@ apiVersion: v2 name: baserow -description: A Helm chart for Kubernetes +description: Internal common chart for baserow components # A chart can be either an 'application' or a 'library' chart. # @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.0.35 +version: 1.0.36 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/deploy/helm/baserow/charts/baserow-common/templates/_helpers.tpl b/deploy/helm/baserow/charts/baserow-common/templates/_helpers.tpl index c3dc113b06..b0b2b1cbd7 100644 --- a/deploy/helm/baserow/charts/baserow-common/templates/_helpers.tpl +++ b/deploy/helm/baserow/charts/baserow-common/templates/_helpers.tpl @@ -55,7 +55,7 @@ Common labels {{- define "baserow.labels" -}} helm.sh/chart: {{ include "baserow.chart" . }} {{ include "baserow.selectorLabels" . }} -{{ include "baserow.additionalLabels" . }} +{{- include "baserow.additionalLabels" . 
}}
 {{- if .Chart.AppVersion }}
 app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
 {{- end }}
@@ -68,7 +68,7 @@ Selector labels
 {{- define "baserow.selectorLabels" -}}
 app.kubernetes.io/name: {{ include "baserow.name" . }}
 app.kubernetes.io/instance: {{ .Release.Name }}
-{{ include "baserow.additionalSelectorLabels" . }}
+{{- include "baserow.additionalSelectorLabels" . }}
 {{- end }}
 
 {{/*
diff --git a/deploy/helm/baserow/templates/backend-configmap.yaml b/deploy/helm/baserow/templates/backend-configmap.yaml
index b7e7bc9a3b..473d31ca92 100644
--- a/deploy/helm/baserow/templates/backend-configmap.yaml
+++ b/deploy/helm/baserow/templates/backend-configmap.yaml
@@ -19,6 +19,12 @@ data:
   AWS_S3_CUSTOM_DOMAIN: {{ .Values.global.baserow.objectsDomain }}/{{ (index .Values.minio.provisioning.buckets 0).name }}
   AWS_S3_REGION_NAME: "us-east-1"
 {{- end }}
+{{- if .Values.global.baserow.assistantLLMModel }}
+  BASEROW_ENTERPRISE_ASSISTANT_LLM_MODEL: "{{ .Values.global.baserow.assistantLLMModel }}"
+{{- end }}
+{{- if (index .Values "baserow-embeddings").enabled }}
+  BASEROW_EMBEDDINGS_API_URL: http://{{ include "baserow.fullname" (index .Subcharts "baserow-embeddings") }}
+{{- end }}
 {{- range $key, $val := .Values.backendConfigMap }}
   {{ $key }}: {{ $val | quote }}
 {{- end }}
diff --git a/deploy/helm/baserow/values.yaml b/deploy/helm/baserow/values.yaml
index 3f27433af7..bef335008d 100644
--- a/deploy/helm/baserow/values.yaml
+++ b/deploy/helm/baserow/values.yaml
@@ -59,6 +59,7 @@ global:
     domain: cluster.local
     backendDomain: api.cluster.local
     objectsDomain: objects.cluster.local
+    assistantLLMModel: "groq/openai/gpt-oss-120b"
 
   securityContext:
     enabled: false
@@ -545,6 +546,64 @@ baserow-celery-flower:
     - celery-flower
   replicaCount: 1
 
+## Configuration for the Baserow Embeddings service
+## @param baserow-embeddings.enabled Set to true to enable the Baserow Embeddings service.
+## @param baserow-embeddings.assistantLLMModel The LLM model to use for the AI assistant (e.g., "groq/openai/gpt-oss-120b").
+## @param baserow-embeddings.image.repository Docker image repository for the Embeddings service.
+## @param baserow-embeddings.resources Resource requests and limits for the Embeddings service.
+## @param baserow-embeddings.autoscaling.enabled Enable autoscaling for the Embeddings service.
+## @param baserow-embeddings.autoscaling.minReplicas Minimum number of replicas for autoscaling.
+## @param baserow-embeddings.autoscaling.maxReplicas Maximum number of replicas for autoscaling.
+## @param baserow-embeddings.autoscaling.targetCPUUtilizationPercentage Target CPU utilization percentage for autoscaling.
+## @param baserow-embeddings.service.port Service port for the Embeddings service.
+## @param baserow-embeddings.service.targetPort Target port for the Embeddings service.
+## @param baserow-embeddings.readinessProbe.initialDelaySeconds Initial delay for readiness probe.
+## @param baserow-embeddings.readinessProbe.periodSeconds Period for readiness probe.
+## @param baserow-embeddings.readinessProbe.timeoutSeconds Timeout for readiness probe.
+## @param baserow-embeddings.livenessProbe.initialDelaySeconds Initial delay for liveness probe.
+## @param baserow-embeddings.livenessProbe.periodSeconds Period for liveness probe.
+## @param baserow-embeddings.livenessProbe.timeoutSeconds Timeout for liveness probe.
+baserow-embeddings: + enabled: false + image: + repository: embeddings + resources: + requests: + cpu: 800m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi + readinessProbe: + enabled: true + httpGet: + path: /health + port: 80 + initialDelaySeconds: 10 + periodSeconds: 10 + timeoutSeconds: 5 + successThreshold: 1 + failureThreshold: 3 + livenessProbe: + enabled: true + httpGet: + path: /health + port: 80 + initialDelaySeconds: 10 + periodSeconds: 10 + timeoutSeconds: 5 + successThreshold: 1 + failureThreshold: 3 + autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 3 + targetCPUUtilizationPercentage: 80 + service: + type: ClusterIP + port: 80 + targetPort: 80 + ## @section Ingress Configuration ## Configuration for the Ingress resource ## @param ingress.enabled Enable the Ingress resource diff --git a/docs/installation/install-on-aws.md b/docs/installation/install-on-aws.md index f230538256..992f1aace8 100644 --- a/docs/installation/install-on-aws.md +++ b/docs/installation/install-on-aws.md @@ -17,7 +17,7 @@ Baserow can be deployed to AWS in the following ways: file or our [sample K8S configuration](./install-with-k8s.md) as a starting point to configure ECS/Fargate tasks for more advanced, production ready, one service per container model. **See below for a detailed guide** -3. Using the [Baserow community maintained helm chart](./install-with-helm.md) and EKS. +3. Using the [Official Baserow helm chart](./install-with-helm.md) and EKS. 4. Customizing our [sample K8S configuration](./install-with-k8s.md) and using that with EKS. 5. Installing and using docker/docker-compose on an EC2 instance with @@ -209,7 +209,7 @@ with 2vCPUs and 4 GB of RAM each to start with. In short, you will want to: > found [here](./configuration.md). | Env variable | Description | -|-------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `DISABLE_VOLUME_CHECK=true` | *Must be set to true*. Needed to disable the check designed to help non-technical users who are not configuring an external Postgres and S3. Because we are configuring external services we do not need any volume mounted into the container. | | `BASEROW_PUBLIC_URL` | The public URL or IP that will be used to access baserow in your user's browser. Always should start with http:// https:// even if accessing via an IP address. | | `DATABASE_HOST` | The hostname of the Postgres database Baserow will use to store its data in. | @@ -371,7 +371,7 @@ in-tool settings, active enterprise licenses, promote other users to being staff The `baserow/backend:1.35.3` and `baserow/web-frontend:1.35.3` images allow you to run Baserow's various services as separate containers. 
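
As a quick illustration of this one-container-per-service model, the backend image can be started on its own once the external services exist. This is a hypothetical sketch only: the hostnames are placeholders, the variables shown are just a subset of the tables below, and the image's default command is assumed to start the API service.

```bash
# Illustrative only: run the standalone backend image against an
# externally managed Postgres, passing configuration as env vars.
docker run --rm \
  -e DISABLE_VOLUME_CHECK=true \
  -e BASEROW_PUBLIC_URL=https://baserow.example.com \
  -e DATABASE_HOST=your-postgres-host \
  baserow/backend:1.35.3
```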
-These images are used by the community Helm chart, our various docker-compose.yml
+These images are used by the official Helm chart, our various docker-compose.yml
 example setups and are best for production environments where you want full control
 and flexibility managing Baserow.
@@ -456,7 +456,7 @@ This service is our HTTP REST API service. When creating the task definition you
 file to share these is also a good idea.
 
 | Env variable                  | Description |
-|-------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------|
+| ----------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
 | `BASEROW_PUBLIC_URL`          | The public URL or IP that will be used to access baserow in your user's browser. Always should start with http:// https:// even if accessing via an IP address. |
 | `DATABASE_HOST`               | The hostname of the Postgres database Baserow will use to store its data in. |
 | `DATABASE_USER`               | The username of the database user Baserow will use to connect to the database at `DATABASE_HOST`. |
diff --git a/docs/installation/install-with-helm.md b/docs/installation/install-with-helm.md
index 143da7779a..fae3b38ea8 100644
--- a/docs/installation/install-with-helm.md
+++ b/docs/installation/install-with-helm.md
@@ -9,12 +9,35 @@
 certificates. Here you can also find documentation for all the configuration
 possibilities like using an external PostgreSQL server, how to setup Caddy with various
 Cloud providers, add environment variables, and more.
 
+### Prerequisites
+
+Before installing Baserow with Helm, ensure you have:
+
+1. **Kubernetes Cluster**: A running Kubernetes cluster (v1.19+)
+2. **Helm**: Helm 3.x installed ([installation guide](https://helm.sh/docs/intro/install/))
+3. **kubectl**: Configured to access your cluster
+4. **Domains**: Three DNS records (or subdomains) pointing to your cluster:
+   - Main domain (e.g., `baserow.example.com`)
+   - Backend API domain (e.g., `api.baserow.example.com`)
+   - Objects/media domain (e.g., `objects.baserow.example.com`)
+
 ### Installation
 
-Create a `config.yaml` file with the minimum configuration that defines the domains
-you would like it to run on.
+#### Step 1: Add the Helm repository
+
+First, add the Baserow Helm chart repository:
+
+```bash
+helm repo add baserow-chart https://baserow.gitlab.io/baserow-chart
+helm repo update
 ```
+
+#### Step 2: Create configuration file
+
+Create a `config.yaml` file with the minimum configuration that defines the domains
+you would like it to run on:
+
+```yaml
 global:
   baserow:
     domain: "your-baserow-domain.com"
@@ -22,78 +45,392 @@
     backendDomain: "api.your-baserow-domain.com"
     objectsDomain: "objects.your-baserow-domain.com"
 ```
 
-To install the chart with the release name `my-baserow` run the following commands:
+#### Step 3: Install the chart
+
+To install the chart with the release name `my-baserow`, run:
 
+```bash
+helm install my-baserow baserow-chart/baserow \
+  --namespace baserow \
+  --create-namespace \
+  --values config.yaml
 ```
-helm repo add baserow-chart https://baserow.gitlab.io/baserow-chart
-helm install my-baserow baserow-chart/baserow --namespace baserow --create-namespace --values config.yaml
+
+Alternatively, to install the chart from the source code, update its dependencies first and then install (or later upgrade) it from the chart directory:
+
+```bash
+helm dependency update ./deploy/helm/baserow
+helm install my-baserow ./deploy/helm/baserow \
+  --namespace baserow \
+  --create-namespace \
+  --values config.yaml
+helm upgrade my-baserow ./deploy/helm/baserow \
+  --namespace baserow \
+  --values config.yaml
 ```
+
+#### Step 4: Verify installation
+
+Check the deployment status:
+
+```bash
+# Check pod status
+kubectl get pods -n baserow
+
+# Check services
+kubectl get services -n baserow
+
+# Check ingress
+kubectl get ingress -n baserow
+```
+
+Wait for all pods to be in `Running` state. This may take several minutes on first install.
+
+#### Step 5: Access Baserow
+
+Once all pods are running, access Baserow at the domain you configured.
 
 ### Upgrading
 
-You can change the Baserow version you're running by updating your `config.yaml`
-file directly.
+#### Check current version
+
+Before upgrading, check your current installed version:
 
+```bash
+helm list -n baserow
 ```
+
+#### Update Helm repository
+
+Ensure you have the latest chart versions:
+
+```bash
+helm repo update baserow-chart
+```
+
+#### Check available versions
+
+To see available chart versions:
+
+```bash
+helm search repo baserow-chart/baserow --versions
+```
+
+#### Upgrade to latest version
+
+To upgrade to the latest Baserow version using the latest chart:
+
+```bash
+helm upgrade my-baserow baserow-chart/baserow \
+  --namespace baserow \
+  --values config.yaml
+```
+
+#### Upgrade to specific version
+
+You can specify a particular Baserow version by updating your `config.yaml`:
+
+```yaml
 global:
   baserow:
     image: 1.35.3
 ```
 
-Or use the latest Helm chart default. Then run the following command to upgrade your
-installation.
+Or specify the chart version directly:
 
+```bash
+helm upgrade my-baserow baserow-chart/baserow \
+  --namespace baserow \
+  --values config.yaml \
+  --version 1.0.36
 ```
-helm upgrade my-baserow baserow-chart/baserow --namespace baserow --values config.yaml
+
+#### Verify upgrade
+
+After upgrading, verify the new version is running:
+
+```bash
+# Check pod status
+kubectl get pods -n baserow
+
+# Check Baserow version
+kubectl logs -n baserow deployment/my-baserow-baserow-backend-wsgi | grep "Baserow"
 ```
 
-## Community Maintained Helm Chart
+#### Rollback if needed
 
-Find the community
-Baserow [helm chart here](https://artifacthub.io/packages/helm/christianhuth/baserow)
-maintained
-by [Christian Huth](https://github.com/christianhuth).
+If the upgrade fails, you can rollback to the previous version: -We recommend that you: +```bash +# List release history +helm history my-baserow -n baserow -1. Run the chart with ingress enabled: - 2. `backend.ingress.enabled=true` - 3. `frontend.ingress.enabled=true` -4. Make sure you configure two domains, one for the backend api API one for the frontend - server. - 5. Set `config.publicFrontendUrl=https://your-baserow-servers-domain.com` - 5. Set `config.publicBackendUrl=https://api.your-baserow-servers-domain.com` -6. Configure all the relevant `backend.config.aws` variables to upload and serve user - files in a S3 compatible service of your own choosing. +# Rollback to previous revision +helm rollback my-baserow -n baserow + +# Or rollback to specific revision +helm rollback my-baserow 1 -n baserow +``` + +### Configuring AI Features + +Baserow supports multiple AI providers for generative AI features and the AI assistant. To enable AI capabilities, you need to configure the embeddings service and AI providers. + +#### Enable AI Assistant with Embeddings + +Add to your `config.yaml`: + +```yaml +baserow-embeddings: + enabled: true + assistantLLMModel: "groq/openai/gpt-oss-120b" + +backendSecrets: + GROQ_API_KEY: "your-groq-api-key" +``` + +#### Configure Additional AI Providers + +To enable AI field with multiple providers: + +```yaml +backendSecrets: + # OpenAI + BASEROW_OPENAI_API_KEY: "sk-..." + BASEROW_OPENAI_MODELS: "gpt-3.5-turbo,gpt-4o" + + # Anthropic + BASEROW_ANTHROPIC_API_KEY: "sk-ant-..." + BASEROW_ANTHROPIC_MODELS: "claude-3-5-sonnet-20241022" + + # Mistral + BASEROW_MISTRAL_API_KEY: "..." + BASEROW_MISTRAL_MODELS: "mistral-large-latest" +``` + +For self-hosted Ollama: + +```yaml +backendConfigMap: + BASEROW_OLLAMA_HOST: "http://ollama-service:11434" + BASEROW_OLLAMA_MODELS: "llama2,mistral" +``` + +See the [official Helm chart documentation](https://github.com/baserow/baserow/blob/develop/deploy/helm/baserow/README.md) for detailed AI configuration options. -### Deploying Baserow using Helm and K3S +### Testing Baserow with Minikube -[K3S](https://k3s.io/) is an easy way of getting a local K8S cluster running locally for -testing and development. This guide will walk you through setting it K3S with Baserow -using the community helm chart above. +[Minikube](https://minikube.sigs.k8s.io/) is an excellent way to run a local Kubernetes cluster for testing and development. This guide will walk you through setting up Minikube and deploying Baserow using the official Helm chart. -1. Install [K3S](https://docs.k3s.io/quick-start) -2. Install [Helm](https://helm.sh/docs/helm/helm_install/) -3. Configure Helm to use your K3S cluster: +#### Prerequisites + +1. Install [Minikube](https://minikube.sigs.k8s.io/docs/start/) +2. Install [Helm](https://helm.sh/docs/intro/install/) +3. Install [kubectl](https://kubernetes.io/docs/tasks/tools/) + +#### Step 1: Start Minikube + +Start Minikube with recommended resources for Baserow: + +```bash +# Start with 4GB RAM and 2 CPUs (adjust based on your system) +minikube start --memory=4096 --cpus=2 + +# Verify cluster is running +kubectl cluster-info +kubectl get nodes +``` + +#### Step 2: Enable required addons + +Enable the ingress addon for routing traffic: + +```bash +minikube addons enable ingress + +# Verify ingress controller is running +kubectl get pods -n ingress-nginx +``` + +#### Step 3: Configure local DNS + +For local testing, you'll need to configure local DNS or use `/etc/hosts`. 
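+
+If you want, all three entries can be appended in one go (a sketch: it resolves the Minikube IP inline and needs sudo):
+
+```bash
+# Append the three local hostnames, pointing at the Minikube IP, to /etc/hosts
+echo "$(minikube ip) baserow.local api.baserow.local objects.baserow.local" | sudo tee -a /etc/hosts
+```
+
+If you prefer to add the entries manually: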
Get your Minikube IP:
 
 ```bash
-# From https://devops.stackexchange.com/questions/16043/error-error-loading-config-file-etc-rancher-k3s-k3s-yaml-open-etc-rancher
-export KUBECONFIG=~/.kube/config
-mkdir ~/.kube 2> /dev/null
-# Make sure you aren't overriding an existing k8s configuration in ~/.kube/config
-(set -o noclobber; sudo k3s kubectl config view --raw > "$KUBECONFIG")
-chmod 600 "$KUBECONFIG"
-# Check you can access the cluster
-helm ls --all-namespaces
-# You should see something like
-# NAME NAMESPACE REVISION UPDATED STATUS CHART APP VERSION
+minikube ip
 ```
 
-4. Install Baserow using Helm
+Add entries to your `/etc/hosts` file (replace `<minikube-ip>` with the actual IP):
 
 ```bash
-helm repo add christianknell https://christianknell.github.io/helm-charts
+<minikube-ip> baserow.local
+<minikube-ip> api.baserow.local
+<minikube-ip> objects.baserow.local
+```
+
+#### Step 4: Create Baserow configuration
+
+Create a `config.yaml` file for local testing:
+
+```yaml
+global:
+  baserow:
+    domain: "baserow.local"
+    backendDomain: "api.baserow.local"
+    objectsDomain: "objects.baserow.local"
+
+# Disable Caddy since we're using Minikube ingress
+caddy:
+  enabled: false
+
+# Use smaller resource requests for local testing
+baserow-backend-wsgi:
+  resources:
+    requests:
+      memory: "256Mi"
+      cpu: "100m"
+    limits:
+      memory: "512Mi"
+      cpu: "500m"
+
+baserow-backend-asgi:
+  resources:
+    requests:
+      memory: "256Mi"
+      cpu: "100m"
+    limits:
+      memory: "512Mi"
+      cpu: "500m"
+
+baserow-frontend:
+  resources:
+    requests:
+      memory: "256Mi"
+      cpu: "100m"
+    limits:
+      memory: "512Mi"
+      cpu: "500m"
+
+# Configure ingress for Minikube
+ingress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    nginx.ingress.kubernetes.io/ssl-redirect: "false"
+```
+
+#### Step 5: Install Baserow
+
+Add the Baserow Helm repository and install:
+
+```bash
+# Add Baserow chart repository
+helm repo add baserow-chart https://baserow.gitlab.io/baserow-chart
 helm repo update
-helm install 1.5.3 christianknell/baserow
-# Finally follow the printed instructions.
+
+# Install Baserow
+helm install baserow baserow-chart/baserow \
+  --namespace baserow \
+  --create-namespace \
+  --values config.yaml
 ```
+
+#### Step 6: Monitor deployment
+
+Watch the pods come up:
+
+```bash
+# Watch all pods
+kubectl get pods -n baserow -w
+
+# Check deployment status
+kubectl get deployments -n baserow
+
+# Check services
+kubectl get services -n baserow
+
+# Check ingress
+kubectl get ingress -n baserow
+```
+
+Wait until all pods show `Running` status. This may take 5-10 minutes on first deployment.
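+
+If you would rather block until everything is ready than watch the output, `kubectl wait` can do this (a sketch; adjust the timeout to your machine):
+
+```bash
+# Block until every pod in the namespace reports Ready, up to 10 minutes
+kubectl wait --namespace baserow --for=condition=Ready pod --all --timeout=600s
+```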
+#### Step 7: Access Baserow
+
+Once all pods are running, access Baserow at:
+- Frontend: http://baserow.local
+- API: http://api.baserow.local
+- Objects: http://objects.baserow.local
+
+#### Step 8: Test the deployment
+
+```bash
+# Port-forward to access directly (alternative to ingress)
+kubectl port-forward -n baserow svc/baserow-baserow-frontend 8000:80
+
+# Access at http://localhost:8000
+```
+
+#### Troubleshooting
+
+**Pods not starting:**
+```bash
+# Check pod logs
+kubectl logs -n baserow <pod-name>
+
+# Describe pod for events
+kubectl describe pod <pod-name> -n baserow
+```
+
+**Out of resources:**
+```bash
+# Increase Minikube resources
+minikube stop
+minikube delete
+minikube start --memory=8192 --cpus=4
+```
+
+**Ingress not working:**
+```bash
+# Check ingress controller
+kubectl get pods -n ingress-nginx
+
+# Check ingress configuration
+kubectl describe ingress -n baserow
+```
+
+#### Cleanup
+
+When done testing, you can clean up:
+
+```bash
+# Uninstall Baserow
+helm uninstall baserow -n baserow
+
+# Delete namespace
+kubectl delete namespace baserow
+
+# Stop Minikube
+minikube stop
+
+# Delete Minikube cluster
+minikube delete
+```
+
+
+## Alternative Community Maintained Helm Chart
+
+Find the community Baserow [helm chart here](https://artifacthub.io/packages/helm/christianhuth/baserow)
+maintained by [Christian Huth](https://github.com/christianhuth).
+
+We recommend that you:
+
+1. Run the chart with ingress enabled:
+   1. `backend.ingress.enabled=true`
+   2. `frontend.ingress.enabled=true`
+2. Make sure you configure two domains, one for the backend API and one for the frontend
+   server.
+   1. Set `config.publicFrontendUrl=https://your-baserow-servers-domain.com`
+   2. Set `config.publicBackendUrl=https://api.your-baserow-servers-domain.com`
+3. Configure all the relevant `backend.config.aws` variables to upload and serve user
+   files in an S3-compatible service of your own choosing.
\ No newline at end of file
diff --git a/docs/installation/install-with-k8s.md b/docs/installation/install-with-k8s.md
index 3d76d34dd9..d6ccba6931 100644
--- a/docs/installation/install-with-k8s.md
+++ b/docs/installation/install-with-k8s.md
@@ -1,8 +1,8 @@
 # Install with K8S
 
-## Community Maintained Helm Chart
+## Official Baserow Helm Chart
 
-We recommend you use the [community maintained helm chart](./install-with-helm.md) to
+We recommend you use the [official Baserow Helm chart](./install-with-helm.md) to
## Raw K8S starting point From 6d27a73ce3a036369a500ee4e0fbcebae96cf856 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9mie=20Pardou?= <571533+jrmi@users.noreply.github.com> Date: Fri, 14 Nov 2025 16:26:29 +0100 Subject: [PATCH 2/6] Fix payload update issue (#4262) --- .../modules/automation/store/automationWorkflowNode.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/web-frontend/modules/automation/store/automationWorkflowNode.js b/web-frontend/modules/automation/store/automationWorkflowNode.js index ae140187a1..b18701820f 100644 --- a/web-frontend/modules/automation/store/automationWorkflowNode.js +++ b/web-frontend/modules/automation/store/automationWorkflowNode.js @@ -20,9 +20,8 @@ const updateContext = { const updateCachedValues = (workflow) => { if (!workflow || !workflow.nodes) return - Object.assign( - workflow.nodeMap, - Object.fromEntries(workflow.nodes.map((node) => [`${node.id}`, node])) + workflow.nodeMap = Object.fromEntries( + workflow.nodes.map((node) => [`${node.id}`, node]) ) } From 89ae76522163529c5219e48d0a87ebf3c83f4255 Mon Sep 17 00:00:00 2001 From: Peter Evans Date: Fri, 14 Nov 2025 16:00:50 +0000 Subject: [PATCH 3/6] Improvements to the changelog. 1) Adding the integration domain and 2) Introduce the issue_origin support. By default this will be github for new entries, but for compat reasons, when we generate the markdown for entries, if the issue_origin isn't present then we'll default to gitlab. (#4255) --- changelog/src/changelog.py | 6 +++++- changelog/src/changelog_entry.py | 14 +++++++++++--- changelog/src/domains.py | 6 ++++++ changelog/src/handler.py | 19 +++++++++++++++---- .../tests/changelog/test_changelog_handler.py | 16 ++++++++-------- 5 files changed, 45 insertions(+), 16 deletions(-) diff --git a/changelog/src/changelog.py b/changelog/src/changelog.py index 1bd581a33f..2506353707 100755 --- a/changelog/src/changelog.py +++ b/changelog/src/changelog.py @@ -43,7 +43,11 @@ def add(working_dir: Optional[str] = typer.Option(default=default_path)): issue_number = None ChangelogHandler(working_dir).add_entry( - domain_type, changelog_type, message, issue_number=issue_number + domain_type, + changelog_type, + message, + issue_number=issue_number, + issue_origin="github", # All new changelogs originate from GitHub ) diff --git a/changelog/src/changelog_entry.py b/changelog/src/changelog_entry.py index b8a59e23bc..2187b52f7e 100644 --- a/changelog/src/changelog_entry.py +++ b/changelog/src/changelog_entry.py @@ -4,6 +4,7 @@ from typing import Dict, List, Optional, Union GITLAB_URL = os.environ.get("GITLAB_URL", "https://gitlab.com/baserow/baserow") +GITHUB_URL = os.environ.get("GITHUB_URL", "https://github.com/baserow/baserow") class ChangelogEntry(abc.ABC): @@ -17,6 +18,7 @@ def generate_entry_dict( self, domain_type_name: str, message: str, + issue_origin: str, issue_number: Optional[int] = None, bullet_points: List[str] = None, ) -> Dict[str, any]: @@ -26,18 +28,24 @@ def generate_entry_dict( return { "type": self.type, "message": message, - "domain": domain_type_name, + "issue_origin": issue_origin, "issue_number": issue_number, + "domain": domain_type_name, "bullet_points": bullet_points, "created_at": datetime.now(tz=timezone.utc).strftime("%Y-%m-%d"), } @staticmethod - def get_markdown_string(message: str, issue_number: Union[int, None] = None) -> str: + def get_markdown_string( + message: str, + issue_number: Union[int, None] = None, + issue_origin: Optional[str] = "gitlab", + ) -> str: string = f"* {message}" if 
issue_number is not None:
-            string += f" [#{issue_number}]({GITLAB_URL}/-/issues/{issue_number})"
+            if issue_origin == "gitlab":
+                string += f" [#{issue_number}]({GITLAB_URL}/-/issues/{issue_number})"
+            else:
+                # GitHub issue URLs do not use the GitLab-style `/-/` path segment.
+                string += f" [#{issue_number}]({GITHUB_URL}/issues/{issue_number})"
 
         return string
diff --git a/changelog/src/domains.py b/changelog/src/domains.py
index 2676ca82c2..eea54e7ff8 100644
--- a/changelog/src/domains.py
+++ b/changelog/src/domains.py
@@ -35,10 +35,16 @@ class AutomationDomain(BaserowDomain):
     heading = "Automation"
 
 
+class IntegrationDomain(BaserowDomain):
+    type = "integration"
+    heading = "Integration"
+
+
 domain_types: Dict[str, type[BaserowDomain]] = {
     CoreDomain.type: CoreDomain,
     DashboardDomain.type: DashboardDomain,
     DatabaseDomain.type: DatabaseDomain,
     BuilderDomain.type: BuilderDomain,
     AutomationDomain.type: AutomationDomain,
+    IntegrationDomain.type: IntegrationDomain,
 }
diff --git a/changelog/src/handler.py b/changelog/src/handler.py
index a4432542ae..23d658e763 100644
--- a/changelog/src/handler.py
+++ b/changelog/src/handler.py
@@ -40,6 +40,7 @@ def add_entry(
         changelog_entry_type_name: str,
         message: str,
         issue_number: Optional[int] = None,
+        issue_origin: Optional[str] = "github",
         release: str = UNRELEASED_FOLDER_NAME,
         bullet_points: List[str] = None,
     ) -> str:
@@ -57,7 +58,11 @@
         with open(full_path, "w+") as entry_file:
             entry = changelog_entry_type().generate_entry_dict(
-                domain_type_name, message, issue_number, bullet_points=bullet_points
+                domain_type_name,
+                message,
+                issue_origin,
+                issue_number,
+                bullet_points=bullet_points,
             )
 
             json.dump(entry, entry_file, indent=4)
@@ -161,8 +166,13 @@ def generate_changelog_markdown_file(self):
                     )
                     entry_message = f"{domain_prefix}{entry['message']}"
 
+                    # Note: if no `issue_origin` is found, we default to "gitlab"
+                    # for compatibility with older entries. All new entries will
+                    # point to "github" as their origin.
                     entry_markdown_string = entry_type.get_markdown_string(
-                        entry_message, entry["issue_number"]
+                        entry_message,
+                        entry["issue_number"],
+                        entry.get("issue_origin", "gitlab"),
                     )
 
                     changelog_file.write(
@@ -193,7 +203,8 @@ def move_entries_to_release_folder(
 
         # Delete all .gitignore files in the subfolders of release folder because
         # there is no reason to have empty folders there.
for gitignore_file in glob.glob( - f"{release_path}/**/.gitkeep", recursive=True): + f"{release_path}/**/.gitkeep", recursive=True + ): os.remove(gitignore_file) # Delete all empty subfolders in the release folder because we don't need @@ -213,7 +224,7 @@ def move_entries_to_release_folder( except FileExistsError: print(f'Release with name "{release_name}" already exists.') except OSError as e: - print(f'OS error occurred: {e}') + print(f"OS error occurred: {e}") return None @staticmethod diff --git a/changelog/tests/changelog/test_changelog_handler.py b/changelog/tests/changelog/test_changelog_handler.py index b9b0dc2457..5cded44b9c 100644 --- a/changelog/tests/changelog/test_changelog_handler.py +++ b/changelog/tests/changelog/test_changelog_handler.py @@ -18,29 +18,29 @@ def test_add_entry(fs): def test_get_changelog_entries(fs): handler = ChangelogHandler() - handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "1") - handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "2") + handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "msg1") + handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "msg2") changelog_entries = handler.get_changelog_entries() assert BugChangelogEntry.type in changelog_entries assert [ - BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "1"), - BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "2"), + BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "msg1", "github"), + BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "msg2", "github"), ] in changelog_entries.values() def test_get_changelog_entries_order(fs): handler = ChangelogHandler() - handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "2") - handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "1") + handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "msg2") + handler.add_entry(DatabaseDomain.type, BugChangelogEntry.type, "msg1") changelog_entries = handler.get_changelog_entries() assert BugChangelogEntry.type in changelog_entries assert [ - BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "1"), - BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "2"), + BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "msg1", "github"), + BugChangelogEntry().generate_entry_dict(DatabaseDomain.type, "msg2", "github"), ] in changelog_entries.values() From b364938befac8fd1c5c233084bccabf73c2ce474 Mon Sep 17 00:00:00 2001 From: Davide Silvestri <75379892+silvestrid@users.noreply.github.com> Date: Fri, 14 Nov 2025 18:07:59 +0100 Subject: [PATCH 4/6] bug (AI Assistant): fix filter name for the empty/not empty filter --- .../tools/database/types/view_filters.py | 12 +- ...t_assistant_database_view_filters_tools.py | 713 ++++++++++++++++++ 2 files changed, 717 insertions(+), 8 deletions(-) create mode 100644 enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/view_filters.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/view_filters.py index 76f5b8711f..343fc14773 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/view_filters.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/view_filters.py @@ -76,9 +76,7 @@ class TextNotContainsViewFilterItem( class TextEmptyViewFilterItemCreate(TextViewFilterItemCreate): - operator: Literal["is_empty"] = Field( 
- ..., description="Checks if the field is empty." - ) + operator: Literal["empty"] = Field(..., description="Checks if the field is empty.") class TextEmptyViewFilterItem(TextEmptyViewFilterItemCreate, ViewFilterItem): @@ -86,7 +84,7 @@ class TextEmptyViewFilterItem(TextEmptyViewFilterItemCreate, ViewFilterItem): class TextNotEmptyViewFilterItemCreate(TextViewFilterItemCreate): - operator: Literal["is_not_empty"] = Field( + operator: Literal["not_empty"] = Field( ..., description="Checks if the field is not empty." ) @@ -183,9 +181,7 @@ class NumberLowerThanViewFilterItem( class NumberEmptyViewFilterItemCreate(NumberViewFilterItemCreate): - operator: Literal["is_empty"] = Field( - ..., description="Checks if the field is empty." - ) + operator: Literal["empty"] = Field(..., description="Checks if the field is empty.") class NumberEmptyViewFilterItem(NumberEmptyViewFilterItemCreate, ViewFilterItem): @@ -193,7 +189,7 @@ class NumberEmptyViewFilterItem(NumberEmptyViewFilterItemCreate, ViewFilterItem) class NumberNotEmptyViewFilterItemCreate(NumberViewFilterItemCreate): - operator: Literal["is_not_empty"] = Field( + operator: Literal["not_empty"] = Field( ..., description="Checks if the field is not empty." ) diff --git a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py new file mode 100644 index 0000000000..b8d5e13b4e --- /dev/null +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py @@ -0,0 +1,713 @@ +import pytest + +from baserow.contrib.database.views.models import ViewFilter +from baserow_enterprise.assistant.tools.database.types import ( + BooleanIsViewFilterItemCreate, + DateAfterViewFilterItemCreate, + DateBeforeViewFilterItemCreate, + DateEqualsViewFilterItemCreate, + DateNotEqualsViewFilterItemCreate, + LinkRowHasNotViewFilterItemCreate, + LinkRowHasViewFilterItemCreate, + MultipleSelectIsAnyViewFilterItemCreate, + MultipleSelectIsNoneOfNotViewFilterItemCreate, + NumberEmptyViewFilterItemCreate, + NumberEqualsViewFilterItemCreate, + NumberHigherThanViewFilterItemCreate, + NumberLowerThanViewFilterItemCreate, + NumberNotEmptyViewFilterItemCreate, + NumberNotEqualsViewFilterItemCreate, + SingleSelectIsAnyViewFilterItemCreate, + SingleSelectIsNoneOfNotViewFilterItemCreate, + TextContainsViewFilterItemCreate, + TextEmptyViewFilterItemCreate, + TextEqualViewFilterItemCreate, + TextNotContainsViewFilterItemCreate, + TextNotEmptyViewFilterItemCreate, + TextNotEqualViewFilterItemCreate, +) +from baserow_enterprise.assistant.tools.database.types.base import Date +from baserow_enterprise.assistant.tools.database.types.view_filters import ( + ViewFilterItemCreate, +) +from baserow_enterprise.assistant.tools.database.utils import create_view_filter + + +@pytest.mark.django_db +def test_all_text_filters_conversion(data_fixture): + """Test all text filter types can be converted to Baserow filters.""" + + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + database = data_fixture.create_database_application(workspace=workspace) + table = data_fixture.create_database_table(database=database) + field = data_fixture.create_text_field(table=table, name="Text Field") + view = data_fixture.create_grid_view(table=table) + table_fields = {field.id: field} + + text_filters = [ + ( + TextEqualViewFilterItemCreate( + field_id=field.id, type="text", 
operator="equal", value="test" + ), + "equal", + "test", + ), + ( + TextNotEqualViewFilterItemCreate( + field_id=field.id, type="text", operator="not_equal", value="test" + ), + "not_equal", + "test", + ), + ( + TextContainsViewFilterItemCreate( + field_id=field.id, type="text", operator="contains", value="keyword" + ), + "contains", + "keyword", + ), + ( + TextNotContainsViewFilterItemCreate( + field_id=field.id, type="text", operator="contains_not", value="spam" + ), + "contains_not", + "spam", + ), + ( + TextEmptyViewFilterItemCreate( + field_id=field.id, type="text", operator="empty", value="" + ), + "empty", + "", + ), + ( + TextNotEmptyViewFilterItemCreate( + field_id=field.id, type="text", operator="not_empty", value="" + ), + "not_empty", + "", + ), + ] + + for filter_create, expected_type, expected_value in text_filters: + created_filter = create_view_filter(user, view, table_fields, filter_create) + + assert created_filter is not None + assert created_filter.view.id == view.id + assert created_filter.field.id == field.id + assert created_filter.type == expected_type + assert created_filter.value == expected_value + + # Verify in database + assert ViewFilter.objects.filter( + view=view, field=field, type=expected_type + ).exists() + + +@pytest.mark.django_db +def test_all_number_filters_conversion(data_fixture): + """Test all number filter types can be converted to Baserow filters.""" + + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + database = data_fixture.create_database_application(workspace=workspace) + table = data_fixture.create_database_table(database=database) + field = data_fixture.create_number_field(table=table, name="Number Field") + view = data_fixture.create_grid_view(table=table) + table_fields = {field.id: field} + + number_filters = [ + ( + NumberEqualsViewFilterItemCreate( + field_id=field.id, type="number", operator="equal", value=42.0 + ), + "equal", + "42.0", + ), + ( + NumberNotEqualsViewFilterItemCreate( + field_id=field.id, type="number", operator="not_equal", value=0.0 + ), + "not_equal", + "0.0", + ), + ( + NumberHigherThanViewFilterItemCreate( + field_id=field.id, + type="number", + operator="higher_than", + value=100.0, + or_equal=False, + ), + "higher_than", + "100.0", + ), + ( + NumberLowerThanViewFilterItemCreate( + field_id=field.id, + type="number", + operator="lower_than", + value=50.0, + or_equal=False, + ), + "lower_than", + "50.0", + ), + ( + NumberEmptyViewFilterItemCreate( + field_id=field.id, type="number", operator="empty", value=0.0 + ), + "empty", + "0.0", + ), + ( + NumberNotEmptyViewFilterItemCreate( + field_id=field.id, type="number", operator="not_empty", value=0.0 + ), + "not_empty", + "0.0", + ), + ] + + for filter_create, expected_type, expected_value in number_filters: + created_filter = create_view_filter(user, view, table_fields, filter_create) + + assert created_filter is not None + assert created_filter.type == expected_type + assert created_filter.value == expected_value + assert ViewFilter.objects.filter( + view=view, field=field, type=expected_type + ).exists() + + +@pytest.mark.django_db +def test_all_date_filters_conversion(data_fixture): + """Test all date filter types can be converted to Baserow filters.""" + + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + database = data_fixture.create_database_application(workspace=workspace) + table = data_fixture.create_database_table(database=database) + field = 
data_fixture.create_date_field(table=table, name="Date Field") + view = data_fixture.create_grid_view(table=table) + table_fields = {field.id: field} + + # Test with exact date + date_filter = DateEqualsViewFilterItemCreate( + field_id=field.id, + type="date", + operator="equal", + value=Date(year=2024, month=1, day=15), + mode="exact_date", + ) + created_filter = create_view_filter(user, view, table_fields, date_filter) + assert created_filter.type == "date_is" + assert "2024-01-15" in created_filter.value + assert created_filter.value.endswith("?exact_date") + + # Test with relative date (today) + date_filter2 = DateNotEqualsViewFilterItemCreate( + field_id=field.id, type="date", operator="not_equal", value=None, mode="today" + ) + created_filter2 = create_view_filter(user, view, table_fields, date_filter2) + assert created_filter2.type == "date_is_not" + assert created_filter2.value.endswith("??today") + + # Test date_is_after + date_filter3 = DateAfterViewFilterItemCreate( + field_id=field.id, + type="date", + operator="after", + value=7, + mode="nr_days_ago", + or_equal=False, + ) + created_filter3 = create_view_filter(user, view, table_fields, date_filter3) + assert created_filter3.type == "date_is_after" + assert "?7?" in created_filter3.value + assert created_filter3.value.endswith("nr_days_ago") + + # Test date_is_on_or_after + date_filter4 = DateAfterViewFilterItemCreate( + field_id=field.id, + type="date", + operator="after", + value=30, + mode="nr_days_from_now", + or_equal=True, + ) + created_filter4 = create_view_filter(user, view, table_fields, date_filter4) + assert created_filter4.type == "date_is_on_or_after" + + # Test date_is_before + date_filter5 = DateBeforeViewFilterItemCreate( + field_id=field.id, + type="date", + operator="before", + value=None, + mode="tomorrow", + or_equal=False, + ) + created_filter5 = create_view_filter(user, view, table_fields, date_filter5) + assert created_filter5.type == "date_is_before" + + # Test date_is_on_or_before + date_filter6 = DateBeforeViewFilterItemCreate( + field_id=field.id, + type="date", + operator="before", + value=14, + mode="nr_weeks_from_now", + or_equal=True, + ) + created_filter6 = create_view_filter(user, view, table_fields, date_filter6) + assert created_filter6.type == "date_is_on_or_before" + + +@pytest.mark.django_db +def test_all_single_select_filters_conversion(data_fixture): + """Test all single select filter types can be converted to Baserow filters.""" + + user = data_fixture.create_user() + workspace = data_fixture.create_workspace(user=user) + database = data_fixture.create_database_application(workspace=workspace) + table = data_fixture.create_database_table(database=database) + field = data_fixture.create_single_select_field(table=table, name="Status") + option1 = data_fixture.create_select_option(field=field, value="Active", order=1) + option2 = data_fixture.create_select_option(field=field, value="Pending", order=2) + option3 = data_fixture.create_select_option(field=field, value="Inactive", order=3) + view = data_fixture.create_grid_view(table=table) + table_fields = {field.id: field} + + # Test is_any_of + filter_create = SingleSelectIsAnyViewFilterItemCreate( + field_id=field.id, + type="single_select", + operator="is_any_of", + value=["Active", "Pending"], + ) + created_filter = create_view_filter(user, view, table_fields, filter_create) + assert created_filter.type == "single_select_is_any_of" + # Value should contain option IDs + option_ids = created_filter.value.split(",") + assert str(option1.id) 
in option_ids
+    assert str(option2.id) in option_ids
+    assert len(option_ids) == 2
+
+    # Test case-insensitive matching
+    filter_create2 = SingleSelectIsAnyViewFilterItemCreate(
+        field_id=field.id,
+        type="single_select",
+        operator="is_any_of",
+        value=["active"],  # lowercase
+    )
+    created_filter2 = create_view_filter(user, view, table_fields, filter_create2)
+    assert str(option1.id) in created_filter2.value
+
+    # Test is_none_of
+    filter_create3 = SingleSelectIsNoneOfNotViewFilterItemCreate(
+        field_id=field.id,
+        type="single_select",
+        operator="is_none_of",
+        value=["Inactive"],
+    )
+    created_filter3 = create_view_filter(user, view, table_fields, filter_create3)
+    assert created_filter3.type == "single_select_is_none_of"
+    assert str(option3.id) in created_filter3.value
+
+
+@pytest.mark.django_db
+def test_all_multiple_select_filters_conversion(data_fixture):
+    """Test all multiple select filter types can be converted to Baserow filters."""
+
+    user = data_fixture.create_user()
+    workspace = data_fixture.create_workspace(user=user)
+    database = data_fixture.create_database_application(workspace=workspace)
+    table = data_fixture.create_database_table(database=database)
+    field = data_fixture.create_multiple_select_field(table=table, name="Tags")
+    option1 = data_fixture.create_select_option(field=field, value="Important", order=1)
+    option2 = data_fixture.create_select_option(field=field, value="Urgent", order=2)
+    option3 = data_fixture.create_select_option(field=field, value="Archived", order=3)
+    view = data_fixture.create_grid_view(table=table)
+    table_fields = {field.id: field}
+
+    # Test is_any_of (has)
+    filter_create = MultipleSelectIsAnyViewFilterItemCreate(
+        field_id=field.id,
+        type="multiple_select",
+        operator="is_any_of",
+        value=["Important", "Urgent"],
+    )
+    created_filter = create_view_filter(user, view, table_fields, filter_create)
+    assert created_filter.type == "multiple_select_has"
+    option_ids = created_filter.value.split(",")
+    assert str(option1.id) in option_ids
+    assert str(option2.id) in option_ids
+
+    # Test is_none_of (has_not)
+    filter_create2 = MultipleSelectIsNoneOfNotViewFilterItemCreate(
+        field_id=field.id,
+        type="multiple_select",
+        operator="is_none_of",
+        value=["Archived"],
+    )
+    created_filter2 = create_view_filter(user, view, table_fields, filter_create2)
+    assert created_filter2.type == "multiple_select_has_not"
+    assert str(option3.id) in created_filter2.value
+
+
+@pytest.mark.django_db
+@pytest.mark.skip(
+    reason="Link row filters have a bug in Baserow (UnboundLocalError in view_filters.py:1301)"
+)
+def test_all_link_row_filters_conversion(data_fixture):
+    """Test all link row filter types can be converted to Baserow filters."""
+
+    user = data_fixture.create_user()
+    workspace = data_fixture.create_workspace(user=user)
+    database = data_fixture.create_database_application(workspace=workspace)
+    table1 = data_fixture.create_database_table(database=database, name="Projects")
+    table2 = data_fixture.create_database_table(database=database, name="Tasks")
+    field = data_fixture.create_link_row_field(table=table1, link_row_table=table2)
+    view = data_fixture.create_grid_view(table=table1)
+    table_fields = {field.id: field}
+
+    # Test link_row_has
+    filter_create = LinkRowHasViewFilterItemCreate(
+        field_id=field.id, type="link_row", operator="has", value=123
+    )
+    created_filter = create_view_filter(user, view, table_fields, filter_create)
+    assert created_filter.type == "link_row_has"
+    assert created_filter.value == "123"
+
+    # Test link_row_has_not
+    filter_create2 = LinkRowHasNotViewFilterItemCreate(
+        field_id=field.id, type="link_row", operator="has_not", value=456
+    )
+    created_filter2 = create_view_filter(user, view, table_fields, filter_create2)
+    assert created_filter2.type == "link_row_has_not"
+    assert created_filter2.value == "456"
+
+
+@pytest.mark.django_db
+def test_all_boolean_filters_conversion(data_fixture):
+    """Test all boolean filter types can be converted to Baserow filters."""
+
+    user = data_fixture.create_user()
+    workspace = data_fixture.create_workspace(user=user)
+    database = data_fixture.create_database_application(workspace=workspace)
+    table = data_fixture.create_database_table(database=database)
+    field = data_fixture.create_boolean_field(table=table, name="Active")
+    view = data_fixture.create_grid_view(table=table)
+    table_fields = {field.id: field}
+
+    # Test is true
+    filter_create = BooleanIsViewFilterItemCreate(
+        field_id=field.id, type="boolean", operator="is", value=True
+    )
+    created_filter = create_view_filter(user, view, table_fields, filter_create)
+    assert created_filter.type == "boolean"
+    assert created_filter.value == "1"
+
+    # Test is false
+    filter_create2 = BooleanIsViewFilterItemCreate(
+        field_id=field.id, type="boolean", operator="is", value=False
+    )
+    created_filter2 = create_view_filter(user, view, table_fields, filter_create2)
+    assert created_filter2.type == "boolean"
+    assert created_filter2.value == "0"
+
+
+def get_all_concrete_filter_classes():
+    """
+    Recursively find all concrete ViewFilterItemCreate subclasses. Concrete classes are
+    those that have specific operators and are meant to be instantiated.
+    """
+
+    def get_all_subclasses(cls):
+        all_subclasses = []
+        for subclass in cls.__subclasses__():
+            all_subclasses.append(subclass)
+            all_subclasses.extend(get_all_subclasses(subclass))
+        return all_subclasses
+
+    all_subclasses = get_all_subclasses(ViewFilterItemCreate)
+
+    # Filter to only concrete classes (those with specific operators defined as Literal)
+    # These are the classes that end with "Create" and have a specific operator
+    concrete_classes = []
+    for cls in all_subclasses:
+        # Check if this class defines a specific operator (has Literal type annotation)
+        if hasattr(cls, "__annotations__") and "operator" in cls.__annotations__:
+            annotation = cls.__annotations__["operator"]
+            # Check if it's a Literal type (concrete operator)
+            if hasattr(annotation, "__origin__") or "Literal" in str(annotation):
+                concrete_classes.append(cls)
+
+    return concrete_classes
+
+
+def test_filter_class_discovery():
+    """
+    Test that the filter class discovery mechanism works correctly. This ensures our
+    introspection logic properly identifies concrete filter classes.
+    """
+
+    all_concrete_classes = get_all_concrete_filter_classes()
+
+    # Verify we found a reasonable number of filter classes
+    # As of now, there should be at least 20 concrete filter classes
+    assert len(all_concrete_classes) >= 20, (
+        f"Expected at least 20 concrete filter classes, found {len(all_concrete_classes)}. "
+        f"Classes found: {[cls.__name__ for cls in all_concrete_classes]}"
+    )
+
+    # Verify that known concrete classes are discovered
+    class_names = {cls.__name__ for cls in all_concrete_classes}
+    expected_classes = {
+        "TextEqualViewFilterItemCreate",
+        "NumberEqualsViewFilterItemCreate",
+        "DateEqualsViewFilterItemCreate",
+        "BooleanIsViewFilterItemCreate",
+        "LinkRowHasViewFilterItemCreate",
+        "SingleSelectIsAnyViewFilterItemCreate",
+        "MultipleSelectIsAnyViewFilterItemCreate",
+    }
+
+    missing = expected_classes - class_names
+    assert not missing, f"Expected classes not found: {missing}"
+
+    # Verify that base/intermediate classes are NOT included
+    excluded_classes = {
+        "ViewFilterItemCreate",
+        "TextViewFilterItemCreate",
+        "NumberViewFilterItemCreate",
+        "DateViewFilterItemCreate",
+    }
+
+    found_excluded = excluded_classes & class_names
+    assert (
+        not found_excluded
+    ), f"Base/intermediate classes should not be included: {found_excluded}"
+
+
+@pytest.mark.django_db
+def test_comprehensive_all_filter_types_conversion(data_fixture):
+    """
+    Comprehensive test ensuring ALL filter types can be successfully converted to
+    Baserow filters with a table containing all supported field types.
+    """
+
+    # Setup
+    user = data_fixture.create_user()
+    workspace = data_fixture.create_workspace(user=user)
+    database = data_fixture.create_database_application(workspace=workspace)
+    table = data_fixture.create_database_table(database=database, name="All Fields")
+
+    # Create all field types
+    text_field = data_fixture.create_text_field(table=table, name="Text", primary=True)
+    number_field = data_fixture.create_number_field(table=table, name="Number")
+    date_field = data_fixture.create_date_field(table=table, name="Date")
+    boolean_field = data_fixture.create_boolean_field(table=table, name="Boolean")
+    single_select = data_fixture.create_single_select_field(table=table, name="Status")
+    multi_select = data_fixture.create_multiple_select_field(table=table, name="Tags")
+
+    linked_table = data_fixture.create_database_table(database=database, name="Linked")
+    data_fixture.create_text_field(table=linked_table, name="Linked Text", primary=True)
+    link_field = data_fixture.create_link_row_field(
+        table=table, link_row_table=linked_table
+    )
+
+    data_fixture.create_select_option(field=single_select, value="Active", order=1)
+    data_fixture.create_select_option(field=multi_select, value="Important", order=1)
+
+    # Create view and table_fields dict
+    view = data_fixture.create_grid_view(table=table)
+    table_fields = {
+        text_field.id: text_field,
+        number_field.id: number_field,
+        date_field.id: date_field,
+        boolean_field.id: boolean_field,
+        single_select.id: single_select,
+        multi_select.id: multi_select,
+        link_field.id: link_field,
+    }
+
+    # List of all filter types to test
+    all_filters = [
+        # Text filters
+        TextEqualViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="equal", value="test"
+        ),
+        TextNotEqualViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="not_equal", value="test"
+        ),
+        TextContainsViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="contains", value="test"
+        ),
+        TextNotContainsViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="contains_not", value="test"
+        ),
+        TextEmptyViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="empty", value=""
+        ),
+        TextNotEmptyViewFilterItemCreate(
+            field_id=text_field.id, type="text", operator="not_empty", value=""
+        ),
+        # Number filters
+        NumberEqualsViewFilterItemCreate(
+            field_id=number_field.id, type="number", operator="equal", value=42.0
+        ),
+        NumberNotEqualsViewFilterItemCreate(
+            field_id=number_field.id, type="number", operator="not_equal", value=0.0
+        ),
+        NumberHigherThanViewFilterItemCreate(
+            field_id=number_field.id,
+            type="number",
+            operator="higher_than",
+            value=10.0,
+            or_equal=False,
+        ),
+        NumberLowerThanViewFilterItemCreate(
+            field_id=number_field.id,
+            type="number",
+            operator="lower_than",
+            value=100.0,
+            or_equal=True,
+        ),
+        NumberEmptyViewFilterItemCreate(
+            field_id=number_field.id, type="number", operator="empty", value=0.0
+        ),
+        NumberNotEmptyViewFilterItemCreate(
+            field_id=number_field.id, type="number", operator="not_empty", value=0.0
+        ),
+        # Date filters
+        DateEqualsViewFilterItemCreate(
+            field_id=date_field.id,
+            type="date",
+            operator="equal",
+            value=Date(year=2024, month=1, day=1),
+            mode="exact_date",
+        ),
+        DateNotEqualsViewFilterItemCreate(
+            field_id=date_field.id,
+            type="date",
+            operator="not_equal",
+            value=None,
+            mode="today",
+        ),
+        DateAfterViewFilterItemCreate(
+            field_id=date_field.id,
+            type="date",
+            operator="after",
+            value=7,
+            mode="nr_days_ago",
+            or_equal=False,
+        ),
+        DateBeforeViewFilterItemCreate(
+            field_id=date_field.id,
+            type="date",
+            operator="before",
+            value=None,
+            mode="tomorrow",
+            or_equal=True,
+        ),
+        # Select filters
+        SingleSelectIsAnyViewFilterItemCreate(
+            field_id=single_select.id,
+            type="single_select",
+            operator="is_any_of",
+            value=["Active"],
+        ),
+        SingleSelectIsNoneOfNotViewFilterItemCreate(
+            field_id=single_select.id,
+            type="single_select",
+            operator="is_none_of",
+            value=["Active"],
+        ),
+        MultipleSelectIsAnyViewFilterItemCreate(
+            field_id=multi_select.id,
+            type="multiple_select",
+            operator="is_any_of",
+            value=["Important"],
+        ),
+        MultipleSelectIsNoneOfNotViewFilterItemCreate(
+            field_id=multi_select.id,
+            type="multiple_select",
+            operator="is_none_of",
+            value=["Important"],
+        ),
+        # Link row filters
+        LinkRowHasViewFilterItemCreate(
+            field_id=link_field.id, type="link_row", operator="has", value=1
+        ),
+        LinkRowHasNotViewFilterItemCreate(
+            field_id=link_field.id, type="link_row", operator="has_not", value=2
+        ),
+        # Boolean filter
+        BooleanIsViewFilterItemCreate(
+            field_id=boolean_field.id, type="boolean", operator="is", value=True
+        ),
+    ]
+
+    # Test that all filters can be created successfully
+    created_filters = []
+    for filter_item in all_filters:
+        try:
+            created_filter = create_view_filter(user, view, table_fields, filter_item)
+            created_filters.append(created_filter)
+            assert created_filter is not None
+            assert created_filter.view.id == view.id
+        except Exception as e:
+            pytest.fail(f"Failed to create filter {filter_item}: {e}")
+
+    # Verify all filters were created in the database
+    assert len(created_filters) == len(all_filters)
+    assert ViewFilter.objects.filter(view=view).count() == len(all_filters)
+
+    # Verify each filter type is represented
+    filter_types = set(f.type for f in created_filters)
+    expected_types = {
+        "equal",
+        "not_equal",
+        "contains",
+        "contains_not",
+        "empty",
+        "not_empty",
+        "higher_than",
+        "lower_than",
+        "date_is",
+        "date_is_not",
+        "date_is_after",
+        "date_is_on_or_before",
+        "single_select_is_any_of",
+        "single_select_is_none_of",
+        "multiple_select_has",
+        "multiple_select_has_not",
+        "link_row_has",
+        "link_row_has_not",
+        "boolean",
+    }
+    assert filter_types == expected_types
+
+    # CRITICAL CHECK: Ensure all concrete filter classes are tested
+    all_concrete_classes = get_all_concrete_filter_classes()
+    tested_classes = {type(filter_item) for filter_item in all_filters}
+
+    missing_classes = set(all_concrete_classes) - tested_classes
+    if missing_classes:
+        missing_names = [cls.__name__ for cls in missing_classes]
+        pytest.fail(
+            f"The following filter classes are not tested: {', '.join(missing_names)}. "
+            f"Please add test instances for these classes to the all_filters list."
+        )
+
+    # Ensure we're not testing non-existent classes
+    extra_classes = tested_classes - set(all_concrete_classes)
+    if extra_classes:
+        extra_names = [cls.__name__ for cls in extra_classes]
+        pytest.fail(
+            f"The following classes in the test don't exist as concrete filter classes: "
+            f"{', '.join(extra_names)}. Please remove them from the test."
+        )

From 0a5a3564074f9ddc2425bd8eba96523d06c76a74 Mon Sep 17 00:00:00 2001
From: Bram
Date: Fri, 14 Nov 2025 18:27:54 +0100
Subject: [PATCH 5/6] show specific Jira connection error (#4260)

---
 .../src/baserow_enterprise/data_sync/jira_issues_data_sync.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py
index 769f4ebefc..0763e759a1 100644
--- a/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py
+++ b/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py
@@ -283,8 +283,8 @@ def _fetch_issues(self, instance, progress_builder: ChildProgressBuilder):
                 start_at += max_results
                 if data["total"] <= start_at:
                     break
-        except (RequestException, UnacceptableAddressException, ConnectionError):
-            raise SyncError("Error fetching issues from Jira.")
+        except (RequestException, UnacceptableAddressException, ConnectionError) as e:
+            raise SyncError(f"Error connecting to Jira: {str(e)}")
 
     return issues
 

From f97c21f17138c37f63e1dc4401ab48a77c7390af Mon Sep 17 00:00:00 2001
From: Davide Silvestri <75379892+silvestrid@users.noreply.github.com>
Date: Fri, 14 Nov 2025 18:54:18 +0100
Subject: [PATCH 6/6] chore(AI Assistant): improve context awareness (#4261)

---
 .../baserow_enterprise/assistant/assistant.py | 61 ++++++++++++++-----
 .../assistant/tools/search_docs/tools.py      |  2 +-
 .../assistant/test_assistant.py               | 29 +++------
 .../assistant/AssistantMessageActions.vue     |  4 ---
 .../assistant/AssistantMessageList.vue        | 13 +++-
 .../baserow_enterprise/locales/en.json        |  8 ++-
 6 files changed, 74 insertions(+), 43 deletions(-)

diff --git a/enterprise/backend/src/baserow_enterprise/assistant/assistant.py b/enterprise/backend/src/baserow_enterprise/assistant/assistant.py
index c5bcfa6cd0..15857e53a2 100644
--- a/enterprise/backend/src/baserow_enterprise/assistant/assistant.py
+++ b/enterprise/backend/src/baserow_enterprise/assistant/assistant.py
@@ -122,9 +122,12 @@ class ChatSignature(udspy.Signature):
     __doc__ = f"{ASSISTANT_SYSTEM_PROMPT}\n TASK INSTRUCTIONS: \n"
 
     question: str = udspy.InputField()
+    context: str = udspy.InputField(
+        description="Context and facts extracted from the history to help answer the question."
+    )
     ui_context: dict[str, Any] | None = udspy.InputField(
         default=None,
-        desc=(
+        description=(
             "The context the user is currently in. "
            "It contains information about the user, the workspace, open table, view, etc. "
             "Whenever it makes sense, use it to ground your answer."
@@ -133,6 +136,29 @@ class ChatSignature(udspy.Signature): answer: str = udspy.OutputField() +class QuestionContextSummarizationSignature(udspy.Signature): + """ + Extract relevant facts from conversation history that provide context for answering + the current question. Do not answer the question or modify it - only extract and + summarize the relevant historical facts that will help in decision-making. + """ + + question: str = udspy.InputField( + description="The current user question that needs context from history." + ) + previous_messages: list[str] = udspy.InputField( + description="Conversation history as alternating user/assistant messages." + ) + facts: str = udspy.OutputField( + description=( + "Relevant facts extracted from the conversation history as a concise " + "paragraph. Include only information that provides necessary context for " + "answering the question. Do not answer the question itself, do not modify " + "the question, and do not include irrelevant details." + ) + ) + + def get_assistant_cancellation_key(chat_uuid: str) -> str: """ Get the Redis cache key for cancellation tracking. @@ -176,7 +202,7 @@ def _init_assistant(self): ) self.callbacks = AssistantCallbacks(self.tool_helpers) self._assistant = udspy.ReAct(ChatSignature, tools=tools, max_iters=20) - self.history = None + self.history: list[str] = [] async def acreate_chat_message( self, @@ -265,7 +291,6 @@ async def aload_chat_history(self, limit=30): msg async for msg in self._chat.messages.order_by("-created_on")[:limit] ] - self.history = udspy.History() while len(last_saved_messages) >= 2: # Pop the oldest message pair to respect chronological order. first_message = last_saved_messages.pop() @@ -276,9 +301,9 @@ async def aload_chat_history(self, limit=30): ): continue - self.history.add_user_message(first_message.content) + self.history.append(f"Human: {first_message.content}") ai_answer = last_saved_messages.pop() - self.history.add_assistant_message(ai_answer.content) + self.history.append(f"AI: {ai_answer.content}") @lru_cache(maxsize=1) def check_llm_ready_or_raise(self): @@ -376,17 +401,24 @@ def _check_cancellation(self, cache_key: str, message_id: str) -> None: cache.delete(cache_key) raise AssistantMessageCancelled(message_id=message_id) - async def _enhance_question_with_history(self, question: str) -> str: - """Enhance the user question with chat history context if available.""" + async def _summarize_context_from_history(self, question: str) -> str: + """ + Extract relevant facts from chat history to provide context for the question or + return an empty string if there is no history. + + :param question: The current user question that needs context from history. + :return: A string containing relevant facts from the conversation history. 
+ """ - if not self.history.messages: - return question + if not self.history: + return "" - predictor = udspy.Predict("question, context -> enhanced_question") + predictor = udspy.Predict(QuestionContextSummarizationSignature) result = await predictor.aforward( - question=question, context=self.history.messages + question=question, + previous_messages=self.history, ) - return result.enhanced_question + return result.facts async def _process_stream_event( self, @@ -458,12 +490,13 @@ async def astream_messages( if self.history is None: await self.aload_chat_history() - user_question = await self._enhance_question_with_history( + context_from_history = await self._summarize_context_from_history( human_message.content ) output_stream = self._assistant.astream( - question=user_question, + question=human_message.content, + context=context_from_history, ui_context=human_message.ui_context.model_dump_json(exclude_none=True), ) diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/search_docs/tools.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/search_docs/tools.py index f87b57a81e..619902489e 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/search_docs/tools.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/search_docs/tools.py @@ -43,7 +43,7 @@ def __init__(self): self.rag = udspy.ChainOfThought(SearchDocsSignature) def forward(self, question: str, *args, **kwargs): - context = KnowledgeBaseHandler().search(question, num_results=10) + context = KnowledgeBaseHandler().search(question, num_results=7) return self.rag(context=context, question=question) diff --git a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py index 617d9df68c..55eec28adc 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py @@ -207,25 +207,18 @@ def test_aload_chat_history_formats_as_question_answer_pairs( # History should contain user/assistant message pairs assert assistant.history is not None - assert len(assistant.history.messages) == 4 + assert len(assistant.history) == 4 # First pair - assert assistant.history.messages[0]["content"] == "What is Baserow?" - assert assistant.history.messages[0]["role"] == "user" - assert ( - assistant.history.messages[1]["content"] - == "Baserow is a no-code database platform." - ) - assert assistant.history.messages[1]["role"] == "assistant" + assert assistant.history[0] == "Human: What is Baserow?" + assert assistant.history[1] == "AI: Baserow is a no-code database platform." # Second pair - assert assistant.history.messages[2]["content"] == "How do I create a table?" - assert assistant.history.messages[2]["role"] == "user" + assert assistant.history[2] == "Human: How do I create a table?" assert ( - assistant.history.messages[3]["content"] - == "You can create a table by clicking the + button." + assistant.history[3] + == "AI: You can create a table by clicking the + button." 
) - assert assistant.history.messages[3]["role"] == "assistant" def test_aload_chat_history_respects_limit(self, enterprise_data_fixture): """Test that history loading respects the limit parameter""" @@ -251,7 +244,7 @@ def test_aload_chat_history_respects_limit(self, enterprise_data_fixture): async_to_sync(assistant.aload_chat_history)(limit=6) # Last 6 messages # Should only load the most recent 6 messages (3 pairs) - assert len(assistant.history.messages) == 6 + assert len(assistant.history) == 6 def test_aload_chat_history_handles_incomplete_pairs(self, enterprise_data_fixture): """ @@ -281,11 +274,9 @@ def test_aload_chat_history_handles_incomplete_pairs(self, enterprise_data_fixtu async_to_sync(assistant.aload_chat_history)() # Should only include the complete pair (2 messages: user + assistant) - assert len(assistant.history.messages) == 2 - assert assistant.history.messages[0]["content"] == "Question 1" - assert assistant.history.messages[0]["role"] == "user" - assert assistant.history.messages[1]["content"] == "Answer 1" - assert assistant.history.messages[1]["role"] == "assistant" + assert len(assistant.history) == 2 + assert assistant.history[0] == "Human: Question 1" + assert assistant.history[1] == "AI: Answer 1" @pytest.mark.django_db diff --git a/enterprise/web-frontend/modules/baserow_enterprise/components/assistant/AssistantMessageActions.vue b/enterprise/web-frontend/modules/baserow_enterprise/components/assistant/AssistantMessageActions.vue index 7204c09813..434e78c78b 100644 --- a/enterprise/web-frontend/modules/baserow_enterprise/components/assistant/AssistantMessageActions.vue +++ b/enterprise/web-frontend/modules/baserow_enterprise/components/assistant/AssistantMessageActions.vue @@ -39,10 +39,6 @@ -
- {{ $t('assistantMessageActions.disclaimer') }} -
-
+
+ {{ $t('assistantMessageList.disclaimer') }} +
@@ -129,6 +134,10 @@ export default { !this.expandedSources[messageId] ) }, + + isLastMessage(index) { + return index === this.messages.length - 1 + }, }, } diff --git a/enterprise/web-frontend/modules/baserow_enterprise/locales/en.json b/enterprise/web-frontend/modules/baserow_enterprise/locales/en.json index 94ff54fa87..88aa9144b7 100644 --- a/enterprise/web-frontend/modules/baserow_enterprise/locales/en.json +++ b/enterprise/web-frontend/modules/baserow_enterprise/locales/en.json @@ -368,13 +368,15 @@ "assistantMessageSources": { "sources": "{count} source | {count} sources" }, + "assistantMessageList": { + "disclaimer": "Kuma can make mistakes, please double-check responses" + }, "assistantMessageActions": { "feedbackContextTitle": "Help us improve", "feedbackContextPlaceholder": "What could we improve? (optional)", "copiedToClipboard": "Copied to clipboard", "copiedContentToast": "The Assistant's response content has been copied to your clipboard", - "copyFailed": "Failed to copy to clipboard", - "disclaimer": "Kuma can make mistakes, please double-check responses" + "copyFailed": "Failed to copy to clipboard" }, "chatwootSupportSidebarWorkspace": { "directSupport": "Direct support" @@ -683,7 +685,7 @@ "fieldInvalidTitle": "Date dependency field error" }, "dateDependency": { - "invalidChildRow": "Successor row is invalid", + "invalidChildRow": "Successor row is invalid", "invalidParentRow": "Predecessor row is invalid", "invalidParentEndDateAfterChildStartDate": "Predecessor row end date is after successor start date", "invalidStartDateEmpty": "Start date is empty",