From 9c26f9b460f33f84ed72e37e9b751da4e983cbb5 Mon Sep 17 00:00:00 2001
From: bramjanssen
Date: Thu, 30 Oct 2025 10:44:48 +0100
Subject: [PATCH 1/4] feat: added performance testing

---
 .gitignore                      |  2 +-
 docker-compose.perf.yml         | 60 ++++++++++++++++++++++++++++++++
 env.example                     | 23 ++++++-------
 requirements.txt                |  2 ++
 tests/performance/auth.py       | 23 +++++++++++++
 tests/performance/locustfile.py | 61 +++++++++++++++++++++++++++++++++
 tests/performance/utils.py      | 15 ++++++++
 7 files changed, 173 insertions(+), 13 deletions(-)
 create mode 100644 docker-compose.perf.yml
 create mode 100644 tests/performance/auth.py
 create mode 100644 tests/performance/locustfile.py
 create mode 100644 tests/performance/utils.py

diff --git a/.gitignore b/.gitignore
index 7a2f10b..d8ac486 100644
--- a/.gitignore
+++ b/.gitignore
@@ -128,7 +128,7 @@ celerybeat.pid
 *.sage.py
 
 # Environments
-.env
+.env*
 .venv
 env/
 venv/
diff --git a/docker-compose.perf.yml b/docker-compose.perf.yml
new file mode 100644
index 0000000..5171590
--- /dev/null
+++ b/docker-compose.perf.yml
@@ -0,0 +1,60 @@
+version: "3.9"
+
+volumes:
+  db-data-perf:
+
+services:
+  db:
+    image: postgres:15
+    restart: unless-stopped
+    environment:
+      POSTGRES_USER: perf_user
+      POSTGRES_PASSWORD: perf_pass
+      POSTGRES_DB: perf_db
+    ports:
+      - "5432:5432"
+    volumes:
+      - db-data-perf:/var/lib/postgresql/data
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U perf_user -d perf_db"]
+      interval: 5s
+      timeout: 5s
+      retries: 10
+
+  app:
+    build:
+      context: .
+      dockerfile: dockerfile
+    image: apex-dispatch-api:perf
+    restart: unless-stopped
+    environment:
+      APP_ENV: development
+      KEYCLOAK_HOST: ${KEYCLOAK_HOST}
+      KEYCLOAK_REALM: ${KEYCLOAK_REALM}
+      KEYCLOAK_CLIENT_ID: ${KEYCLOAK_CLIENT_ID}
+      KEYCLOAK_CLIENT_SECRET: ${KEYCLOAK_CLIENT_SECRET}
+      DATABASE_URL: postgresql+psycopg2://perf_user:perf_pass@db:5432/perf_db
+      OPENEO_BACKENDS: ${OPENEO_BACKENDS_PERFORMANCE}
+    depends_on:
+      db:
+        condition: service_healthy
+    ports:
+      - "${APP_PORT:-8000}:${APP_PORT:-8000}"
+    healthcheck:
+      # adjust path if your health endpoint differs
+      test: ["CMD-SHELL", "curl -f http://localhost:${APP_PORT:-8000}/health || exit 1"]
+      interval: 5s
+      timeout: 3s
+      retries: 12
+
+  migrate:
+    image: apex-dispatch-api:perf
+    environment:
+      DATABASE_URL: postgresql+psycopg2://perf_user:perf_pass@db:5432/perf_db
+    depends_on:
+      db:
+        condition: service_healthy
+    # run the migration once the database is healthy, then exit
+    entrypoint: ["/bin/sh", "-c"]
+    command: >
+      "alembic upgrade head"
\ No newline at end of file
diff --git a/env.example b/env.example
index efdf77f..fa52b5a 100644
--- a/env.example
+++ b/env.example
@@ -1,20 +1,19 @@
-# Keycloak
-KEYCLOAK_HOST=
-KEYCLOAK_REALM=
-
 # App
-APP_NAME="APEx Dispatch API"
-APP_DESCRIPTION="APEx Dispatch Service API to run jobs and upscaling tasks"
-APP_HOST=0.0.0.0
-APP_PORT=8000
-APP_ENV=development
+APP_NAME=
+APP_DESCRIPTION=
+APP_ENV=
 
 # CORS
-CORS_ALLOWED_ORIGINS=http://localhost:5173
+CORS_ALLOWED_ORIGINS=
 
+# Keycloak
+KEYCLOAK_HOST=
+KEYCLOAK_REALM=
+KEYCLOAK_CLIENT_ID=
+KEYCLOAK_CLIENT_SECRET=
 
 # Database
 DATABASE_URL=
 
-# AUTH
-OPENEO_AUTH_CLIENT_CREDENTIALS_CDSEFED=
\ No newline at end of file
+# OPENEO
+OPENEO_BACKENDS=
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 734f563..5668b08 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,10 @@
+alembic
 cryptography
 fastapi
 flake8
 geojson_pydantic
 httpx
+locust
 loguru
 mkdocs
 mkdocs-jupyter
diff --git a/tests/performance/auth.py b/tests/performance/auth.py
new file mode 100644
index 0000000..9a48015
--- /dev/null
+++ b/tests/performance/auth.py
@@ -0,0 +1,23 @@
+import os
+
+from dotenv import load_dotenv
+import requests
+
+load_dotenv()
+
+URL = os.getenv("KEYCLOAK_HOST")
+REALM = os.getenv("KEYCLOAK_REALM")
+CLIENT_ID = os.getenv("KEYCLOAK_CLIENT_PERFORMANCE_ID")
+CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_PERFORMANCE_SECRET")
+
+
+def get_token_client_credentials():
+    url = f"https://{URL}/realms/{REALM}/protocol/openid-connect/token"
+    data = {
+        "grant_type": "client_credentials",
+        "client_id": CLIENT_ID,
+        "client_secret": CLIENT_SECRET,
+    }
+    r = requests.post(url, data=data, timeout=10)
+    r.raise_for_status()
+    return r.json()["access_token"]
diff --git a/tests/performance/locustfile.py b/tests/performance/locustfile.py
new file mode 100644
index 0000000..dd5bc40
--- /dev/null
+++ b/tests/performance/locustfile.py
@@ -0,0 +1,61 @@
+"""
+Run locally:
+    pip install -r requirements.txt
+    locust -f tests/performance/locustfile.py --headless -u 50 -r 5 --run-time 2m --host http://localhost:8000
+"""
+
+import logging
+from locust import HttpUser, task, between
+from locust.exception import StopUser
+from tests.performance.auth import get_token_client_credentials
+from tests.performance.utils import random_temporal_extent
+
+
+class DispatchUser(HttpUser):
+
+    logger = logging.getLogger("user")
+    wait_time = between(1.0, 3.0)  # user "think" time
+
+    def on_start(self):
+        token = get_token_client_credentials()
+        self.headers = {
+            "Authorization": f"Bearer {token}",
+            "Content-Type": "application/json",
+        }
+
+        # simple health check at start; stop user if service unreachable
+        with self.client.get("/health", name="health", catch_response=True) as r:
+            if r.status_code != 200:
+                r.failure(f"Health check failed: {r.status_code}")
+                # stop this virtual user if service down
+                raise StopUser()
+
+    @task
+    def execute_statistics(self):
+        extent = random_temporal_extent(2024)
+        payload = {
+            "title": "Test Processing Job",
+            "label": "openeo",
+            "service": {
+                "endpoint": "https://openeo.vito.be",
+                "application": "https://openeo.vito.be/openeo/1.2/processes/u:ff5c137fbbbf409d14a99875b97109874058056a9a02193e6fde8217d2f1f3e8@egi.eu/timeseries_graph",
+            },
+            "format": "json",
+            "parameters": {
+                "spatial_extent": {
+                    "type": "Point",
+                    "coordinates": [5.196363779293476, 51.25007554845948],
+                },
+                "collection": "CGLS_NDVI300_V2_GLOBAL",
+                "band": "NDVI",
+                "temporal_extent": extent,
+            },
+        }
+        self.logger.info(f"Requesting statistics for extent: {extent}")
+        self.client.post(
+            "/sync_jobs",
+            json=payload,
+            name="statistics",
+            timeout=30,
+            headers=self.headers,
+        )
diff --git a/tests/performance/utils.py b/tests/performance/utils.py
new file mode 100644
index 0000000..e8ebe57
--- /dev/null
+++ b/tests/performance/utils.py
@@ -0,0 +1,15 @@
+from datetime import datetime, timedelta
+import random
+
+
+def random_temporal_extent(start_year):
+    # Random start date within the given year
+    start_date = datetime.strptime(f"{start_year}-01-01", "%Y-%m-%d") + timedelta(
+        days=random.randint(0, 364)
+    )
+
+    duration_days = random.randint(30, 365)
+
+    end_date = start_date + timedelta(days=duration_days)
+
+    return [start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d")]

From 19eb99ba47e4208b0fbae0a6b88cba0d03a68bdf Mon Sep 17 00:00:00 2001
From: bramjanssen
Date: Thu, 30 Oct 2025 11:01:09 +0100
Subject: [PATCH 2/4] feat: allowed custom specification of workers in Dockerfile

---
 dockerfile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/dockerfile b/dockerfile
index 18d52b5..fa14676 100644
--- a/dockerfile
+++ b/dockerfile
@@ -16,4 +16,6 @@ COPY . .
 
 EXPOSE 8000
 
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers"]
+ENV WORKERS=3
+
+CMD ["sh", "-c", "uvicorn app.main:app --host 0.0.0.0 --port 8000 --proxy-headers --workers ${WORKERS}"]

From 1ac623be9bb70ccd53e7767d2e46ee5339191468 Mon Sep 17 00:00:00 2001
From: bramjanssen
Date: Thu, 30 Oct 2025 11:15:24 +0100
Subject: [PATCH 3/4] doc: added docs for performance testing

---
 docs/performance.md | 80 +++++++++++++++++++++++++++++++++++++++++++++
 mkdocs.yml          |  1 +
 2 files changed, 81 insertions(+)
 create mode 100644 docs/performance.md

diff --git a/docs/performance.md b/docs/performance.md
new file mode 100644
index 0000000..2230811
--- /dev/null
+++ b/docs/performance.md
@@ -0,0 +1,80 @@
+# Executing Performance Tests
+
+This repository includes tools to execute performance tests for the **APEx Dispatch API**.
+Performance testing is useful for analyzing the impact of code changes, database updates, and platform modifications on the system's behavior and responsiveness.
+
+## Prerequisites
+
+Before running the performance tests, ensure the following prerequisites are met:
+
+* **Python environment** (Python 3.10+ recommended)
+* **Docker** and **Docker Compose** installed on your system
+
+## Setting Up the Environment
+
+Performance tests require both the API and a database to be running locally. Follow these steps to set up your environment:
+
+1. **Create a `.env` file** in the root of the repository with the following variables:
+
+   * `OPENEO_BACKENDS_PERFORMANCE` → Configuration for the openEO backend authentication. See [configuration guide](./configuration.md#openeo-backend-configuration).
+   * `KEYCLOAK_CLIENT_PERFORMANCE_ID` → Client ID used for executing performance tests.
+   * `KEYCLOAK_CLIENT_PERFORMANCE_SECRET` → Client secret used for executing performance tests.
+
+2. **Start the services using Docker Compose**:
+
+   ```bash
+   docker compose -f docker-compose.perf.yml up -d db
+   ```
+
+   Starts a local database instance.
+
+   ```bash
+   docker compose -f docker-compose.perf.yml up -d migrate
+   ```
+
+   Executes database migrations to ensure all required tables are created.
+
+   ```bash
+   docker compose -f docker-compose.perf.yml up -d app
+   ```
+
+   Starts the API locally.
+
+> **Tip:** You can check the logs of each service with `docker compose -f docker-compose.perf.yml logs -f <service-name>`.
+
+## Executing Performance Tests
+
+The performance tests are implemented using **[Locust](https://locust.io/)**. Test scenarios are located in `tests/performance/locustfile.py`.
+
+### Running Tests with a Web Dashboard
+
+To execute the performance tests and monitor them in a browser dashboard:
+
+```bash
+locust -f tests/performance/locustfile.py -u 10 --host http://localhost:8000 --run-time 1m
+```
+
+* `-u 10` → Number of simulated concurrent users
+* `--host http://localhost:8000` → URL of the API to test
+* `--run-time 1m` → Duration of the test
+
+After starting, open your browser at [http://localhost:8089](http://localhost:8089) to monitor real-time performance metrics, including response times, failure rates, and throughput.
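+
+> **Note:** Depending on your Locust version, `--run-time` may only be accepted together with `--headless` or `--autostart`. If Locust refuses to start with the web UI enabled, either drop `--run-time` and stop the run from the dashboard, or add `--autostart` (and optionally `-r` to set the spawn rate in users per second), for example:
+
+```bash
+locust -f tests/performance/locustfile.py -u 10 -r 2 --host http://localhost:8000 --run-time 1m --autostart
+```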
+
+### Running Tests in Headless Mode
+
+To execute tests without a web interface (useful for CI/CD pipelines):
+
+```bash
+locust -f tests/performance/locustfile.py -u 10 --host http://localhost:8000 --run-time 1m --headless
+```
+
+You can also export the results to CSV files for further analysis:
+
+```bash
+locust -f tests/performance/locustfile.py -u 10 --host http://localhost:8000 --run-time 1m --headless --csv=perf_test_results
+```
+
+### Recommended Practices
+
+* Start with a small number of users to validate test scripts before scaling up.
+* Combine performance testing with monitoring tools to detect resource bottlenecks.
diff --git a/mkdocs.yml b/mkdocs.yml
index 068d2f7..1169b10 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -10,6 +10,7 @@ nav:
   - Home: index.md
   - Getting Started: getting_started.md
   - Configuration: configuration.md
+  - Performance Testing: performance.md
   - Contributing: contributing.md
   - Architecture: architecture.md
 

From c7eb80c71e8a287ee0ff6ad620b7afd7dde575ba Mon Sep 17 00:00:00 2001
From: bramjanssen
Date: Thu, 30 Oct 2025 11:18:22 +0100
Subject: [PATCH 4/4] chore: fixed linting issues

---
 tests/performance/locustfile.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/tests/performance/locustfile.py b/tests/performance/locustfile.py
index dd5bc40..029130c 100644
--- a/tests/performance/locustfile.py
+++ b/tests/performance/locustfile.py
@@ -38,7 +38,9 @@ def execute_statistics(self):
             "label": "openeo",
             "service": {
                 "endpoint": "https://openeo.vito.be",
-                "application": "https://openeo.vito.be/openeo/1.2/processes/u:ff5c137fbbbf409d14a99875b97109874058056a9a02193e6fde8217d2f1f3e8@egi.eu/timeseries_graph",
+                "application": "https://openeo.vito.be/openeo/1.2/processes/"
+                + "u:ff5c137fbbbf409d14a99875b97109874058056a9a02193e6fde8217d2f1f3e8@egi.eu/"
+                + "timeseries_graph",
             },
             "format": "json",
             "parameters": {