diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 000000000..027657ffe
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,28 @@
+.git
+.gitignore
+.gitattributes
+
+*.pyc
+.DS_Store
+Thumbs.db
+.idea/
+.vscode/
+__pycache__/
+.venv/
+env/
+venv/
+.uv-cache
+.env
+.env.*
+
+node_modules/
+WareHouse/
+data/
+temp/
+logs
+.aider*
+
+/frontend
+README*
+compose.yml
+Dockerfile
diff --git a/.env b/.env
deleted file mode 100755
index b18fc6bfb..000000000
--- a/.env
+++ /dev/null
@@ -1,4 +0,0 @@
-BASE_URL=
-API_KEY=
-SERPER_DEV_API_KEY=
-JINA_API_KEY=
\ No newline at end of file
diff --git a/.env.docker b/.env.docker
new file mode 100644
index 000000000..104aed310
--- /dev/null
+++ b/.env.docker
@@ -0,0 +1,10 @@
+BACKEND_BIND=0.0.0.0
+
+FRONTEND_HOST=0.0.0.0
+FRONTEND_PORT=5173
+
+# Backend URL used by the frontend (Vite dev-server proxy target): the Compose service name and exposed port
+VITE_API_BASE_URL=http://backend:6400
+
+# Explicit CORS origins for Docker-based dev (comma-separated)
+CORS_ALLOW_ORIGINS=http://localhost:5173,http://127.0.0.1:5173
diff --git a/.env.example b/.env.example
new file mode 100644
index 000000000..088d3f63f
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,24 @@
+# ============================================================================
+# LLM Provider Configuration
+# ============================================================================
+# BASE_URL and API_KEY are the standard settings used to authenticate LLM API calls.
+# They work with OpenAI, Gemini, LM Studio, Ollama, and other OpenAI-compatible providers.
+
+BASE_URL=https://api.openai.com/v1
+API_KEY=sk-your-openai-api-key-here
+
+# Example BASE_URL values:
+# - OpenAI: https://api.openai.com/v1
+# - Gemini: https://generativelanguage.googleapis.com/v1beta/openai/
+# - LM Studio: http://localhost:1234/v1
+# - Ollama: http://localhost:11434/v1
+
+# ============================================================================
+# Optional: Web Search and Reading Tools
+# ============================================================================
+
+# SERPER_DEV_API_KEY=your-serper-api-key-here
+# Get from: https://serper.dev
+
+# JINA_API_KEY=your-jina-api-key-here
+# Get from: https://jina.ai
\ No newline at end of file
diff --git a/.github/workflows/validate-yamls.yml b/.github/workflows/validate-yamls.yml
new file mode 100644
index 000000000..87c103862
--- /dev/null
+++ b/.github/workflows/validate-yamls.yml
@@ -0,0 +1,71 @@
+name: Validate YAML Workflows
+
+on:
+ pull_request:
+ paths:
+ - 'yaml_instance/**/*.yaml'
+ - '.github/workflows/**/*.yml'
+ - 'tools/validate_all_yamls.py'
+ - 'check/**/*.py'
+
+ push:
+ branches:
+ - main
+ paths:
+ - 'yaml_instance/**/*.yaml'
+ - '.github/workflows/**/*.yml'
+ - 'tools/validate_all_yamls.py'
+ - 'check/**/*.py'
+
+ workflow_dispatch:
+
+jobs:
+ validate:
+ name: Validate YAML Configuration Files
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.12'
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v4
+ with:
+ enable-cache: true
+
+ - name: Install system dependencies for pycairo
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y libcairo2-dev pkg-config
+
+ - name: Cache uv dependencies
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.cache/uv
+ .venv
+ key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-uv-
+
+ - name: Install dependencies
+ run: uv sync
+
+      - name: Run YAML validation
+        id: validate
+        run: uv run python tools/validate_all_yamls.py
+
+ - name: Report validation results
+ if: always()
+ run: |
+          if [ "${{ steps.validate.outcome }}" = "success" ]; then
+ echo "All YAML workflow files passed validation"
+ else
+ echo "YAML validation failed - check the logs above for details"
+ exit 1
+ fi
diff --git a/.gitignore b/.gitignore
index 64b57f840..ca4ef755b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,18 +1,29 @@
-*.pyc
-.DS_Store
-.idea
-.vscode
+# Python
__pycache__/
-.env/
+*.pyc
+
+# Virtual environments
.venv/
-env/
venv/
-.idea
-.venv
-.uv-cache
-logs
-node_modules
-frontend/.vscode
-WareHouse/
+env/
+
+# uv
+.uv-cache/
+
+# IDEs
+.idea/
+.vscode/
+frontend/.vscode/
+
+# OS
+.DS_Store
+
+# Environment
+.env
+
+# Project Specific
+logs/
+node_modules/
data/
-temp/
\ No newline at end of file
+temp/
+WareHouse/
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..89fe5dd35
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,63 @@
+# ---- Builder: install deps with compilers and uv ----
+FROM python:3.12-slim AS builder
+ARG DEBIAN_FRONTEND=noninteractive
+
+WORKDIR /app
+
+# System deps required to build Python packages
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ pkg-config \
+ build-essential \
+ python3-dev \
+ libcairo2-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install uv just for dependency resolution/install
+RUN pip install --no-cache-dir uv
+
+# Install the project virtualenv outside /app so bind-mounts don't hide it
+ENV UV_PROJECT_ENVIRONMENT=/opt/venv
+
+# Copy dependency files first to maximize cache
+COPY pyproject.toml ./
+# Include lockfile for reproducible builds
+COPY uv.lock ./
+
+# Create the project virtualenv and install deps
+RUN uv sync --no-cache --frozen
+
+# ---- Runtime: minimal image with only runtime libs + app ----
+FROM python:3.12-slim AS runtime
+ARG DEBIAN_FRONTEND=noninteractive
+ARG BACKEND_BIND=0.0.0.0
+
+WORKDIR /app
+
+# Install only the runtime system libraries (no compilers).
+# libcairo2 is the runtime counterpart of the libcairo2-dev package used in the builder stage.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ libcairo2 \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy the prebuilt virtualenv from the builder
+COPY --from=builder /opt/venv /opt/venv
+
+# Copy the rest of the application code
+COPY . .
+
+# Use the venv Python by default and keep Python output unbuffered.
+# Bake default bind/port into the image; can be overridden at runtime.
+ENV PATH="/opt/venv/bin:${PATH}" \
+ PYTHONDONTWRITEBYTECODE=1 \
+ PYTHONUNBUFFERED=1 \
+ BACKEND_BIND=${BACKEND_BIND}
+
+# Drop privileges
+RUN useradd -m appuser && chown -R appuser:appuser /app
+USER appuser
+
+# EXPOSE is informational; compose controls published ports
+EXPOSE 6400
+
+# Run the backend server; sh -c is used so ${BACKEND_BIND} is expanded at runtime
+CMD ["sh", "-c", "python server_main.py --port 6400 --host ${BACKEND_BIND:-0.0.0.0}"]
diff --git a/Makefile b/Makefile
new file mode 100644
index 000000000..73c7e953a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,42 @@
+# ==============================================================================
+# Development Commands
+# ==============================================================================
+
+.PHONY: dev
+dev: server client ## Run both backend and frontend development servers
+
+.PHONY: server
+server: ## Start the backend server in the background
+ @echo "Starting server in background..."
+ @uv run python server_main.py --port 6400 --reload &
+
+.PHONY: client
+client: ## Start the frontend development server
+ @cd frontend && VITE_API_BASE_URL=http://localhost:6400 npm run dev
+
+.PHONY: stop
+stop: ## Stop backend and frontend servers
+ @echo "Stopping backend server (port 6400)..."
+ @lsof -t -i:6400 | xargs kill -9 2>/dev/null || echo "Backend server not found on port 6400."
+ @echo "Stopping frontend server (port 5173)..."
+ @lsof -t -i:5173 | xargs kill -9 2>/dev/null || echo "Frontend server not found on port 5173."
+
+# ==============================================================================
+# Tools & Maintenance
+# ==============================================================================
+
+.PHONY: sync
+sync: ## Sync Vue graphs to the server database
+ @uv run python tools/sync_vuegraphs.py
+
+.PHONY: validate-yamls
+validate-yamls: ## Validate all YAML configuration files
+ @uv run python tools/validate_all_yamls.py
+
+# ==============================================================================
+# Help
+# ==============================================================================
+
+.PHONY: help
+help: ## Display this help message
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
diff --git a/README-zh.md b/README-zh.md
index 8f2d53cf3..db73c288d 100755
--- a/README-zh.md
+++ b/README-zh.md
@@ -118,7 +118,18 @@ ChatDev 已从一个专门的软件开发多智能体系统演变为一个全面
cd frontend && npm install
```
-### ⚡️ 运行应用
+### ⚡️ 运行应用(本地)
+
+#### 使用 Makefile(推荐)
+
+**同时启动后端与前端**:
+```bash
+make dev
+```
+
+> 然后访问 Web 控制台:**[http://localhost:5173](http://localhost:5173)**。
+
+#### 手动命令
1. **启动后端**:
```bash
@@ -140,6 +151,44 @@ ChatDev 已从一个专门的软件开发多智能体系统演变为一个全面
> * **后端**:启动时指定 `--port 6401`
> * **前端**:设置 `VITE_API_BASE_URL=http://localhost:6401`
+#### 常用命令
+
+* **帮助命令**:
+ ```bash
+ make help
+ ```
+
+* **同步 YAML 工作流到前端**:
+ ```bash
+ make sync
+ ```
+ 将 `yaml_instance/` 中的所有工作流文件上传到数据库。
+
+* **校验所有 YAML 工作流**:
+ ```bash
+ make validate-yamls
+ ```
+ 检查所有 YAML 文件的语法与 schema 错误。
+
+
+### 🐳 使用 Docker 运行
+你也可以通过 Docker Compose 运行整个应用。该方式可简化依赖管理,并提供一致的运行环境。
+
+1. **前置条件**:
+ * 已安装 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/)。
+ * 请确保在项目根目录中存在用于配置 API Key 的 `.env` 文件。
+
+2. **构建并运行**:
+ ```bash
+ # 在项目根目录执行
+ docker compose up --build
+ ```
+
+3. **访问地址**:
+ * **后端**:`http://localhost:6400`
+ * **前端**:`http://localhost:5173`
+
+> 服务在异常退出后会自动重启,本地文件的修改会同步映射到容器中,便于实时开发。
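+
+开发过程中常用的 Docker Compose 命令:
+
+```bash
+# 查看后端日志
+docker compose logs -f backend
+
+# 停止并移除容器
+docker compose down
+```
+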
### 🔑 配置
@@ -252,6 +301,7 @@ if result.final_message:
 shiowen |
 kilo2127 |
 AckerlyLau |
+  LaansDole |
## 🤝 致谢
diff --git a/README.md b/README.md
index e4328cc1b..703c2966b 100644
--- a/README.md
+++ b/README.md
@@ -120,9 +120,29 @@ See our paper in [Multi-Agent Collaboration via Evolving Orchestration](https://
cd frontend && npm install
```
+### 🔑 Configuration
+
+* **Environment Variables**:
+ ```bash
+ cp .env.example .env
+ ```
+* **Model Keys**: Set `API_KEY` and `BASE_URL` in `.env` for your LLM provider.
+* **YAML placeholders**: Use `${VAR}` (e.g., `${API_KEY}`) in configuration files to reference these variables, for example:
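+
+  (The keys below are illustrative only; see the files under `yaml_instance/` for the actual schema.)
+
+  ```yaml
+  # Illustrative snippet - ${...} placeholders are resolved from the values in .env
+  base_url: ${BASE_URL}
+  api_key: ${API_KEY}
+  ```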
+
### ⚡️ Run the Application
-1. **Start Backend** :
+#### Using Makefile (Recommended)
+
+**Start both Backend and Frontend**:
+```bash
+make dev
+```
+
+> Then access the Web Console at **[http://localhost:5173](http://localhost:5173)**.
+
+#### Manual Commands
+
+1. **Start Backend**:
```bash
# Run from the project root
uv run python server_main.py --port 6400 --reload
@@ -143,12 +163,62 @@ See our paper in [Multi-Agent Collaboration via Evolving Orchestration](https://
> * **Backend**: start with `--port 6401`
> * **Frontend**: set `VITE_API_BASE_URL=http://localhost:6401`
-### 🔑 Configuration
-* **Environment Variables**: Create a `.env` file in the project root.
-* **Model Keys**: Set `API_KEY` and `BASE_URL` in `.env` for your LLM provider.
-* **YAML placeholders**: Use `${VAR}`(e.g., `${API_KEY}`)in configuration files to reference these variables.
+
+#### Utility Commands
+
+* **Help command**:
+ ```bash
+ make help
+ ```
+
+* **Sync YAML workflows to frontend**:
+ ```bash
+ make sync
+ ```
+ Uploads all workflow files from `yaml_instance/` to the database.
+
+* **Validate all YAML workflows**:
+ ```bash
+ make validate-yamls
+ ```
+ Checks all YAML files for syntax and schema errors.
+
+### 🐳 Run with Docker
+Alternatively, you can run the entire application using Docker Compose. This method simplifies dependency management and provides a consistent environment.
+
+1. **Prerequisites**:
+ * [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) installed.
+   * Ensure you have a `.env` file in the project root for your API keys (create it from `.env.example` as described in the Configuration section above).
+
+2. **Build and Run**:
+ ```bash
+ # From the project root
+ docker compose up --build
+ ```
+
+3. **Access**:
+ * **Backend**: `http://localhost:6400`
+ * **Frontend**: `http://localhost:5173`
+
+> The services will automatically restart if they crash, and local file changes will be reflected inside the containers for live development.
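+
+Some standard Docker Compose commands that come in handy while developing:
+
+```bash
+# Follow backend logs
+docker compose logs -f backend
+
+# Stop and remove the containers
+docker compose down
+```
+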
---
@@ -255,6 +325,7 @@ By contributing to DevAll, you'll be recognized in our **Contributors** list bel
 shiowen |
 kilo2127 |
 AckerlyLau |
+  LaansDole |
## 🤝 Acknowledgments
diff --git a/check/check.py b/check/check.py
index 39ba04079..12dcd6aa6 100755
--- a/check/check.py
+++ b/check/check.py
@@ -118,4 +118,4 @@ def check_config(yaml_content: Any) -> str:
except Exception as e:
return str(e)
- return ""
+ return ""
\ No newline at end of file
diff --git a/compose.yml b/compose.yml
new file mode 100644
index 000000000..d24901166
--- /dev/null
+++ b/compose.yml
@@ -0,0 +1,33 @@
+services:
+ backend:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ target: runtime
+ container_name: chatdev_backend
+ volumes:
+ - .:/app
+ ports:
+ - "6400:6400"
+ env_file:
+ - .env
+ - .env.docker
+ restart: unless-stopped
+
+ frontend:
+ build:
+ context: ./frontend
+ dockerfile: Dockerfile
+ target: dev
+ container_name: chatdev_frontend
+ volumes:
+ - ./frontend:/app
+ - /app/node_modules
+ ports:
+ - "${FRONTEND_PORT:-5173}:5173"
+ env_file:
+ - .env
+ - .env.docker
+ depends_on:
+ - backend
+ restart: unless-stopped
diff --git a/frontend/.dockerignore b/frontend/.dockerignore
new file mode 100644
index 000000000..8a7c39a8a
--- /dev/null
+++ b/frontend/.dockerignore
@@ -0,0 +1,25 @@
+.git
+.gitignore
+.gitattributes
+
+*.pyc
+.DS_Store
+Thumbs.db
+.idea/
+.vscode/
+__pycache__/
+.venv/
+env/
+venv/
+.uv-cache
+.env
+.env.*
+
+node_modules/
+temp/
+logs
+.aider*
+
+README*
+compose.yml
+Dockerfile
diff --git a/frontend/.gitignore b/frontend/.gitignore
index a547bf36d..a0cc46da9 100755
--- a/frontend/.gitignore
+++ b/frontend/.gitignore
@@ -7,7 +7,7 @@ yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
-node_modules
+node_modules/
dist
dist-ssr
*.local
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
new file mode 100644
index 000000000..c0d4804cb
--- /dev/null
+++ b/frontend/Dockerfile
@@ -0,0 +1,15 @@
+# ---- Dependencies: install node_modules once (cached) ----
+FROM node:24-alpine AS deps
+WORKDIR /app
+COPY package*.json ./
+# Prefer reproducible installs; fall back if no lockfile
+RUN npm ci --no-audit --no-fund || npm install --no-audit --no-fund
+
+# ---- Dev runtime: hot-reload server ----
+FROM node:24-alpine AS dev
+WORKDIR /app
+ENV NODE_ENV=development
+COPY --from=deps /app/node_modules /app/node_modules
+COPY . .
+EXPOSE 5173
+CMD ["npm", "run", "dev", "--", "--host"]
diff --git a/frontend/src/pages/BatchRunView.vue b/frontend/src/pages/BatchRunView.vue
index 128ccbdff..209b282b8 100644
--- a/frontend/src/pages/BatchRunView.vue
+++ b/frontend/src/pages/BatchRunView.vue
@@ -969,10 +969,24 @@ const establishWebSocketConnection = () => {
return
}
- const baseUrl = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8000'
- const wsProtocol = baseUrl.startsWith('https') ? 'wss:' : 'ws:'
- const urlObj = new URL(baseUrl)
- const wsUrl = `${wsProtocol}//${urlObj.host}/ws`
+ const apiBase = import.meta.env.VITE_API_BASE_URL || ''
+ // Defaults: same-origin (works with Vite dev proxy)
+ const defaultScheme = window.location.protocol === 'https:' ? 'wss:' : 'ws:'
+ let scheme = defaultScheme
+ let host = window.location.host
+
+ // In production, prefer explicit API base if provided
+ if (!import.meta.env.DEV && apiBase) {
+ try {
+ const api = new URL(apiBase, window.location.origin)
+ scheme = api.protocol === 'https:' ? 'wss:' : 'ws:'
+ host = api.host
+ } catch {
+ // keep defaults
+ }
+ }
+
+ const wsUrl = `${scheme}//${host}/ws`
const socket = new WebSocket(wsUrl)
ws = socket
diff --git a/frontend/src/pages/LaunchView.vue b/frontend/src/pages/LaunchView.vue
index a8683a541..d8361b534 100755
--- a/frontend/src/pages/LaunchView.vue
+++ b/frontend/src/pages/LaunchView.vue
@@ -1363,10 +1363,24 @@ const establishWebSocketConnection = () => {
return
}
- const baseUrl = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8000'
- const wsProtocol = baseUrl.startsWith('https') ? 'wss:' : 'ws:'
- const urlObj = new URL(baseUrl)
- const wsUrl = `${wsProtocol}//${urlObj.host}/ws`
+ const apiBase = import.meta.env.VITE_API_BASE_URL || ''
+ // Defaults: same-origin (works with Vite dev proxy)
+ const defaultScheme = window.location.protocol === 'https:' ? 'wss:' : 'ws:'
+ let scheme = defaultScheme
+ let host = window.location.host
+
+ // In production, prefer explicit API base if provided
+ if (!import.meta.env.DEV && apiBase) {
+ try {
+ const api = new URL(apiBase, window.location.origin)
+ scheme = api.protocol === 'https:' ? 'wss:' : 'ws:'
+ host = api.host
+ } catch {
+ // keep defaults
+ }
+ }
+
+ const wsUrl = `${scheme}//${host}/ws`
const socket = new WebSocket(wsUrl)
ws = socket
diff --git a/frontend/src/utils/apiFunctions.js b/frontend/src/utils/apiFunctions.js
index c1ba9a1f9..47a977b9d 100755
--- a/frontend/src/utils/apiFunctions.js
+++ b/frontend/src/utils/apiFunctions.js
@@ -1,7 +1,6 @@
import yaml from 'js-yaml'
-const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8000'
-const apiUrl = (path) => `${API_BASE_URL}${path}`
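+// Use relative API paths: the Vite dev server proxies /api and /ws to the backend (see vite.config.js);
+// production builds assume the API is reachable on the same origin.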
+const apiUrl = (path) => path
const addYamlSuffix = (filename) => {
const trimmed = (filename || '').trim()
diff --git a/frontend/vite.config.js b/frontend/vite.config.js
index b8a9b11f1..9413b9175 100755
--- a/frontend/vite.config.js
+++ b/frontend/vite.config.js
@@ -4,15 +4,21 @@ import vue from '@vitejs/plugin-vue'
// https://vite.dev/config/
export default defineConfig(({ mode }) => {
const env = loadEnv(mode, process.cwd(), '')
- const target = env.VITE_API_BASE_URL || 'http://localhost:8000'
+ const target = env.VITE_API_BASE_URL || 'http://localhost:6400'
return {
plugins: [vue()],
server: {
+ host: true,
proxy: {
'/api': {
target: target,
changeOrigin: true,
+ },
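+        // Also forward WebSocket connections (/ws) used by the run views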
+ '/ws': {
+ target: target,
+ ws: true,
+ changeOrigin: true,
}
}
}
diff --git a/server/bootstrap.py b/server/bootstrap.py
index 17942b85e..31020b9d9 100755
--- a/server/bootstrap.py
+++ b/server/bootstrap.py
@@ -1,7 +1,6 @@
"""Application bootstrap helpers for the FastAPI server."""
from fastapi import FastAPI
-from fastapi.middleware.cors import CORSMiddleware
from server import state
from server.config_schema_router import router as config_schema_router
@@ -13,13 +12,6 @@
def init_app(app: FastAPI) -> None:
"""Apply shared middleware, routers, and global state to ``app``."""
- app.add_middleware(
- CORSMiddleware,
- allow_origins=["*"],
- allow_credentials=True,
- allow_methods=["*"],
- allow_headers=["*"],
- )
add_exception_handlers(app)
add_middleware(app)
diff --git a/tools/sync_vuegraphs.py b/tools/sync_vuegraphs.py
new file mode 100644
index 000000000..60bb7ee9f
--- /dev/null
+++ b/tools/sync_vuegraphs.py
@@ -0,0 +1,65 @@
+"""
+Synchronize YAML Configurations to VueGraph Database
+
+This tool uploads local YAML workflow configurations from the yaml_instance/
+directory to the VueGraph database via the API endpoint. This is essential for
+making workflow configurations available to the frontend visualization system.
+
+Purpose:
+- Ensures the database reflects the latest YAML configurations
+- Required after modifying workflow YAML files to see changes in the UI
+- Useful for development and deployment workflows
+
+Usage:
+ python tools/sync_vuegraphs.py
+ # or via Makefile:
+ make sync
+"""
+
+import os
+import glob
+import requests
+import yaml
+from pathlib import Path
+
+# Configuration
+API_URL = "http://localhost:6400/api/vuegraphs/upload/content"
+YAML_DIR = "yaml_instance"
+
+
+def sync_yaml_to_vuegraphs():
+ """Reads all YAML files and uploads them to the VueGraph database."""
+ print(f"Syncing YAML files from {YAML_DIR} to {API_URL}...")
+
+ yaml_files = glob.glob(os.path.join(YAML_DIR, "*.yaml"))
+
+ for file_path in yaml_files:
+ try:
+            filename = Path(file_path).stem  # e.g. "simulation_hospital_lmstudio" (file name without .yaml)
+
+ with open(file_path, "r") as f:
+ content = f.read()
+
+ # Basic validation to ensure it's a valid YAML
+ try:
+ yaml.safe_load(content)
+ except yaml.YAMLError as e:
+ print(f"Skipping {filename}: Invalid YAML - {e}")
+ continue
+
+ # Upload to VueGraph API
+ payload = {"filename": filename, "content": content}
+
+            response = requests.post(API_URL, json=payload, timeout=30)
+
+ if response.status_code == 200:
+ print(f"Synced: {filename}")
+ else:
+ print(f"Failed: {filename} - {response.status_code} {response.text}")
+
+ except Exception as e:
+ print(f"Error processing {file_path}: {e}")
+
+
+if __name__ == "__main__":
+ sync_yaml_to_vuegraphs()
diff --git a/tools/validate_all_yamls.py b/tools/validate_all_yamls.py
new file mode 100644
index 000000000..19f89d49b
--- /dev/null
+++ b/tools/validate_all_yamls.py
@@ -0,0 +1,104 @@
+"""
+Validate All YAML Workflow Configurations
+
+This tool performs strict validation on all YAML workflow configuration files
+in the yaml_instance/ directory. It ensures configuration integrity and prevents
+runtime errors by catching issues early in the development process.
+
+Purpose:
+- Validates YAML syntax and schema compliance for all workflow configurations
+- Prevents invalid configurations from causing runtime failures
+- Essential for CI/CD pipelines to ensure code quality
+- Provides detailed error reporting for debugging
+
+Usage:
+ python tools/validate_all_yamls.py
+ # or via Makefile:
+ make validate-yamls
+"""
+
+import sys
+import subprocess
+from pathlib import Path
+
+
+def validate_all():
+ base_dir = Path("yaml_instance")
+ if not base_dir.exists():
+ print(f"Directory {base_dir} not found.")
+ sys.exit(1)
+
+ # Recursive search for all .yaml files
+ files = sorted(list(base_dir.rglob("*.yaml")))
+
+ if not files:
+ print("No YAML files found.")
+ return
+
+ print(
+ f"Found {len(files)} YAML files. Running FULL validation via check.check...\n"
+ )
+
+ passed = 0
+ failed = 0
+ failed_files = []
+
+ for yaml_file in files:
+ # Use relative path for cleaner output
+ try:
+ rel_path = yaml_file.relative_to(Path.cwd())
+ except ValueError:
+ rel_path = yaml_file
+
+        # Run check.check as a module CLI; it applies the stricter load_config() validation
+ cmd = [sys.executable, "-m", "check.check", "--path", str(yaml_file)]
+
+ try:
+ result = subprocess.run(cmd, capture_output=True, text=True)
+
+            if result.returncode == 0:
+                print(f"PASS: {rel_path}")
+                passed += 1
+            else:
+                print(f"FAIL: {rel_path}")
+                # Indent captured output; validation errors may land on stdout or stderr
+                if result.stdout:
+                    print("  stdout:", result.stdout.strip().replace("\n", "\n  "))
+                if result.stderr:
+                    print("  stderr:", result.stderr.strip().replace("\n", "\n  "))
+                failed += 1
+                failed_files.append(str(rel_path))
+ except Exception as e:
+            print(f"FAIL: {rel_path} (execution failed)")
+ print(f" Error: {e}")
+ failed += 1
+ failed_files.append(str(rel_path))
+
+ print("\n" + "=" * 40)
+    print("YAML Validation Summary")
+ print("=" * 40)
+ print(f"Total Files: {len(files)}")
+ print(f"Passed: {passed}")
+ print(f"Failed: {failed}")
+
+ if failed > 0:
+ print("\nFailed Files:")
+ for f in failed_files:
+ print(f"- {f}")
+
+ # Overall validation status
+ print("\n" + "=" * 40)
+ print("Overall Validation Status")
+ print("=" * 40)
+
+ if failed > 0:
+ print("YAML validation: FAILED")
+ sys.exit(1)
+ else:
+ print("All validations passed successfully.")
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ validate_all()
diff --git a/utils/middleware.py b/utils/middleware.py
index 63012cac3..90d4293c2 100755
--- a/utils/middleware.py
+++ b/utils/middleware.py
@@ -2,10 +2,12 @@
import uuid
from typing import Callable, Awaitable
-from fastapi import Request, HTTPException
+from fastapi import Request, HTTPException, FastAPI
from fastapi.responses import JSONResponse
+from fastapi.middleware.cors import CORSMiddleware
import time
import re
+import os
from utils.structured_logger import get_server_logger, LogType
from utils.exceptions import SecurityError
@@ -85,11 +87,42 @@ async def rate_limit_middleware(request: Request, call_next: Callable):
return response
-def add_middleware(app):
+def add_cors_middleware(app: FastAPI) -> None:
+ """Configure and attach CORS middleware."""
+ # Dev defaults; override via CORS_ALLOW_ORIGINS (comma-separated)
+ default_origins = [
+ "http://localhost:5173",
+ "http://127.0.0.1:5173",
+ ]
+ env_origins = os.getenv("CORS_ALLOW_ORIGINS")
+ if env_origins:
+ origins = [o.strip() for o in env_origins.split(",") if o.strip()]
+ origin_regex = None
+ else:
+ origins = default_origins
+ # Helpful in dev: allow localhost/127.0.0.1 on any port
+ origin_regex = r"^https?://(localhost|127\.0\.0\.1)(:\d+)?$"
+
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=origins,
+ allow_origin_regex=origin_regex,
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ expose_headers=["X-Correlation-ID"],
+ max_age=600,
+ )
+
+
+def add_middleware(app: FastAPI):
"""Add all middleware to the FastAPI application."""
- # Add middleware in the appropriate order
+ # Attach CORS first to handle preflight requests and allow origins.
+ add_cors_middleware(app)
+
+ # Add other middleware
app.middleware("http")(correlation_id_middleware)
app.middleware("http")(security_middleware)
# app.middleware("http")(rate_limit_middleware) # Enable if needed
- return app
\ No newline at end of file
+ return app