Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -558,6 +558,12 @@ MS-Agent provides a modern web interface for interacting with agents. Built with
ms-agent ui
```

**Windows tip:** If the console shows garbled text, use the PowerShell helper:

```powershell
webui/scripts/start-webui.ps1
```

The browser will automatically open at http://localhost:7860

**Command Options:**
Expand Down
6 changes: 6 additions & 0 deletions README_ZH.md
Original file line number Diff line number Diff line change
Expand Up @@ -568,6 +568,12 @@ MS-Agent提供了一个简洁轻量的Web界面,用于与智能体进行交互
ms-agent ui
```

**Windows 提示:** 若控制台出现乱码,建议使用 PowerShell 启动脚本:

```powershell
webui/scripts/start-webui.ps1
```

浏览器打开: http://localhost:7860

**命令参数**
Expand Down
2 changes: 1 addition & 1 deletion projects/deep_research/v2/reporter.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ llm:
service: openai
model: qwen-plus
openai_api_key: <OPENAI_API_KEY>
openai_base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
openai_base_url: <OPENAI_BASE_URL>


generation_config:
Expand Down
2 changes: 1 addition & 1 deletion projects/deep_research/v2/researcher.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ llm:
service: openai
model: qwen3-max
openai_api_key: <OPENAI_API_KEY>
openai_base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
openai_base_url: <OPENAI_BASE_URL>


generation_config:
Expand Down
2 changes: 1 addition & 1 deletion projects/deep_research/v2/searcher.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ llm:
service: openai
model: qwen-plus
openai_api_key: <OPENAI_API_KEY>
openai_base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
openai_base_url: <OPENAI_BASE_URL>


generation_config:
Expand Down
1 change: 1 addition & 0 deletions requirements/research.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,4 @@ pandas
Pillow
python-dotenv
requests
rich
38 changes: 37 additions & 1 deletion webui/backend/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import FileResponse
from pydantic import BaseModel
from pydantic import BaseModel, Field
# Import shared instances
from shared import config_manager, project_discovery, session_manager

Expand Down Expand Up @@ -86,6 +86,29 @@ class SearchKeysConfig(BaseModel):
serpapi_api_key: Optional[str] = None


class DeepResearchAgentConfig(BaseModel):
    """Per-agent LLM override for the deep-research pipeline.

    Each field defaults to ``None`` ("not set"), matching the convention of
    the other optional Pydantic models in this module (e.g.
    ``SearchKeysConfig``). Consumers treat any falsy value as "fall back to
    the global LLM configuration", so ``None`` is backward-compatible with
    the previous ``''`` default while making intent explicit.
    """
    model: Optional[str] = None
    api_key: Optional[str] = None
    base_url: Optional[str] = None


class DeepResearchSearchConfig(BaseModel):
    """Overrides for the web-search summarizer used by deep research.

    Fields default to ``None`` ("not set") for consistency with the other
    optional models in this module; downstream code treats any falsy value
    as "use the default summarizer settings", so this is backward-compatible
    with the previous ``''`` defaults.
    """
    summarizer_model: Optional[str] = None
    summarizer_api_key: Optional[str] = None
    summarizer_base_url: Optional[str] = None
Comment on lines +89 to +98
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

For consistency with other Pydantic models in this file (e.g., LLMConfig) and to make the intent clearer, it's better to use None as the default value for optional fields instead of an empty string ''. This explicitly indicates that a value has not been set, whereas '' could be a valid (though empty) value.

Suggested change
class DeepResearchAgentConfig(BaseModel):
model: Optional[str] = ''
api_key: Optional[str] = ''
base_url: Optional[str] = ''
class DeepResearchSearchConfig(BaseModel):
summarizer_model: Optional[str] = ''
summarizer_api_key: Optional[str] = ''
summarizer_base_url: Optional[str] = ''
class DeepResearchAgentConfig(BaseModel):
model: Optional[str] = None
api_key: Optional[str] = None
base_url: Optional[str] = None
class DeepResearchSearchConfig(BaseModel):
summarizer_model: Optional[str] = None
summarizer_api_key: Optional[str] = None
summarizer_base_url: Optional[str] = None



class DeepResearchConfig(BaseModel):
    """Aggregate deep-research configuration payload for the config API.

    Groups the per-role LLM overrides (researcher / searcher / reporter)
    and the web-search summarizer settings. Every section defaults to an
    empty sub-model, so partial update payloads are accepted.
    """
    # default_factory gives each instance its own fresh sub-model; a plain
    # shared default instance would alias mutable state across requests.
    researcher: DeepResearchAgentConfig = Field(
        default_factory=DeepResearchAgentConfig)
    searcher: DeepResearchAgentConfig = Field(
        default_factory=DeepResearchAgentConfig)
    reporter: DeepResearchAgentConfig = Field(
        default_factory=DeepResearchAgentConfig)
    # Summarizer overrides for the web_search tool used by the searcher.
    search: DeepResearchSearchConfig = Field(
        default_factory=DeepResearchSearchConfig)


class MCPServer(BaseModel):
name: str
type: str # 'stdio' or 'sse'
Expand Down Expand Up @@ -330,6 +353,19 @@ async def update_search_keys_config(config: SearchKeysConfig):
return {'status': 'updated'}


@router.get('/config/deep_research')
async def get_deep_research_config():
    """Return the stored deep-research configuration.

    Delegates to the shared ``config_manager``; the response is the raw
    configuration mapping (per-role LLM overrides plus search settings).
    """
    return config_manager.get_deep_research_config()


@router.put('/config/deep_research')
async def update_deep_research_config(config: DeepResearchConfig):
    """Replace the stored deep-research configuration.

    The validated Pydantic payload is dumped to a plain dict before being
    persisted via the shared ``config_manager``. Returns a simple status
    object on success.
    """
    config_manager.update_deep_research_config(config.model_dump())
    return {'status': 'updated'}


@router.post('/config/mcp/servers')
async def add_mcp_server(server: MCPServer):
"""Add a new MCP server"""
Expand Down
35 changes: 35 additions & 0 deletions webui/backend/config_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,28 @@ class ConfigManager:
'temperature_enabled': False,
'max_tokens': None
},
'deep_research': {
'researcher': {
'model': '',
'api_key': '',
'base_url': ''
},
'searcher': {
'model': '',
'api_key': '',
'base_url': ''
},
'reporter': {
'model': '',
'api_key': '',
'base_url': ''
},
'search': {
'summarizer_model': '',
'summarizer_api_key': '',
'summarizer_base_url': ''
}
Comment on lines +26 to +46
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

To align with the suggested change in api.py to use None as the default for optional config fields, the DEFAULT_CONFIG for deep_research should also be updated to use None instead of empty strings (''). This ensures consistency across the application.

        'deep_research': {
            'researcher': {
                'model': None,
                'api_key': None,
                'base_url': None
            },
            'searcher': {
                'model': None,
                'api_key': None,
                'base_url': None
            },
            'reporter': {
                'model': None,
                'api_key': None,
                'base_url': None
            },
            'search': {
                'summarizer_model': None,
                'summarizer_api_key': None,
                'summarizer_base_url': None
            }
        },

},
'edit_file_config': {
'api_key': '',
'base_url': 'https://api.morphllm.com/v1',
Expand Down Expand Up @@ -169,6 +191,19 @@ def update_search_keys(self, search_keys: Dict[str, Any]):
self._config['search_keys'] = search_keys
self._save_config()

def get_deep_research_config(self) -> Dict[str, Any]:
    """Return the persisted deep-research configuration.

    Reloads the on-disk configuration first; if no ``deep_research``
    section has ever been saved, the built-in default section is returned.
    """
    stored = self._load_config()
    fallback = self.DEFAULT_CONFIG['deep_research']
    return stored.get('deep_research', fallback)

def update_deep_research_config(self, deep_research_config: Dict[str,
                                                                 Any]):
    """Persist a new deep-research configuration.

    Refreshes the in-memory config from disk first so unrelated sections
    are not clobbered, then replaces the ``deep_research`` section
    wholesale and writes the result back out.
    """
    self._load_config()
    self._config['deep_research'] = deep_research_config
    self._save_config()

def add_mcp_server(self, name: str, server_config: Dict[str, Any]):
"""Add a new MCP server"""
self._load_config()
Expand Down
134 changes: 107 additions & 27 deletions webui/backend/deep_research_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,44 +52,71 @@ def _load_llm_config() -> Dict[str, Any]:
return {}


def _build_config_override(llm_config: Dict[str, Any],
output_dir: str) -> Optional[Dict[str, Any]]:
def _load_deep_research_config() -> Dict[str, Any]:
raw = os.environ.get('MS_AGENT_DEEP_RESEARCH_CONFIG')
if not raw:
return {}
try:
return json.loads(raw)
except Exception:
return {}
Comment on lines +61 to +62
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Catching a broad Exception can hide unexpected errors. It's better to catch the specific exception that can be raised here, which is json.JSONDecodeError.

Suggested change
except Exception:
return {}
except json.JSONDecodeError:
return {}



def _normalize_agent_override(raw: Optional[Dict[str, Any]]) -> Dict[str, str]:
raw = raw or {}
return {
'model': str(raw.get('model') or ''),
'api_key': str(raw.get('api_key') or ''),
'base_url': str(raw.get('base_url') or ''),
}


def _resolve_agent_llm_config(role: str, llm_config: Dict[str, Any],
                              dr_config: Dict[str, Any]) -> Dict[str, str]:
    """Resolve the effective LLM settings for one agent role.

    Per-role values from ``dr_config[role]`` take precedence; any field
    left empty there falls back to the global ``llm_config``. The result
    always contains string values for ``model``, ``api_key`` and
    ``base_url``.
    """
    role_overrides = _normalize_agent_override((dr_config or {}).get(role))
    resolved: Dict[str, str] = {}
    for key in ('model', 'api_key', 'base_url'):
        resolved[key] = role_overrides.get(key) or str(
            llm_config.get(key) or '')
    return resolved


def _normalize_search_override(
raw: Optional[Dict[str, Any]]) -> Dict[str, str]:
raw = raw or {}
return {
'summarizer_model': str(raw.get('summarizer_model') or ''),
'summarizer_api_key': str(raw.get('summarizer_api_key') or ''),
'summarizer_base_url': str(raw.get('summarizer_base_url') or ''),
}


def _build_config_override(
llm_config: Dict[str, Any], output_dir: str,
dr_config: Dict[str, Any]) -> Optional[Dict[str, Any]]:
override: Dict[str, Any] = {}
if output_dir:
override['output_dir'] = output_dir

llm_override: Dict[str, Any] = {}
provider = (llm_config.get('provider') or '').strip()
model = llm_config.get('model')
api_key = llm_config.get('api_key')
base_url = llm_config.get('base_url')
resolved = _resolve_agent_llm_config('researcher', llm_config, dr_config)
model = resolved.get('model')
api_key = resolved.get('api_key')
base_url = resolved.get('base_url')
temperature = llm_config.get('temperature')
temperature_enabled = bool(llm_config.get('temperature_enabled', False))
max_tokens = llm_config.get('max_tokens')

if provider in {'modelscope', 'openai', 'anthropic', 'dashscope'}:
llm_override['service'] = provider
else:
llm_override['service'] = 'openai'

if model:
llm_override['model'] = model

if llm_override['service'] == 'modelscope':
if api_key:
llm_override['modelscope_api_key'] = api_key
if base_url:
llm_override['modelscope_base_url'] = base_url
elif llm_override['service'] == 'anthropic':
if api_key:
llm_override['anthropic_api_key'] = api_key
if base_url:
llm_override['anthropic_base_url'] = base_url
else:
if api_key:
llm_override['openai_api_key'] = api_key
if base_url:
llm_override['openai_base_url'] = base_url
if api_key:
llm_override['openai_api_key'] = api_key
if base_url:
llm_override['openai_base_url'] = base_url

if llm_override:
override['llm'] = llm_override
Expand Down Expand Up @@ -187,7 +214,9 @@ async def consume_subagent_events():
eventizer.process(history)

llm_config = _load_llm_config()
config_override = _build_config_override(llm_config, args.output_dir)
dr_config = _load_deep_research_config()
config_override = _build_config_override(llm_config, args.output_dir,
dr_config)
config_override = OmegaConf.create(
config_override) if config_override else None

Expand Down Expand Up @@ -215,6 +244,57 @@ async def prepare_tools_with_callback():
updated['output_dir'] = args.output_dir
spec.inline_config = updated

tool_name = str(spec.tool_name or '')
if 'searcher' in tool_name:
resolved = _resolve_agent_llm_config(
'searcher', llm_config, dr_config)
search_override = _normalize_search_override(
(dr_config or {}).get('search'))
elif 'reporter' in tool_name:
resolved = _resolve_agent_llm_config(
'reporter', llm_config, dr_config)
search_override = {}
else:
resolved = {}
search_override = {}

if resolved:
updated = dict(spec.inline_config or {})
llm_cfg = dict(updated.get('llm') or {})
if resolved.get('model'):
llm_cfg['model'] = resolved['model']
if resolved.get('api_key'):
llm_cfg['openai_api_key'] = resolved['api_key']
if resolved.get('base_url'):
llm_cfg['openai_base_url'] = resolved['base_url']
if llm_cfg:
updated['llm'] = llm_cfg
if search_override:
tools_cfg = dict(updated.get('tools') or {})
web_cfg = dict(tools_cfg.get('web_search') or {})
if search_override.get('summarizer_model'):
web_cfg['summarizer_model'] = search_override[
'summarizer_model']
if search_override.get('summarizer_api_key'):
web_cfg[
'summarizer_api_key'] = search_override[
'summarizer_api_key']
if search_override.get('summarizer_base_url'):
web_cfg[
'summarizer_base_url'] = search_override[
'summarizer_base_url']
if web_cfg:
tools_cfg['web_search'] = web_cfg
updated['tools'] = tools_cfg
spec.inline_config = updated

env_cfg = dict(spec.env or {})
if resolved.get('api_key'):
env_cfg['OPENAI_API_KEY'] = resolved['api_key']
if resolved.get('base_url'):
env_cfg['OPENAI_BASE_URL'] = resolved['base_url']
spec.env = env_cfg

agent.prepare_tools = prepare_tools_with_callback

artifact_task = asyncio.create_task(
Expand Down
36 changes: 25 additions & 11 deletions webui/backend/deep_research_worker_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,26 @@ def _get_repo_root(self) -> Path:
def _get_worker_path(self) -> Path:
return Path(__file__).resolve().parent / 'deep_research_worker.py'

def _build_env(self, env_vars: Optional[Dict[str, str]],
llm_config: Optional[Dict[str, Any]]) -> Dict[str, str]:
def _build_env(
self, env_vars: Optional[Dict[str, str]],
llm_config: Optional[Dict[str, Any]],
deep_research_config: Optional[Dict[str, Any]]) -> Dict[str, str]:
env = os.environ.copy()
if env_vars:
env.update({k: v for k, v in env_vars.items() if v})
if llm_config:
env['MS_AGENT_LLM_CONFIG'] = json.dumps(
llm_config, ensure_ascii=False)
if deep_research_config:
env['MS_AGENT_DEEP_RESEARCH_CONFIG'] = json.dumps(
deep_research_config, ensure_ascii=False)

api_key = (llm_config or {}).get('api_key')
base_url = (llm_config or {}).get('base_url')
if api_key and not env.get('OPENAI_API_KEY'):
env['OPENAI_API_KEY'] = api_key
if base_url and not env.get('OPENAI_BASE_URL'):
env['OPENAI_BASE_URL'] = base_url
env['PYTHONUNBUFFERED'] = '1'
repo_root = str(self._get_repo_root())
existing_path = env.get('PYTHONPATH', '')
Expand All @@ -41,14 +53,16 @@ def _build_env(self, env_vars: Optional[Dict[str, str]],
os.pathsep + existing_path if existing_path else '')
return env

async def start(self,
session_id: str,
*,
query: str,
config_path: str,
output_dir: str,
env_vars: Optional[Dict[str, str]] = None,
llm_config: Optional[Dict[str, Any]] = None) -> None:
async def start(
self,
session_id: str,
*,
query: str,
config_path: str,
output_dir: str,
env_vars: Optional[Dict[str, str]] = None,
llm_config: Optional[Dict[str, Any]] = None,
deep_research_config: Optional[Dict[str, Any]] = None) -> None:
if session_id in self._processes:
await self.stop(session_id)

Expand All @@ -69,7 +83,7 @@ async def start(self,
str(output_dir_path),
]

env = self._build_env(env_vars, llm_config)
env = self._build_env(env_vars, llm_config, deep_research_config)

process = await asyncio.create_subprocess_exec(
*cmd,
Expand Down
Loading
Loading