-
Notifications
You must be signed in to change notification settings - Fork 460
fix webui #863
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
fix webui #863
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -13,3 +13,4 @@ pandas | |
| Pillow | ||
| python-dotenv | ||
| requests | ||
| rich | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -23,6 +23,28 @@ class ConfigManager: | |
| 'temperature_enabled': False, | ||
| 'max_tokens': None | ||
| }, | ||
| 'deep_research': { | ||
| 'researcher': { | ||
| 'model': '', | ||
| 'api_key': '', | ||
| 'base_url': '' | ||
| }, | ||
| 'searcher': { | ||
| 'model': '', | ||
| 'api_key': '', | ||
| 'base_url': '' | ||
| }, | ||
| 'reporter': { | ||
| 'model': '', | ||
| 'api_key': '', | ||
| 'base_url': '' | ||
| }, | ||
| 'search': { | ||
| 'summarizer_model': '', | ||
| 'summarizer_api_key': '', | ||
| 'summarizer_base_url': '' | ||
| } | ||
|
Comment on lines
+26
to
+46
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. To align with the suggested change in 'deep_research': {
'researcher': {
'model': None,
'api_key': None,
'base_url': None
},
'searcher': {
'model': None,
'api_key': None,
'base_url': None
},
'reporter': {
'model': None,
'api_key': None,
'base_url': None
},
'search': {
'summarizer_model': None,
'summarizer_api_key': None,
'summarizer_base_url': None
}
}, |
||
| }, | ||
| 'edit_file_config': { | ||
| 'api_key': '', | ||
| 'base_url': 'https://api.morphllm.com/v1', | ||
|
|
@@ -169,6 +191,19 @@ def update_search_keys(self, search_keys: Dict[str, Any]): | |
| self._config['search_keys'] = search_keys | ||
| self._save_config() | ||
|
|
||
| def get_deep_research_config(self) -> Dict[str, Any]: | ||
| """Get deep research configuration""" | ||
| config = self._load_config() | ||
| return config.get('deep_research', | ||
| self.DEFAULT_CONFIG['deep_research']) | ||
|
|
||
| def update_deep_research_config(self, deep_research_config: Dict[str, | ||
| Any]): | ||
| """Update deep research configuration""" | ||
| self._load_config() | ||
| self._config['deep_research'] = deep_research_config | ||
| self._save_config() | ||
|
|
||
| def add_mcp_server(self, name: str, server_config: Dict[str, Any]): | ||
| """Add a new MCP server""" | ||
| self._load_config() | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -52,44 +52,71 @@ def _load_llm_config() -> Dict[str, Any]: | |
| return {} | ||
|
|
||
|
|
||
| def _build_config_override(llm_config: Dict[str, Any], | ||
| output_dir: str) -> Optional[Dict[str, Any]]: | ||
| def _load_deep_research_config() -> Dict[str, Any]: | ||
| raw = os.environ.get('MS_AGENT_DEEP_RESEARCH_CONFIG') | ||
| if not raw: | ||
| return {} | ||
| try: | ||
| return json.loads(raw) | ||
| except Exception: | ||
| return {} | ||
|
Comment on lines
+61
to
+62
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
|
|
||
|
|
||
| def _normalize_agent_override(raw: Optional[Dict[str, Any]]) -> Dict[str, str]: | ||
| raw = raw or {} | ||
| return { | ||
| 'model': str(raw.get('model') or ''), | ||
| 'api_key': str(raw.get('api_key') or ''), | ||
| 'base_url': str(raw.get('base_url') or ''), | ||
| } | ||
|
|
||
|
|
||
| def _resolve_agent_llm_config(role: str, llm_config: Dict[str, Any], | ||
| dr_config: Dict[str, Any]) -> Dict[str, str]: | ||
| overrides = _normalize_agent_override((dr_config or {}).get(role)) | ||
| return { | ||
| 'model': | ||
| overrides.get('model') or str(llm_config.get('model') or ''), | ||
| 'api_key': | ||
| overrides.get('api_key') or str(llm_config.get('api_key') or ''), | ||
| 'base_url': | ||
| overrides.get('base_url') or str(llm_config.get('base_url') or ''), | ||
| } | ||
|
|
||
|
|
||
| def _normalize_search_override( | ||
| raw: Optional[Dict[str, Any]]) -> Dict[str, str]: | ||
| raw = raw or {} | ||
| return { | ||
| 'summarizer_model': str(raw.get('summarizer_model') or ''), | ||
| 'summarizer_api_key': str(raw.get('summarizer_api_key') or ''), | ||
| 'summarizer_base_url': str(raw.get('summarizer_base_url') or ''), | ||
| } | ||
|
|
||
|
|
||
| def _build_config_override( | ||
| llm_config: Dict[str, Any], output_dir: str, | ||
| dr_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: | ||
| override: Dict[str, Any] = {} | ||
| if output_dir: | ||
| override['output_dir'] = output_dir | ||
|
|
||
| llm_override: Dict[str, Any] = {} | ||
| provider = (llm_config.get('provider') or '').strip() | ||
| model = llm_config.get('model') | ||
| api_key = llm_config.get('api_key') | ||
| base_url = llm_config.get('base_url') | ||
| resolved = _resolve_agent_llm_config('researcher', llm_config, dr_config) | ||
| model = resolved.get('model') | ||
| api_key = resolved.get('api_key') | ||
| base_url = resolved.get('base_url') | ||
| temperature = llm_config.get('temperature') | ||
| temperature_enabled = bool(llm_config.get('temperature_enabled', False)) | ||
| max_tokens = llm_config.get('max_tokens') | ||
|
|
||
| if provider in {'modelscope', 'openai', 'anthropic', 'dashscope'}: | ||
| llm_override['service'] = provider | ||
| else: | ||
| llm_override['service'] = 'openai' | ||
|
|
||
| if model: | ||
| llm_override['model'] = model | ||
|
|
||
| if llm_override['service'] == 'modelscope': | ||
| if api_key: | ||
| llm_override['modelscope_api_key'] = api_key | ||
| if base_url: | ||
| llm_override['modelscope_base_url'] = base_url | ||
| elif llm_override['service'] == 'anthropic': | ||
| if api_key: | ||
| llm_override['anthropic_api_key'] = api_key | ||
| if base_url: | ||
| llm_override['anthropic_base_url'] = base_url | ||
| else: | ||
| if api_key: | ||
| llm_override['openai_api_key'] = api_key | ||
| if base_url: | ||
| llm_override['openai_base_url'] = base_url | ||
| if api_key: | ||
| llm_override['openai_api_key'] = api_key | ||
| if base_url: | ||
| llm_override['openai_base_url'] = base_url | ||
alcholiclg marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| if llm_override: | ||
| override['llm'] = llm_override | ||
|
|
@@ -187,7 +214,9 @@ async def consume_subagent_events(): | |
| eventizer.process(history) | ||
|
|
||
| llm_config = _load_llm_config() | ||
| config_override = _build_config_override(llm_config, args.output_dir) | ||
| dr_config = _load_deep_research_config() | ||
| config_override = _build_config_override(llm_config, args.output_dir, | ||
| dr_config) | ||
| config_override = OmegaConf.create( | ||
| config_override) if config_override else None | ||
|
|
||
|
|
@@ -215,6 +244,57 @@ async def prepare_tools_with_callback(): | |
| updated['output_dir'] = args.output_dir | ||
| spec.inline_config = updated | ||
|
|
||
| tool_name = str(spec.tool_name or '') | ||
| if 'searcher' in tool_name: | ||
| resolved = _resolve_agent_llm_config( | ||
| 'searcher', llm_config, dr_config) | ||
| search_override = _normalize_search_override( | ||
| (dr_config or {}).get('search')) | ||
| elif 'reporter' in tool_name: | ||
| resolved = _resolve_agent_llm_config( | ||
| 'reporter', llm_config, dr_config) | ||
| search_override = {} | ||
| else: | ||
| resolved = {} | ||
| search_override = {} | ||
|
|
||
| if resolved: | ||
| updated = dict(spec.inline_config or {}) | ||
| llm_cfg = dict(updated.get('llm') or {}) | ||
| if resolved.get('model'): | ||
| llm_cfg['model'] = resolved['model'] | ||
| if resolved.get('api_key'): | ||
| llm_cfg['openai_api_key'] = resolved['api_key'] | ||
| if resolved.get('base_url'): | ||
| llm_cfg['openai_base_url'] = resolved['base_url'] | ||
| if llm_cfg: | ||
| updated['llm'] = llm_cfg | ||
| if search_override: | ||
| tools_cfg = dict(updated.get('tools') or {}) | ||
| web_cfg = dict(tools_cfg.get('web_search') or {}) | ||
| if search_override.get('summarizer_model'): | ||
| web_cfg['summarizer_model'] = search_override[ | ||
| 'summarizer_model'] | ||
| if search_override.get('summarizer_api_key'): | ||
| web_cfg[ | ||
| 'summarizer_api_key'] = search_override[ | ||
| 'summarizer_api_key'] | ||
| if search_override.get('summarizer_base_url'): | ||
| web_cfg[ | ||
| 'summarizer_base_url'] = search_override[ | ||
| 'summarizer_base_url'] | ||
| if web_cfg: | ||
| tools_cfg['web_search'] = web_cfg | ||
| updated['tools'] = tools_cfg | ||
| spec.inline_config = updated | ||
|
|
||
| env_cfg = dict(spec.env or {}) | ||
| if resolved.get('api_key'): | ||
| env_cfg['OPENAI_API_KEY'] = resolved['api_key'] | ||
| if resolved.get('base_url'): | ||
| env_cfg['OPENAI_BASE_URL'] = resolved['base_url'] | ||
| spec.env = env_cfg | ||
|
|
||
| agent.prepare_tools = prepare_tools_with_callback | ||
|
|
||
| artifact_task = asyncio.create_task( | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
For consistency with other Pydantic models in this file (e.g.,
`LLMConfig`) and to make the intent clearer, it's better to use `None` as the default value for optional fields instead of an empty string `''`. This explicitly indicates that a value has not been set, whereas `''` could be a valid (though empty) value.