From 4eea1f9be9cf7fec13f6dc6903bb11b41bea3108 Mon Sep 17 00:00:00 2001
From: Yurii Chukhlib
Date: Sat, 17 Jan 2026 11:09:40 +0100
Subject: [PATCH] docs: Update return types for arun() and arun_many() to
 RunManyReturn

Fixes #1543

The documentation was incorrectly stating that:
- arun() returns CrawlResult
- arun_many() returns Union[List[CrawlResult], AsyncGenerator[CrawlResult, None]]

However, both methods actually return RunManyReturn. This caused issues
with IDE auto-completion and type hints.

Updated:
- docs/md_v2/api/async-webcrawler.md: Fixed return type for arun() and arun_many()
- docs/md_v2/api/arun_many.md: Fixed return type and related descriptions

Co-Authored-By: Claude
---
 docs/md_v2/api/arun_many.md        | 14 +++++++-------
 docs/md_v2/api/async-webcrawler.md |  4 ++--
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/docs/md_v2/api/arun_many.md b/docs/md_v2/api/arun_many.md
index 146584c36..b609e9056 100644
--- a/docs/md_v2/api/arun_many.md
+++ b/docs/md_v2/api/arun_many.md
@@ -10,7 +10,7 @@ async def arun_many(
     config: Optional[Union[CrawlerRunConfig, List[CrawlerRunConfig]]] = None,
     dispatcher: Optional[BaseDispatcher] = None,
     ...
-) -> Union[List[CrawlResult], AsyncGenerator[CrawlResult, None]]:
+) -> RunManyReturn:
     """
     Crawl multiple URLs concurrently or in batches.
 
@@ -20,16 +20,16 @@
       - A list of `CrawlerRunConfig` objects with url_matcher patterns
     :param dispatcher: (Optional) A concurrency controller (e.g. MemoryAdaptiveDispatcher).
     ...
-    :return: Either a list of `CrawlResult` objects, or an async generator if streaming is enabled.
+    :return: RunManyReturn containing either a list of `CrawlResult` objects or an async generator if streaming is enabled.
     """
 ```
 
 ## Differences from `arun()`
 
-1. **Multiple URLs**:
-
-   - Instead of crawling a single URL, you pass a list of them (strings or tasks). 
-   - The function returns either a **list** of `CrawlResult` or an **async generator** if streaming is enabled.
+1. **Multiple URLs**:
+
+   - Instead of crawling a single URL, you pass a list of them (strings or tasks).
+   - The function returns `RunManyReturn` which contains either a **list** of `CrawlResult` or an **async generator** if streaming is enabled.
 
 2. **Concurrency & Dispatchers**:
 
@@ -164,7 +164,7 @@ results = await crawler.arun_many(
 
 ### Return Value
 
-Either a **list** of [`CrawlResult`](./crawl-result.md) objects, or an **async generator** if streaming is enabled. You can iterate to check `result.success` or read each item’s `extracted_content`, `markdown`, or `dispatch_result`.
+Returns a **`RunManyReturn`** object which contains either a **list** of [`CrawlResult`](./crawl-result.md) objects, or an **async generator** if streaming is enabled. You can iterate to check `result.success` or read each item's `extracted_content`, `markdown`, or `dispatch_result`.
 
 ---
 
diff --git a/docs/md_v2/api/async-webcrawler.md b/docs/md_v2/api/async-webcrawler.md
index b8f105fc8..292630553 100644
--- a/docs/md_v2/api/async-webcrawler.md
+++ b/docs/md_v2/api/async-webcrawler.md
@@ -103,7 +103,7 @@ async def arun(
     url: str,
     config: Optional[CrawlerRunConfig] = None,
     # Legacy parameters for backward compatibility...
-) -> CrawlResult:
+) -> RunManyReturn:
     ...
 ```
 
@@ -143,7 +143,7 @@ async def arun_many(
     urls: List[str],
     config: Optional[CrawlerRunConfig] = None,
     # Legacy parameters maintained for backwards compatibility...
-) -> List[CrawlResult]:
+) -> RunManyReturn:
     """
     Process multiple URLs with intelligent rate limiting and resource monitoring.
     """