Skip to content
Merged
Show file tree
Hide file tree
Changes from 24 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
4018157
mark1 with example working
varadsrivastava May 25, 2025
3e4f7c6
duckduckgo agent mark 2 example updated
varadsrivastava May 28, 2025
fcbc97e
Update _duckduckgo_agent.py
varadsrivastava May 28, 2025
b8d8357
Merge branch 'microsoft:main' into main
varadsrivastava May 28, 2025
7473bd7
mark 2 updates: final examples added
varadsrivastava Jun 3, 2025
d61b4b7
Merge branch 'microsoft:main' into main
varadsrivastava Jun 3, 2025
72fadef
Merge branch 'microsoft:main' into main
varadsrivastava Jun 15, 2025
5da3d8d
Merge branch 'microsoft:main' into main
varadsrivastava Jun 16, 2025
e65ef87
added duckduckgo search agent
varadsrivastava Jun 16, 2025
3f42bf6
Merge branch 'main' of https://github.com/varadsrivastava/autogen
varadsrivastava Jun 16, 2025
30b037d
duckduckgo search agent dependency update
varadsrivastava Jun 16, 2025
1d697f3
Updated prompt variables and RST code blocks
varadsrivastava Jun 18, 2025
ef7cd84
Merge branch 'main' into main
varadsrivastava Jun 18, 2025
e59f215
updated readme and api doc
varadsrivastava Jun 19, 2025
1019f6e
Merge branch 'main' of https://github.com/varadsrivastava/autogen
varadsrivastava Jun 19, 2025
d81b398
Added return_value_as_string function
varadsrivastava Jun 20, 2025
0db6540
minor update to return_value_as_string
varadsrivastava Jun 20, 2025
d2da914
updated tool path
varadsrivastava Jun 27, 2025
53ed83d
Merge branch 'main' into main
ekzhu Jun 27, 2025
24e3e20
Merge branch 'main' into main
varadsrivastava Jun 27, 2025
4414cba
Merge branch 'main' into main
ekzhu Jul 6, 2025
9c5399a
update deps
ekzhu Jul 6, 2025
97c3460
fix tests
ekzhu Jul 6, 2025
6febb60
update test
ekzhu Jul 6, 2025
07a3eac
add test to increase coverage
ekzhu Jul 6, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions python/packages/autogen-ext/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,13 @@ canvas = [
"unidiff>=0.7.5",
]

duckduckgo-search = [
"httpx>=0.27.0",
"beautifulsoup4>=4.12.0",
"html2text>=2024.2.26",
"autogen-agentchat==0.6.2",
]

[tool.hatch.build.targets.wheel]
packages = ["src/autogen_ext"]

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
# DuckDuckGo Search Agent created by Varad Srivastava
from typing import Any, Optional

from autogen_agentchat.agents import AssistantAgent
from autogen_core.models import ChatCompletionClient
from autogen_ext.tools.web_search.duckduckgo._duckduckgo_search import DuckDuckGoSearchTool


class DuckDuckGoSearchAgent(AssistantAgent):
    """An :class:`AssistantAgent` that comes pre-wired with a DuckDuckGo web-search tool.

    The agent bundles a :class:`DuckDuckGoSearchTool` and a research-oriented
    default system message, so it can be dropped into a group chat or used on
    its own for information-gathering tasks without further configuration.
    Both the description and the system message may be overridden at
    construction time.

    Example:
        .. code-block:: python

            from autogen_ext.agents.duckduckgo_search import DuckDuckGoSearchAgent
            from autogen_ext.models.openai import OpenAIChatCompletionClient

            # Create a model client
            model_client = OpenAIChatCompletionClient(model="gpt-4")

            # Create a DuckDuckGo search agent
            search_agent = DuckDuckGoSearchAgent(
                name="researcher",
                model_client=model_client,
            )

            # Use the agent
            result = await search_agent.run(task="What are the latest developments in AI?")
            print(result.messages[-1].content)
    """

    # Description used when the caller does not supply one.
    DEFAULT_DESCRIPTION = "A research assistant that uses DuckDuckGo to find and analyze information from the web."

    # System prompt used when the caller does not supply one.
    DEFAULT_SYSTEM_MESSAGE = """You are a research assistant that uses DuckDuckGo to find accurate information.

When conducting research:
1. Break down complex queries into specific, targeted search terms
2. Use the duckduckgo_search tool to find relevant information
3. Analyze and synthesize information from multiple sources when possible
4. Explain why the information is relevant and how it connects to the query
5. Cite your sources when providing information
6. If you're unsure about something, say so and explain why
7. Provide clear, well-structured responses with key findings highlighted
"""

    def __init__(
        self,
        name: str,
        model_client: ChatCompletionClient,
        description: Optional[str] = None,
        system_message: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        """Create a DuckDuckGo search agent.

        Args:
            name (str): The name of the agent.
            model_client (ChatCompletionClient): The model client used to generate responses.
            description (Optional[str]): Description of the agent's capabilities.
                Falls back to :attr:`DEFAULT_DESCRIPTION` when ``None``.
            system_message (Optional[str]): System message for the agent.
                Falls back to :attr:`DEFAULT_SYSTEM_MESSAGE` when ``None``.
            **kwargs: Extra keyword arguments forwarded to :class:`AssistantAgent`.
        """
        # Explicit `is None` checks (rather than `or`) so that an empty
        # string passed by the caller is respected, not replaced.
        resolved_description = self.DEFAULT_DESCRIPTION if description is None else description
        resolved_system_message = self.DEFAULT_SYSTEM_MESSAGE if system_message is None else system_message

        # Delegate to AssistantAgent, attaching the DuckDuckGo search tool.
        super().__init__(
            name=name,
            model_client=model_client,
            description=resolved_description,
            system_message=resolved_system_message,
            tools=[DuckDuckGoSearchTool()],
            **kwargs,
        )
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,12 @@

from autogen_core import CancellationToken, Component
from autogen_core.tools import BaseTool, ToolSchema
from pydantic import BaseModel, Field

from azure.core.credentials import AzureKeyCredential
from azure.core.credentials_async import AsyncTokenCredential
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.search.documents.aio import SearchClient
from pydantic import BaseModel, Field

from ._config import (
DEFAULT_API_VERSION,
Expand Down Expand Up @@ -150,8 +151,9 @@ async def _get_embedding(self, query: str) -> List[float]:

if embedding_provider.lower() == "azure_openai":
try:
from azure.identity import DefaultAzureCredential
from openai import AsyncAzureOpenAI

from azure.identity import DefaultAzureCredential
except ImportError:
raise ImportError(
"Azure OpenAI SDK is required for client-side embedding generation. "
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@
Union,
)

from pydantic import BaseModel, Field, field_validator, model_validator

from azure.core.credentials import AzureKeyCredential
from azure.core.credentials_async import AsyncTokenCredential
from pydantic import BaseModel, Field, field_validator, model_validator

T = TypeVar("T", bound="AzureAISearchConfig")

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import tiktoken
from autogen_core import CancellationToken
from autogen_core.tools import BaseTool
from pydantic import BaseModel, Field

from graphrag.config.config_file_loader import load_config_from_file
from graphrag.query.indexer_adapters import (
read_indexer_communities,
Expand All @@ -15,7 +17,6 @@
from graphrag.query.llm.get_client import get_llm
from graphrag.query.structured_search.global_search.community_context import GlobalCommunityContext
from graphrag.query.structured_search.global_search.search import GlobalSearch
from pydantic import BaseModel, Field

from ._config import GlobalContextConfig as ContextConfig
from ._config import GlobalDataConfig as DataConfig
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import tiktoken
from autogen_core import CancellationToken
from autogen_core.tools import BaseTool
from pydantic import BaseModel, Field

from graphrag.config.config_file_loader import load_config_from_file
from graphrag.query.indexer_adapters import (
read_indexer_entities,
Expand All @@ -17,7 +19,6 @@
from graphrag.query.structured_search.local_search.mixed_context import LocalSearchMixedContext
from graphrag.query.structured_search.local_search.search import LocalSearch
from graphrag.vector_stores.lancedb import LanceDBVectorStore
from pydantic import BaseModel, Field

from ._config import LocalContextConfig, SearchConfig
from ._config import LocalDataConfig as DataConfig
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@
from typing import Any, Coroutine, Dict, Mapping, TypedDict

from autogen_core import Component, ComponentBase
from mcp.types import CallToolResult, ListToolsResult
from pydantic import BaseModel
from typing_extensions import Self

from mcp.types import CallToolResult, ListToolsResult

from ._config import McpServerParams
from ._session import create_mcp_server_session

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,12 @@
from autogen_core import CancellationToken
from autogen_core.tools import BaseTool
from autogen_core.utils import schema_to_pydantic_model
from mcp import ClientSession, Tool
from mcp.types import EmbeddedResource, ImageContent, TextContent
from pydantic import BaseModel
from pydantic.networks import AnyUrl

from mcp import ClientSession, Tool
from mcp.types import EmbeddedResource, ImageContent, TextContent

from ._config import McpServerParams
from ._session import create_mcp_server_session

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
from typing import Any, Literal

from mcp import StdioServerParameters
from pydantic import BaseModel, Field
from typing_extensions import Annotated

from mcp import StdioServerParameters


class StdioServerParams(StdioServerParameters):
"""Parameters for connecting to an MCP server over STDIO."""
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from autogen_core import Component
from mcp import ClientSession, Tool
from pydantic import BaseModel
from typing_extensions import Self

from mcp import ClientSession, Tool

from ._base import McpToolAdapter
from ._config import SseServerParams

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from autogen_core import Component
from mcp import ClientSession, Tool
from pydantic import BaseModel
from typing_extensions import Self

from mcp import ClientSession, Tool

from ._base import McpToolAdapter
from ._config import StdioServerParams

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from autogen_core import Component
from mcp import ClientSession, Tool
from pydantic import BaseModel
from typing_extensions import Self

from mcp import ClientSession, Tool

from ._base import McpToolAdapter
from ._config import StreamableHttpServerParams

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,11 @@
ToolSchema,
Workbench,
)
from mcp.types import CallToolResult, EmbeddedResource, ImageContent, ListToolsResult, TextContent
from pydantic import BaseModel, Field
from typing_extensions import Self

from mcp.types import CallToolResult, EmbeddedResource, ImageContent, ListToolsResult, TextContent

from ._actor import McpSessionActor
from ._config import McpServerParams, SseServerParams, StdioServerParams, StreamableHttpServerParams

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from autogen_core import CancellationToken
from autogen_core.tools import BaseTool, ToolSchema
from pydantic import BaseModel

from semantic_kernel.functions import KernelFunctionFromMethod, KernelFunctionFromPrompt, kernel_function
from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata
from semantic_kernel.prompt_template.input_variable import InputVariable
Expand Down
Loading
Loading