Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 61 additions & 62 deletions cookbook/en/tools/tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ Use this pattern for every custom tool: define Pydantic models, extend `Tool`, i

## AgentScope Integration Example

We use `tool_adapter` to add tools to AgentScope's `Toolkit`:
We use `agentscope_tool_adapter` to add tools to AgentScope's `Toolkit`:

```python
import asyncio
Expand All @@ -87,42 +87,18 @@ from agentscope.tool import Toolkit
from agentscope.message import Msg

from agentscope_runtime.tools.searches import (
ModelstudioSearch,
ModelstudioSearchLite,
SearchInput,
SearchOptions,
)
from agentscope_runtime.adapters.agentscope.tool import tool_adapter

search_tool = ModelstudioSearch()


@tool_adapter(description=search_tool.description)
def modelstudio_search_tool(
messages: list[dict],
search_options: dict | None = None,
search_timeout: int | None = None,
_type: str | None = None,
):
payload_kwargs = {
"messages": messages,
"search_options": SearchOptions(**(search_options or {})),
}
if search_timeout is not None:
payload_kwargs["search_timeout"] = search_timeout
if _type is not None:
payload_kwargs["type"] = _type

payload = SearchInput(**payload_kwargs)
result = search_tool.run(
payload,
user_id=os.environ["MODELSTUDIO_USER_ID"],
)
return ModelstudioSearch.return_value_as_string(result)
from agentscope_runtime.adapters.agentscope.tool import agentscope_tool_adapter

search_tool = ModelstudioSearchLite()
search_tool = agentscope_tool_adapter(search_tool)

toolkit = Toolkit()

toolkit.register_tool_function(modelstudio_search_tool)
toolkit = Toolkit()
toolkit.tools[search_tool.name] = search_tool

agent = ReActAgent(
name="Friday",
Expand Down Expand Up @@ -150,7 +126,10 @@ if __name__ == "__main__":

## LangGraph Integration Example

To reproduce the “Apply to existing LangGraph project” flow, wrap the tool as a LangChain `StructuredTool`, bind it to a model, and wire it into a LangGraph workflow. The tool schema comes directly from the tool’s input model, so tool calls remain type-safe.
To reproduce the “Apply to existing LangGraph project” flow, wrap the tool as a LangGraph node
with `LanggraphNodeAdapter`, bind it to a model, and wire it into a LangGraph workflow.

The tool schema comes directly from the tool’s input model, so tool calls remain type-safe.

```python
import os
Expand All @@ -161,47 +140,28 @@ from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, END, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode

from agentscope_runtime.common.tools.searches import (
ModelstudioSearch,
from agentscope_runtime.tools.searches import (
ModelstudioSearchLite,
SearchInput,
SearchOptions,
)

search_tool = ModelstudioSearch()


def search_tool_func(
messages: list[dict],
search_options: dict | None = None,
search_timeout: int | None = None,
type: str | None = None,
):
kwargs = {
"messages": messages,
"search_options": SearchOptions(**(search_options or {})),
}
if search_timeout is not None:
kwargs["search_timeout"] = search_timeout
if type is not None:
kwargs["type"] = type
result = search_tool.run(
SearchInput(**kwargs),
user_id=os.environ["MODELSTUDIO_USER_ID"],
)
return ModelstudioSearch.return_value_as_string(result)
from agentscope_runtime.adapters.langgraph.tool import LanggraphNodeAdapter
search_tool = ModelstudioSearchLite()


search_tool = StructuredTool.from_function(
func=search_tool_func,
name=search_tool.name,
description=search_tool.description,
tool_node = LanggraphNodeAdapter(
[
ModelstudioSearchLite(),
],
)

api_key = os.getenv("DASHSCOPE_API_KEY")

llm = ChatOpenAI(
model="qwen-turbo",
openai_api_key=os.environ["DASHSCOPE_API_KEY"],
openai_api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
).bind_tools([search_tool])
).bind_tools(tool_node.tool_schemas)


def should_continue(state: MessagesState):
Expand Down Expand Up @@ -229,6 +189,45 @@ final_state = app.invoke(
print(final_state["messages"][-1].content)
```

## AutoGen Integration Example

We use `AutogenToolAdapter` to convert our tool to an AutoGen tool:

```python
import asyncio
from agentscope_runtime.tools.searches import ModelstudioSearchLite
from agentscope_runtime.adapters.autogen.tool import AutogenToolAdapter
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.messages import TextMessage
from autogen_core import CancellationToken

async def main():
# Create the search tool
search_tool = ModelstudioSearchLite()

# Create the autogen tool adapter
search_tool = AutogenToolAdapter(search_tool)

# Create an agent with the search tool
model = OpenAIChatCompletionClient(model="gpt-4")
agents = AssistantAgent(
"assistant",
tools=[search_tool],
model_client=model,
)

# Use the agent
response = await agents.on_messages(
[TextMessage(content="What's the weather in Beijing?",
source="user")],
CancellationToken(),
)
print(response.chat_message)

asyncio.run(main())
```

## Using Tools inside Agents
1. **Configure credentials**: declare environment variables (DashScope keys, Alipay secrets, etc.) before running the agent process so tools can authenticate.
2. **Instantiate once**: create tool objects during agent initialization; reuse them instead of re-instantiating per call to keep connections warm.
Expand Down
122 changes: 60 additions & 62 deletions cookbook/zh/tools/tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ asyncio.run(main())

## AgentScope集成示例

我们使用`tool_adapter`将工具添加到AgentScope的`Toolkit`中:
我们使用`agentscope_tool_adapter`将工具添加到AgentScope的`Toolkit`中:

```python
import asyncio
Expand All @@ -87,42 +87,18 @@ from agentscope.tool import Toolkit
from agentscope.message import Msg

from agentscope_runtime.tools.searches import (
ModelstudioSearch,
ModelstudioSearchLite,
SearchInput,
SearchOptions,
)
from agentscope_runtime.adapters.agentscope.tool import tool_adapter

search_tool = ModelstudioSearch()


@tool_adapter(description=search_tool.description)
def modelstudio_search_tool(
messages: list[dict],
search_options: dict | None = None,
search_timeout: int | None = None,
_type: str | None = None,
):
payload_kwargs = {
"messages": messages,
"search_options": SearchOptions(**(search_options or {})),
}
if search_timeout is not None:
payload_kwargs["search_timeout"] = search_timeout
if _type is not None:
payload_kwargs["type"] = _type

payload = SearchInput(**payload_kwargs)
result = search_tool.run(
payload,
user_id=os.environ["MODELSTUDIO_USER_ID"],
)
return ModelstudioSearch.return_value_as_string(result)
from agentscope_runtime.adapters.agentscope.tool import agentscope_tool_adapter

search_tool = ModelstudioSearchLite()
search_tool = agentscope_tool_adapter(search_tool)

toolkit = Toolkit()

toolkit.register_tool_function(modelstudio_search_tool)
toolkit = Toolkit()
toolkit.tools[search_tool.name] = search_tool

agent = ReActAgent(
name="Friday",
Expand Down Expand Up @@ -150,7 +126,8 @@ if __name__ == "__main__":

## LangGraph 集成示例

如果要在 LangGraph 项目中沿用上一示例,只需把 Tool 包成 LangChain `StructuredTool`,绑定模型并接入 LangGraph 工作流。工具 Schema 直接来自 Tool 的输入模型,保证调用依旧类型安全。
如果要在 LangGraph 项目中沿用上一示例,只需用`LanggraphNodeAdapter`把 Tool 包成 LangGraph Node,
绑定模型并接入 LangGraph 工作流。工具 Schema 直接来自 Tool 的输入模型,保证调用依旧类型安全。

```python
import os
Expand All @@ -161,47 +138,28 @@ from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, END, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode

from agentscope_runtime.common.tools.searches import (
ModelstudioSearch,
from agentscope_runtime.tools.searches import (
ModelstudioSearchLite,
SearchInput,
SearchOptions,
)

search_tool = ModelstudioSearch()


def search_tool_func(
messages: list[dict],
search_options: dict | None = None,
search_timeout: int | None = None,
type: str | None = None,
):
kwargs = {
"messages": messages,
"search_options": SearchOptions(**(search_options or {})),
}
if search_timeout is not None:
kwargs["search_timeout"] = search_timeout
if type is not None:
kwargs["type"] = type
result = search_tool.run(
SearchInput(**kwargs),
user_id=os.environ["MODELSTUDIO_USER_ID"],
)
return ModelstudioSearch.return_value_as_string(result)

from agentscope_runtime.adapters.langgraph.tool import LanggraphNodeAdapter
search_tool = ModelstudioSearchLite()

search_tool = StructuredTool.from_function(
func=search_tool_func,
name=search_tool.name,
description=search_tool.description,
tool_node = LanggraphNodeAdapter(
[
ModelstudioSearchLite(),
],
)

api_key = os.getenv("DASHSCOPE_API_KEY")

llm = ChatOpenAI(
model="qwen-turbo",
openai_api_key=os.environ["DASHSCOPE_API_KEY"],
openai_api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
).bind_tools([search_tool])
).bind_tools(tool_node.tool_schemas)


def should_continue(state: MessagesState):
Expand Down Expand Up @@ -229,6 +187,46 @@ final_state = app.invoke(
print(final_state["messages"][-1].content)
```

## AutoGen 集成示例

利用 `AutogenToolAdapter` 把 Tool 转换成 AutogenTool:

```python
import asyncio
from agentscope_runtime.tools.searches import ModelstudioSearchLite
from agentscope_runtime.adapters.autogen.tool import AutogenToolAdapter
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.messages import TextMessage
from autogen_core import CancellationToken

async def main():
# Create the search tool
search_tool = ModelstudioSearchLite()

# Create the autogen tool adapter
search_tool = AutogenToolAdapter(search_tool)

# Create an agent with the search tool
model = OpenAIChatCompletionClient(model="gpt-4")
agents = AssistantAgent(
"assistant",
tools=[search_tool],
model_client=model,
)

# Use the agent
response = await agents.on_messages(
[TextMessage(content="What's the weather in Beijing?",
source="user")],
CancellationToken(),
)
print(response.chat_message)

asyncio.run(main())
```


## 在 Agent 中使用 Tool 的操作建议
1. **配置凭证**:在启动 Agent 前准备好 DashScope Key、支付宝密钥等环境变量,以便 Tool 读取并完成鉴权。
2. **实例化一次**:在 Agent 初始化阶段创建 Tool 对象并复用,避免每次调用重新实例化导致连接冷启动。
Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ dependencies = [
"shortuuid>=1.0.13",
"celery[redis]>=5.3.1",
"a2a-sdk>=0.3.0",
"jsonref",
"asgiref",
]

[tool.setuptools]
Expand Down
5 changes: 3 additions & 2 deletions src/agentscope_runtime/adapters/agentscope/tool/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
from .tool import tool_adapter
from .tool import agentscope_tool_adapter, agentscope_toolkit_adapter
from .sandbox_tool import sandbox_tool_adapter

__all__ = [
"tool_adapter",
"agentscope_tool_adapter",
"agentscope_toolkit_adapter",
"sandbox_tool_adapter",
]
Loading