add test case for function tool, and add support for using component class method
liyin2015 committed Dec 26, 2024
1 parent 50cfbf2 commit d0c8bc4
Showing 10 changed files with 316 additions and 69 deletions.
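For orientation before the per-file diffs, here is a hedged sketch of the usage this commit enables (assumed public API; Calculator and the variable names are illustrative, and the exact context-map key depends on whether the instance name can be resolved from the calling frame):

from adalflow.core.func_tool import FunctionTool
from adalflow.core.tool_manager import ToolManager

class Calculator:
    def add(self, a: int, b: int) -> int:
        """Add two integers."""
        return a + b

calculator = Calculator()
# A bound class/component method can now be wrapped directly as a tool.
manager = ToolManager(tools=[FunctionTool(calculator.add)])
# With this commit the tool is expected to be indexed by its instance-qualified
# name (e.g. "calculator.add"), which is the form the new ReAct prompt RULES ask for.
print(list(manager.context.keys()))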
31 changes: 16 additions & 15 deletions adalflow/adalflow/components/agent/react_v2.py
@@ -30,7 +30,6 @@


react_agent_task_desc = r"""{# role/task description #}
You are a helpful assistant.
Answer the user's query using the tools provided below with minimal steps and maximum accuracy.
{# REACT instructions #}
Each step you will read the previous Thought, Action, and Observation(execution result of the action) and then provide the next Thought and Action.
@@ -59,6 +58,8 @@
{{tool}}
------------------------
{% endfor %}
RULES:
- When the function is a class method and class_instance exists, call it as <class_instance_value>.<func_name> (NOT the class name)
<END_OF_TOOLS>
{% endif %}
{# Context Variables #}
@@ -422,15 +423,15 @@ def call(
printc(f"tool_manager: {self.tool_manager.training}", color="red")
if not isinstance(func, Parameter):
raise ValueError(f"Expected Parameter, but got {type(func)}: {func}")
printc(f"func: {func}", color="yellow")
# printc(f"func: {func}", color="yellow")
# replace the id
if isinstance(func, Parameter):
func.data.kwargs["id"] = id

func.add_successor_map_fn(self.tool_manager, lambda x: x.data)

result: Parameter = self.tool_manager(expr_or_fun=func, step="execute")
printc(f"result: {result}", color="red")
# printc(f"result: {result}", color="red")
result.add_successor_map_fn(
successor=tmp_action_str_to_step_output, map_fn=lambda x: x.data
)
@@ -470,12 +471,13 @@ def _run_one_step(
printc("start running one step", color="yellow")

prompt_kwargs["step_history"] = step_history
printc(
f"prompt_kwargs 1: {prompt_kwargs}, training: {self.planner.training}",
color="yellow",
)
# printc(
# f"prompt_kwargs 1: {prompt_kwargs}, training: {self.planner.training}",
# color="yellow",
# )

# prompt_str = self.planner.get_prompt(**prompt_kwargs)
prompt_str = self.planner.get_prompt(**prompt_kwargs)
printc(f"prompt_str: {prompt_str}", color="red")
# return [StepOutput(step=step, action=None, observation="test")]

log.debug(
@@ -495,7 +497,7 @@

# connecting two generators in the computation graph, it will set up self.step_history
if isinstance(response, Parameter):
printc(f"response: {response}", color="yellow")
# printc(f"response: {response}", color="yellow")

step_output: Parameter = self._execute_action(step_output, response, id)

@@ -513,7 +515,7 @@
successor=self.planner, map_fn=lambda x: x.data
)
# convert step history back to data
printc(f"step_history: {step_history.data}", color="yellow")
# printc(f"step_history: {step_history.data}", color="yellow")
return step_history

else:
@@ -535,9 +537,7 @@ def _check_last_step(
last_step: StepOutput = None
if isinstance(step_history, Parameter):
# try:
printc(f"step_history: {step_history}", color="yellow")
step_history_data = step_history.data
printc(f"step_history: {step_history}", color="yellow")
last_step = step_history_data[-1]

# except Exception as e:
@@ -560,15 +560,15 @@ def _get_answer(
last_step: StepOutput = None
if isinstance(step_history, Parameter):
try:
printc(f"step_history: {step_history}", color="yellow")
# printc(f"step_history: {step_history}", color="yellow")
return step_history

except Exception as e:
log.error(f"Error getting data from Parameter: {e}")
return None
else:
last_step = step_history[-1]
printc(f"last_step: {last_step}", color="yellow")
# printc(f"last_step: {last_step}", color="yellow")

return last_step.observation

@@ -639,11 +639,12 @@ def bicall(
# step_history.append(step_output)

answer = self._get_answer(step_history)
printc(f"answer: {answer}", color="yellow")
if self.training:
return answer
# wrap the output
output = ReActOutput(step_history=step_history, id=id, answer=answer)
# printc(f"output: {output}", color="yellow")

return output

def _extra_repr(self) -> str:
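The RULES line added to react_agent_task_desc above changes how the planner is asked to reference tools. A hedged sketch of the intended action format (FunctionExpression field names assumed from the existing adalflow types; the instance name retriever is illustrative):

from adalflow.core.types import FunctionExpression

# The action string uses the instance-qualified name, which now matches a key
# in ToolManager.context, instead of the class name.
expr = FunctionExpression(
    thought="The retriever instance is exposed as 'retriever', so call it directly.",
    action="retriever.retrieve(query='What did this commit change?')",
)
# Not: action="Retriever.retrieve(...)" -- the class name has no entry in the context map.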
92 changes: 87 additions & 5 deletions adalflow/adalflow/core/func_tool.py
@@ -4,7 +4,8 @@
"""

from typing import Any, Optional, Callable, Awaitable, Union
from inspect import iscoroutinefunction
from inspect import iscoroutinefunction, ismethod, isfunction
import inspect
import logging
import asyncio
import nest_asyncio
@@ -39,6 +40,21 @@ def is_running_in_event_loop() -> bool:
return False


def find_instance_name_from_self(instance):
"""
Attempt to find the variable name of the instance in the calling context.
:param instance: The instance to find the name for.
:return: The variable name of the instance, if found; otherwise, None.
"""
# Inspect the calling stack frame
frame = inspect.stack()[2].frame
for var_name, var_obj in frame.f_locals.items():
if var_obj is instance:
return var_name
return None


FunctionType = Union[Callable[..., Any], Awaitable[Callable[..., Any]]]


@@ -51,6 +67,32 @@ class FunctionTool(GradComponent):
A function that can be used by an LLM as a tool to achieve a specific task.
What functions can you pass as a tool?
1. Any unbound function you wrote outside of a class.
2. Any class method you wrote in your component. It can call `self` and other methods inside your component.
3. If the function uses a trainable component, you can use the component's method directly as a tool or wrap it in a function, but make sure to pass the component to the tool.
Here are some examples:
.. code-block:: python
from adalflow.core.func_tool import FunctionTool
class AgenticRAG(GradComponent):
def __init__(self, ...):
super().__init__()
self.retriever = Retriever()
self.llm = Generator()
def retriever_as_tool(input: str) -> str:
r"Used as a retriever tool."
return self.retriever(input)
tools = [FunctionTool(retriever_as_tool, component=self.retriever),
FunctionTool(self.llm.__call__, component=self.llm)]
# if you have trainable component, this will ensure it can be trained together with your whole task pipeline
# if you don't want to train them and simply treat them as tools, you can do this instead:
# tools = [FunctionTool(retriever_as_tool), FunctionTool(self.llm.__call__, component=self.llm)]
Features:
- Supports both synchronous and asynchronous functions via ``call`` and ``acall``.
- Creates a FunctionDefinition from the function using ``get_fun_schema``.
@@ -89,15 +131,55 @@ def __init__(
def is_async(self) -> bool:
return self._is_async

# def _create_fn_definition(self) -> FunctionDefinition:
# name = self.fn.__name__
# docstring = self.fn.__doc__
# description = f"{docstring}"
# description = f"{name}{signature(self.fn)}\n{docstring}"
# # description = f"{name}{signature(self.fn)}\n{docstring}"
# fn_parameters = get_fun_schema(name, self.fn)
# return FunctionDefinition(
# func_name=name, func_desc=description, func_parameters=fn_parameters
# )

def _create_fn_definition(self) -> FunctionDefinition:
name = self.fn.__name__
docstring = self.fn.__doc__
description = f"{docstring}"
description = f"{name}{signature(self.fn)}\n{docstring}"
# description = f"{name}{signature(self.fn)}\n{docstring}"
signature_str = str(signature(self.fn))

# Get the class that owns the method, if applicable
cls_name = None
# cls_docstring = None
instance = None
if ismethod(self.fn): # Check if it’s a bound method
instance = self.fn.__self__
instance = find_instance_name_from_self(instance)
if name == "__call__" and not instance:
raise ValueError(
"Please provide a name for the instance in the calling context"
)
cls_name = self.fn.__self__.__class__.__name__
# cls_docstring = getdoc(self.fn.__self__.__class__)
elif isfunction(self.fn): # Unbound method
cls_name = self.fn.__qualname__.split(".")[0]

# Build the description
description = f"{name}{signature_str}\n"
if cls_name:
description += f"Belongs to class: {cls_name}\n"
if docstring:
description += f"Method docstring: {docstring}\n"
# if cls_docstring:
# description += f"Class docstring: {cls_docstring}\n"

# Get function parameters schema
fn_parameters = get_fun_schema(name, self.fn)

return FunctionDefinition(
func_name=name, func_desc=description, func_parameters=fn_parameters
func_name=name,
func_desc=description,
func_parameters=fn_parameters,
class_instance=instance,
)

def forward(self, *args, **kwargs) -> Parameter:
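A minimal sketch (assumed usage, not part of the commit) of the new bound-method branch in _create_fn_definition. FakeRetriever is illustrative, and whether class_instance resolves to "retriever" depends on which stack frame find_instance_name_from_self inspects at construction time:

from adalflow.core.func_tool import FunctionTool

class FakeRetriever:
    def retrieve(self, query: str) -> str:
        """Return documents relevant to the query."""
        return f"docs for: {query}"

retriever = FakeRetriever()
tool = FunctionTool(retriever.retrieve)  # bound method -> ismethod(...) branch

definition = tool.definition
print(definition.func_name)       # "retrieve"
print(definition.class_instance)  # expected: the caller-side variable name, e.g. "retriever"
print(definition.func_desc)       # includes the signature, "Belongs to class: FakeRetriever", and the docstring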
67 changes: 45 additions & 22 deletions adalflow/adalflow/core/tool_manager.py
@@ -13,7 +13,6 @@
overload,
Literal,
)
import warnings
import logging
from copy import deepcopy
import asyncio
Expand All @@ -29,6 +28,8 @@
Function,
FunctionExpression,
)
from adalflow.utils import printc


from adalflow.core.functional import (
parse_function_call_expr,
@@ -62,9 +63,6 @@ class ToolManager(GradComponent):
yaml and json definitions are for quick access to the definitions of the tools.
If you need more specification, such as using exclude field, you can use the function_definitions.
Args:
"""

def __init__(
@@ -76,7 +74,6 @@ def __init__(
):
super().__init__()
nest_asyncio.apply() # Apply nest_asyncio to handle nested loops
# super(LocalDB, self).__init__()
tools = [
(
FunctionTool(fn=deepcopy(tool))
@@ -86,20 +83,56 @@
for tool in tools
]
self.tools = ComponentList(tools)
self._context_map = {tool.definition.func_name: tool for tool in self.tools}
self._context_map = self.create_context_map_from_tools(self.tools)
self._additional_context = additional_context or {}
self.context = {**self._context_map, **self._additional_context}
log.info(
f"Initialized ToolManager with {len(self.tools)} tools and additional context {self._additional_context}"
)

@staticmethod
def get_context_index(tool: FunctionTool) -> Dict[str, object]:
index = tool.definition.func_name
if tool.definition.class_instance:
index = f"{tool.definition.class_instance}.{index}"
output = {index: tool}
if tool.definition.func_name == "__call__":
# add another index for calling the class instance directly
output[f"{tool.definition.class_instance}"] = tool
return output

@staticmethod
def create_context_map_from_tools(tools: List[FunctionTool]) -> Dict[str, object]:
output: Dict[str, object] = {}
for tool in tools:
tool_map = ToolManager.get_context_index(tool)
for k, v in tool_map.items():
if k in output:
raise ValueError(f"Duplicate key {k} in the context map.")
output[k] = v
return output

@property
def yaml_definitions(self) -> List[str]:
return [tool.definition.to_yaml() for tool in self.tools]
output = []
for tool in self.tools:
if not tool.definition.class_instance:
output.append(tool.definition.to_yaml(exclude=["class_instance"]))
else:
output.append(tool.definition.to_yaml())
output.append(tool.definition.to_yaml(exclude=["class_instance"]))
return output

@property
def json_definitions(self) -> List[str]:
return [tool.definition.to_json() for tool in self.tools]
output = []
for tool in self.tools:
if not tool.definition.class_instance:
output.append(tool.definition.to_json(exclude=["class_instance"]))
else:
output.append(tool.definition.to_json())
output.append(tool.definition.to_json(exclude=["class_instance"]))
return output

@property
def function_definitions(self) -> List[FunctionDefinition]:
@@ -214,17 +247,6 @@ def forward(
else:
return self.call(expr_or_fun=expr_or_fun, step=step)

# def forward(
# self, *, expr_or_fun: Parameter, step: Literal["execute"] = "execute"
# ) -> Parameter:
# if not isinstance(expr_or_fun, Parameter):
# expr_or_fun = expr_or_fun.data

# if isinstance(expr_or_fun, Function) and step == "execute":
# tool: FunctionTool = self.context[expr_or_fun.data.name]

# output = self.call(expr_or_fun, step=step)

def execute_func(
self, func: Union[Function, Parameter]
) -> Union[FunctionOutput, Parameter]:
@@ -248,14 +270,15 @@ def bicall(
context: Dict[str, object] = {},
):
if isinstance(func, Parameter):
printc(f"context: {context}", color="yellow")
tool: FunctionTool = context[func.data.name]
print(f"tool training: {tool.training}")
output = tool.forward(*func.data.args, **func.data.kwargs)
# handle the untrainable function
if not isinstance(output, Parameter):
warnings.warn(
f"Error executing function: {output}", UserWarning
)
# warnings.info(
# f"Error executing function: {output}", UserWarning
# )
output = Parameter(
name=func.data.name,
data=output,
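The indexing scheme in get_context_index and create_context_map_from_tools can be illustrated standalone (this is not adalflow code; FakeDefinition and FakeTool stand in for the real types):

from dataclasses import dataclass
from typing import Dict, Optional

@dataclass
class FakeDefinition:
    func_name: str
    class_instance: Optional[str] = None

@dataclass
class FakeTool:
    definition: FakeDefinition

def get_context_index(tool: FakeTool) -> Dict[str, FakeTool]:
    # Same keying rule as ToolManager.get_context_index above.
    index = tool.definition.func_name
    if tool.definition.class_instance:
        index = f"{tool.definition.class_instance}.{index}"
    output = {index: tool}
    if tool.definition.func_name == "__call__":
        # alias so the agent can call the instance directly
        output[f"{tool.definition.class_instance}"] = tool
    return output

tools = [
    FakeTool(FakeDefinition("multiply")),               # plain function
    FakeTool(FakeDefinition("retrieve", "retriever")),  # bound method of `retriever`
    FakeTool(FakeDefinition("__call__", "llm")),        # callable component `llm`
]
context: Dict[str, FakeTool] = {}
for t in tools:
    context.update(get_context_index(t))
print(sorted(context))  # ['llm', 'llm.__call__', 'multiply', 'retriever.retrieve']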
9 changes: 7 additions & 2 deletions adalflow/adalflow/core/types.py
@@ -305,8 +305,13 @@ class RetrieverOutput(DataClass):
@dataclass
class FunctionDefinition(DataClass):
__doc__ = r"""The data modeling of a function definition, including the name, description, and parameters."""

func_name: str = field(metadata={"desc": "The name of the tool"})
class_instance: Optional[Any] = field(
default=None,
metadata={"desc": "The instance of the class this function belongs to"},
)
func_name: str = field(
metadata={"desc": "The name of the tool"}, default=required_field
)
func_desc: Optional[str] = field(
default=None, metadata={"desc": "The description of the tool"}
)
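A small sketch of the new field in use (assumes FunctionDefinition can be constructed directly with these keyword fields and that the DataClass to_yaml method accepts an exclude list, as the ToolManager properties above rely on):

from adalflow.core.types import FunctionDefinition

definition = FunctionDefinition(
    func_name="retrieve",
    func_desc="retrieve(query: str) -> str\nBelongs to class: Retriever\n",
    func_parameters={"type": "object", "properties": {"query": {"type": "str"}}},
    class_instance="retriever",
)
# class_instance is kept for in-process routing (the "<instance>.<method>" key)
# but excluded from the YAML handed to the LLM prompt:
print(definition.to_yaml(exclude=["class_instance"]))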
2 changes: 2 additions & 0 deletions adalflow/adalflow/optim/grad_component.py
@@ -31,6 +31,8 @@ class GradComponent(Component):
The __call__ method will check if the component is in training mode,
and call the `forward` method to return a `Parameter` object if it is in training mode,
otherwise, it will call the `call` method to return the output such as "GeneratorOutput", "RetrieverOutput", etc.
Note: Avoid using the attributes and methods that are defined here and in the `Component` class unless you are overriding them.
"""
backward_engine: "BackwardEngine"
_component_type = "grad"