Skip to content

Commit

Permalink
update
Browse files · Browse the repository at this point in the history
  • Loading branch information
ysolanky committed Sep 19, 2024
1 parent b8d47fd commit b724ec0
Show file tree
Hide file tree
Showing 3 changed files with 30 additions and 10 deletions.
11 changes: 11 additions & 0 deletions cookbook/providers/openai/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.tools.duckduckgo import DuckDuckGo

# Web-search tool and GPT-4o model for the demo agent.
search_tool = DuckDuckGo()
chat_model = OpenAIChat(model="gpt-4o")

# Agent that can call DuckDuckGo and prints its tool calls as it works.
agent = Agent(
    model=chat_model,
    tools=[search_tool],
    show_tool_calls=True,
    # debug_mode=True,
)

# Stream a markdown-formatted answer to the terminal.
agent.print_response("Whats happening in France?", markdown=True, stream=True)
2 changes: 1 addition & 1 deletion cookbook/providers/openai/agent_stream_off.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,6 @@
model=OpenAIChat(model="gpt-4o"),
tools=[DuckDuckGo()],
show_tool_calls=True,
debug_mode=True,
# debug_mode=True,
)
agent.print_response("Whats happening in France?", markdown=True, stream=False)
27 changes: 18 additions & 9 deletions phi/model/anthropic/claude.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import json
from typing import Optional, List, Iterator, Dict, Any, Union
from typing import Optional, List, Iterator, Dict, Any, Union, cast

from phi.model.base import Model
from phi.model.message import Message
Expand Down Expand Up @@ -168,10 +168,19 @@ def response(self, messages: List[Message]) -> ModelResponse:
response_timer.stop()
logger.debug(f"Time to generate response: {response_timer.elapsed:.4f}s")

# -*- Parse response
response_content: TextBlock = response.content[0].text # type: ignore
# logger.debug(f"Response: {response}")
# logger.debug(f"Response content: {response.content[0]}")

# -*- Create assistant message
# -*- Parse response
response_content: str = ""
response_block: Union[TextBlock, ToolUseBlock] = response.content[0]
if isinstance(response_block, TextBlock):
response_content = response_block.text
elif isinstance(response_block, ToolUseBlock):
tool_block = cast(dict[str, Any], response_block.input)
response_content = tool_block.get("query", "")

# -*- Create agent message
agent_message = Message(
role=response.role or "assistant",
content=response_content,
Expand All @@ -197,7 +206,7 @@ def response(self, messages: List[Message]) -> ModelResponse:
"function": function_def,
}
)
agent_message.content = response.content # type: ignore
agent_message.content = response.content

if len(tool_calls) > 0:
agent_message.tool_calls = tool_calls
Expand Down Expand Up @@ -227,7 +236,7 @@ def response(self, messages: List[Message]) -> ModelResponse:
agent_message.metrics["total_tokens"] = input_tokens + output_tokens
self.metrics["total_tokens"] = self.metrics.get("total_tokens", 0) + input_tokens + output_tokens

# -*- Add assistant message to messages
# -*- Add agent message to messages
messages.append(agent_message)
agent_message.log()

Expand Down Expand Up @@ -330,12 +339,12 @@ def response_stream(self, messages: List[Message]) -> Iterator[ModelResponse]:
response_timer.stop()
logger.debug(f"Time to generate response: {response_timer.elapsed:.4f}s")

# -*- Create assistant message
# -*- Create agent message
agent_message = Message(
role="assistant",
content="",
)
agent_message.content = response_content # type: ignore
agent_message.content = response_content

if len(tool_calls) > 0:
agent_message.tool_calls = tool_calls
Expand Down Expand Up @@ -364,7 +373,7 @@ def response_stream(self, messages: List[Message]) -> Iterator[ModelResponse]:
agent_message.metrics["total_tokens"] = input_tokens + output_tokens
self.metrics["total_tokens"] = self.metrics.get("total_tokens", 0) + input_tokens + output_tokens

# -*- Add assistant message to messages
# -*- Add agent message to messages
messages.append(agent_message)
agent_message.log()

Expand Down

0 comments on commit b724ec0

Please sign in to comment.