From 083ee0a4c1c96ea26a3ce3c4eee1c47fef0e6b1d Mon Sep 17 00:00:00 2001
From: Ashpreet Bedi
Date: Thu, 30 May 2024 22:12:03 -0700
Subject: [PATCH] v2.4.17

---
 cookbook/llms/groq/finance.py |  7 +++----
 phi/knowledge/llamaindex.py   |  4 +++-
 phi/llm/groq/groq.py          | 16 +++++++---------
 phi/tools/pubmed.py           |  2 +-
 pyproject.toml                |  2 +-
 5 files changed, 15 insertions(+), 16 deletions(-)

diff --git a/cookbook/llms/groq/finance.py b/cookbook/llms/groq/finance.py
index 77f3260e4..0b8066205 100644
--- a/cookbook/llms/groq/finance.py
+++ b/cookbook/llms/groq/finance.py
@@ -7,7 +7,6 @@
     tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True, company_news=True)],
     show_tool_calls=True,
 )
-assistant.cli_app(markdown=True, stream=False, user="Groq")
-# assistant.print_response("What's the NVDA stock price", markdown=True, stream=False)
-# assistant.print_response("Share NVDA analyst recommendations", markdown=True, stream=False)
-# assistant.print_response("Summarize fundamentals for TSLA", markdown=True, stream=False)
+assistant.print_response("What's the NVDA stock price", markdown=True)
+assistant.print_response("Share NVDA analyst recommendations", markdown=True)
+assistant.print_response("Summarize fundamentals for TSLA", markdown=True)
diff --git a/phi/knowledge/llamaindex.py b/phi/knowledge/llamaindex.py
index f20e892fa..043019b36 100644
--- a/phi/knowledge/llamaindex.py
+++ b/phi/knowledge/llamaindex.py
@@ -8,7 +8,9 @@
     from llama_index.core.schema import NodeWithScore
     from llama_index.core.retrievers import BaseRetriever
 except ImportError:
-    raise ImportError("The `llama-index-core` package is not installed. Please install it via `pip install llama-index-core`.")
+    raise ImportError(
+        "The `llama-index-core` package is not installed. Please install it via `pip install llama-index-core`."
+    )
 
 
 class LlamaIndexKnowledgeBase(AssistantKnowledge):
diff --git a/phi/llm/groq/groq.py b/phi/llm/groq/groq.py
index d163381fa..d1020f28f 100644
--- a/phi/llm/groq/groq.py
+++ b/phi/llm/groq/groq.py
@@ -10,8 +10,6 @@
 
 try:
     from groq import Groq as GroqClient
-    from groq.types.chat.chat_completion import ChatCompletion, ChoiceMessage
-    from groq.lib.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta, ChoiceDeltaToolCall
 except ImportError:
     logger.error("`groq` not installed")
     raise
@@ -148,14 +146,14 @@ def to_dict(self) -> Dict[str, Any]:
             _dict["tool_choice"] = self.tool_choice
         return _dict
 
-    def invoke(self, messages: List[Message]) -> ChatCompletion:
+    def invoke(self, messages: List[Message]) -> Any:
         return self.client.chat.completions.create(
             model=self.model,
             messages=[m.to_dict() for m in messages],  # type: ignore
             **self.api_kwargs,
         )
 
-    def invoke_stream(self, messages: List[Message]) -> Iterator[ChatCompletionChunk]:
+    def invoke_stream(self, messages: List[Message]) -> Iterator[Any]:
         yield from self.client.chat.completions.create(
             model=self.model,
             messages=[m.to_dict() for m in messages],  # type: ignore
@@ -171,14 +169,14 @@ def response(self, messages: List[Message]) -> str:
 
         response_timer = Timer()
         response_timer.start()
-        response: ChatCompletion = self.invoke(messages=messages)
+        response = self.invoke(messages=messages)
         response_timer.stop()
         logger.debug(f"Time to generate response: {response_timer.elapsed:.4f}s")
         # logger.debug(f"Groq response type: {type(response)}")
         # logger.debug(f"Groq response: {response}")
 
         # -*- Parse response
-        response_message: ChoiceMessage = response.choices[0].message
+        response_message = response.choices[0].message
 
         # -*- Create assistant message
         assistant_message = Message(
@@ -248,18 +246,18 @@ def response_stream(self, messages: List[Message]) -> Iterator[str]:
         assistant_message_role = None
         assistant_message_content = ""
-        assistant_message_tool_calls: Optional[List[ChoiceDeltaToolCall]] = None
+        assistant_message_tool_calls: Optional[List[Any]] = None
 
         response_timer = Timer()
         response_timer.start()
         for response in self.invoke_stream(messages=messages):
             # logger.debug(f"Groq response type: {type(response)}")
             # logger.debug(f"Groq response: {response}")
             # -*- Parse response
-            response_delta: ChoiceDelta = response.choices[0].delta
+            response_delta = response.choices[0].delta
             if assistant_message_role is None and response_delta.role is not None:
                 assistant_message_role = response_delta.role
             response_content: Optional[str] = response_delta.content
-            response_tool_calls: Optional[List[ChoiceDeltaToolCall]] = response_delta.tool_calls
+            response_tool_calls: Optional[List[Any]] = response_delta.tool_calls
 
             # -*- Return content if present, otherwise get tool call
             if response_content is not None:
diff --git a/phi/tools/pubmed.py b/phi/tools/pubmed.py
index 61e79d30c..0cab129c9 100644
--- a/phi/tools/pubmed.py
+++ b/phi/tools/pubmed.py
@@ -26,7 +26,7 @@ def fetch_pubmed_ids(self, query: str, max_results: int, email: str) -> List[str
             "email": email,
             "usehistory": "y",
         }
-        response = httpx.get(url, params=params)
+        response = httpx.get(url, params=params)  # type: ignore
         root = ElementTree.fromstring(response.content)
         return [id_elem.text for id_elem in root.findall(".//Id") if id_elem.text is not None]
 
diff --git a/pyproject.toml b/pyproject.toml
index 0a2c8c0fd..1f99a3cd9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "phidata"
-version = "2.4.16"
+version = "2.4.17"
 description = "Memory, knowledge and tools for LLMs."
 requires-python = ">=3.7"
 readme = "README.md"