From ad85c14ad2690fecf392789ae11dbee1783265dd Mon Sep 17 00:00:00 2001
From: lucaferrario
Date: Wed, 13 Mar 2024 15:32:38 +0100
Subject: [PATCH 1/2] fix(openai): changed openai api response parsing

---
 keybert/llm/_openai.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/keybert/llm/_openai.py b/keybert/llm/_openai.py
index 5c8c078c..1cdf4ae8 100644
--- a/keybert/llm/_openai.py
+++ b/keybert/llm/_openai.py
@@ -178,7 +178,7 @@ def extract_keywords(self, documents: List[str], candidate_keywords: List[List[s
                     response = chat_completions_with_backoff(self.client, **kwargs)
                 else:
                     response = self.client.chat.completions.create(**kwargs)
-                keywords = response.choices[0].message.content.strip()
+                keywords = response.choices[0].text.strip()
 
             # Use a non-chat model
             else:
@@ -186,7 +186,7 @@ def extract_keywords(self, documents: List[str], candidate_keywords: List[List[s
                     response = completions_with_backoff(self.client, model=self.model, prompt=prompt, **self.generator_kwargs)
                 else:
                     response = self.client.completions.create(model=self.model, prompt=prompt, **self.generator_kwargs)
-                keywords = response.choices[0].text.strip()
+                keywords = response.choices[0].message.content.strip()
             keywords = [keyword.strip() for keyword in keywords.split(",")]
             all_keywords.append(keywords)
 

From 5132c35d2eb70318b295919c158545bec8fc20c9 Mon Sep 17 00:00:00 2001
From: lucaferrario
Date: Fri, 15 Mar 2024 10:41:14 +0100
Subject: [PATCH 2/2] fix(openai): rolled back response parsing for chat model

---
 keybert/llm/_openai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/keybert/llm/_openai.py b/keybert/llm/_openai.py
index 1cdf4ae8..f5c0bdc0 100644
--- a/keybert/llm/_openai.py
+++ b/keybert/llm/_openai.py
@@ -178,7 +178,7 @@ def extract_keywords(self, documents: List[str], candidate_keywords: List[List[s
                     response = chat_completions_with_backoff(self.client, **kwargs)
                 else:
                     response = self.client.chat.completions.create(**kwargs)
-                keywords = response.choices[0].text.strip()
+                keywords = response.choices[0].message.content.strip()
 
             # Use a non-chat model
             else:
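
For context (not part of the patches above): in the openai>=1.0 Python client, chat completions and legacy completions return different response objects, which is why the net result of these two patches parses them differently. A minimal sketch of the distinction follows; the model names and prompt are illustrative only.

```python
# Sketch only: shows the response shapes the patched code relies on,
# assuming the openai>=1.0 Python client. Model names are illustrative.
import openai

client = openai.OpenAI()

# Chat models return a ChatCompletion: the generated text lives on the
# message object of the first choice.
chat_response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Extract keywords from: solar panels"}],
)
keywords = chat_response.choices[0].message.content.strip()

# Non-chat (legacy completion) models return a Completion: the generated
# text is exposed directly as `.text` on the first choice.
completion_response = client.completions.create(
    model="gpt-3.5-turbo-instruct",
    prompt="Extract keywords from: solar panels",
)
keywords = completion_response.choices[0].text.strip()
```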