Skip to content

Commit bf3ae09

Browse files
authored
Merge pull request #62 from daily-co/anthropic-support
Anthropic LLM service
2 parents 34ac796 + c91fa39 commit bf3ae09

File tree

2 files changed

+37
-1
lines changed

2 files changed

+37
-1
lines changed

pyproject.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "dailyai"
7-
version = "0.0.3"
7+
version = "0.0.3.1"
88
description = "An open source framework for real-time, multi-modal, conversational AI applications"
99
license = { text = "BSD 2-Clause License" }
1010
readme = "README.md"
@@ -21,6 +21,7 @@ classifiers = [
2121
]
2222
dependencies = [
2323
"aiohttp",
24+
"anthropic",
2425
"azure-cognitiveservices-speech",
2526
"daily-python",
2627
"fal",
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
import asyncio
2+
import os
3+
from typing import AsyncGenerator
4+
from anthropic import AsyncAnthropic
5+
from dailyai.pipeline.frames import Frame, LLMMessagesQueueFrame, TextFrame
6+
7+
from dailyai.services.ai_services import LLMService
8+
9+
10+
class AnthropicLLMService(LLMService):
    """LLM service backed by Anthropic's streaming Messages API.

    Frames that are not ``LLMMessagesQueueFrame`` are passed through
    unchanged; message frames are sent to the model and the streamed
    completion is re-emitted as a sequence of ``TextFrame``s.
    """

    def __init__(self, api_key, model="claude-3-opus-20240229", max_tokens=1024):
        """Create the service.

        Args:
            api_key: Anthropic API key, forwarded to ``AsyncAnthropic``.
            model: Model identifier to request completions from.
            max_tokens: Upper bound on generated tokens per completion.
        """
        super().__init__()
        self.client = AsyncAnthropic(api_key=api_key)
        self.model = model
        self.max_tokens = max_tokens

    async def process_frame(self, frame: Frame) -> AsyncGenerator[Frame, None]:
        """Yield ``TextFrame``s streamed from the model for message frames.

        Non-message frames are forwarded untouched.
        """
        if not isinstance(frame, LLMMessagesQueueFrame):
            yield frame
            # BUG FIX: the original fell through here, so *every* frame
            # (not just message frames) triggered an API call.
            return

        stream = await self.client.messages.create(
            max_tokens=self.max_tokens,
            # BUG FIX: the original hard-coded a "Hello, Claude" placeholder
            # and ignored the frame's contents entirely.
            # NOTE(review): assumes the queued messages live on
            # ``LLMMessagesQueueFrame.messages`` — confirm against the
            # frame definition in dailyai.pipeline.frames.
            messages=frame.messages,
            model=self.model,
            stream=True,
        )
        async for event in stream:
            # Only text deltas carry completion text; other stream events
            # (message_start, content_block_start, ...) are skipped.
            if event.type == "content_block_delta":
                yield TextFrame(event.delta.text)

0 commit comments

Comments
 (0)