Skip to content

Commit

Permalink
Adding a Streaming demo as well as the Regular one (#15)
Browse files Browse the repository at this point in the history
jrgleason authored Dec 1, 2024
1 parent 94e371e commit e8fcb6c
Showing 21 changed files with 865 additions and 342 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -40,3 +40,4 @@ app/src/main/resources/static/assets/*
app/src/main/resources/static/index.html
app/src/main/resources/static/fav*
.idea
.DS_Store
7 changes: 0 additions & 7 deletions app/build.gradle
Original file line number Diff line number Diff line change
@@ -39,17 +39,12 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-web'
implementation 'org.springframework.boot:spring-boot-starter-webflux'
implementation 'org.springframework.boot:spring-boot-starter-actuator'
implementation 'org.springframework.boot:spring-boot-starter-data-redis'

implementation 'org.springframework.boot:spring-boot-starter-data-redis'
implementation 'org.springframework.boot:spring-boot-starter-cache'

implementation 'org.springframework.boot:spring-boot-starter-integration'
implementation 'org.springframework.integration:spring-integration-mqtt:6.3.4'

implementation 'org.springframework.ai:spring-ai-openai-spring-boot-starter:1.0.0-M3'
implementation 'org.springframework.ai:spring-ai-anthropic-spring-boot-starter:1.0.0-M3'
// implementation 'org.springframework.ai:spring-ai-mongodb-atlas-store-spring-boot-starter:1.0.0-M3'
implementation 'org.springframework.ai:spring-ai-pinecone-store-spring-boot-starter:1.0.0-M3'

implementation 'org.springframework.boot:spring-boot-starter-websocket:3.3.5'
@@ -61,8 +56,6 @@ dependencies {
testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
testImplementation 'org.springframework.boot:spring-boot-starter-test'

// This dependency is used by the application.
implementation libs.guava
}

testing {
3 changes: 2 additions & 1 deletion app/src/main/java/org/example/App.java
Original file line number Diff line number Diff line change
@@ -5,8 +5,9 @@

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;

@SpringBootApplication
@SpringBootApplication(exclude = {RedisAutoConfiguration.class})
public class App {
public static void main(String[] args) {
SpringApplication.run(App.class, args);
12 changes: 6 additions & 6 deletions app/src/main/java/org/example/advisors/ReReadingAdvisor.java
Original file line number Diff line number Diff line change
@@ -13,12 +13,12 @@ private AdvisedRequest before(AdvisedRequest advisedRequest) {
advisedUserParams.put("re2_input_query", advisedRequest.userText());

return AdvisedRequest.from(advisedRequest)
.withUserText("""
{re2_input_query}
Read the question again: {re2_input_query}
""")
.withUserParams(advisedUserParams)
.build();
.withUserText("""
{re2_input_query}
Read the question again: {re2_input_query}
""")
.withUserParams(advisedUserParams)
.build();
}

@Override
6 changes: 3 additions & 3 deletions app/src/main/java/org/example/config/AuthConfig.java
Original file line number Diff line number Diff line change
@@ -13,9 +13,9 @@ public SecurityFilterChain filterChain(
HttpSecurity http
) throws Exception {
http.csrf(AbstractHttpConfigurer::disable)
.authorizeHttpRequests((authz) -> authz
.anyRequest().permitAll()
);
.authorizeHttpRequests((authz) -> authz
.anyRequest().permitAll()
);
return http.build();
}
}
Original file line number Diff line number Diff line change
@@ -18,17 +18,17 @@ public AnthropicController(
this.chatClient = chatClient;
}

@GetMapping("/anthropic")
@GetMapping
public ResponseEntity<String> questionAnthropic(
@RequestParam(
value = "message",
defaultValue = "How to analyze time-series data with Python and MongoDB?"
) String message
) {
String responseContent = chatClient.prompt()
.user(message)
.call()
.content();
.user(message)
.call()
.content();
return ResponseEntity.ok(responseContent);
}
}
29 changes: 19 additions & 10 deletions app/src/main/java/org/example/controller/OpenAIController.java
Original file line number Diff line number Diff line change
@@ -15,6 +15,7 @@
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
import reactor.core.publisher.Flux;

@RestController
@RequestMapping("openai")
@@ -41,13 +42,21 @@ public ResponseEntity<String> question(
) String message
) {
String responseContent = chatClient.prompt()
.user(message)
.call()
.content();
.user(message)
.call()
.content();
return ResponseEntity.ok(responseContent);
}

@GetMapping(value = "/stream", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
@GetMapping("/stream")
public Flux<String> chatWithStream(@RequestParam String message) {
return chatClient.prompt()
.user(message)
.stream()
.content();
}

@GetMapping(value = "/audio", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
public ResponseEntity<StreamingResponseBody> streamAudio(
@RequestParam(value = "message", defaultValue = "Today is a wonderful day!") String message) {

@@ -59,17 +68,17 @@ public ResponseEntity<StreamingResponseBody> streamAudio(
};

return ResponseEntity.ok()
.contentType(MediaType.parseMediaType("audio/mp3"))
.body(responseBody);
.contentType(MediaType.parseMediaType("audio/mp3"))
.body(responseBody);
}

@GetMapping("/image")
public String generate(@RequestParam(value = "message") String message) {
ImageOptions options = ImageOptionsBuilder.builder()
.withModel(OpenAiImageApi.ImageModel.DALL_E_3.getValue())
.withHeight(1024)
.withWidth(1024)
.build();
.withModel(OpenAiImageApi.ImageModel.DALL_E_3.getValue())
.withHeight(1024)
.withWidth(1024)
.build();

ImagePrompt imagePrompt = new ImagePrompt(message, options);
ImageResponse response = imageModel.call(imagePrompt);
8 changes: 0 additions & 8 deletions gradle/libs.versions.toml

This file was deleted.

2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.11-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
38 changes: 19 additions & 19 deletions ui/index.html
Original file line number Diff line number Diff line change
@@ -1,22 +1,22 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" content="initial-scale=1, width=device-width" name="viewport"/>
<meta content="width=device-width, initial-scale=1" name="viewport"/>
<meta content="The main webpage of Jackie Gleason" name="description">
<meta content="Software Engineer" name="keywords">
<meta content="Jackie Gleason" name="author">
<title>THE Jackie Gleason</title>
<link href="/favicon.ico" rel="icon"/>
<meta content="width=device-width, initial-scale=1" name="viewport"/>
<meta content="#000000" name="theme-color"/>
<meta content="Web site created using create-react-app" name="description"/>
<link href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap" rel="stylesheet"/>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet"/>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<script src="/src/index.jsx" type="module"></script>
</body>
<head>
<meta charset="utf-8" content="initial-scale=1, width=device-width" name="viewport"/>
<meta content="width=device-width, initial-scale=1" name="viewport"/>
<meta content="The main webpage of Jackie Gleason" name="description">
<meta content="Software Engineer" name="keywords">
<meta content="Jackie Gleason" name="author">
<title>THE Jackie Gleason</title>
<link href="/favicon.ico" rel="icon"/>
<meta content="width=device-width, initial-scale=1" name="viewport"/>
<meta content="#000000" name="theme-color"/>
<meta content="Web site created using create-react-app" name="description"/>
<link href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap" rel="stylesheet"/>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet"/>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<script src="/src/index.jsx" type="module"></script>
</body>
</html>
182 changes: 181 additions & 1 deletion ui/package-lock.json
10 changes: 7 additions & 3 deletions ui/package.json
Original file line number Diff line number Diff line change
@@ -12,17 +12,21 @@
"license": "ISC",
"description": "",
"dependencies": {
"@emotion/react": "latest",
"@emotion/styled": "latest",
"@monaco-editor/react": "latest",
"@mui/material": "latest",
"@vitejs/plugin-react": "latest",
"@xstate/react": "latest",
"lucide-react": "latest",
"react": "latest",
"react-dom": "latest",
"uuid": "latest",
"vite": "latest",
"@mui/material": "latest",
"@emotion/react": "latest",
"@emotion/styled": "latest"
"xstate": "latest"
},
"devDependencies": {
"@rollup/plugin-commonjs": "latest",
"autoprefixer": "latest",
"postcss": "latest",
"tailwindcss": "latest"
101 changes: 90 additions & 11 deletions ui/src/App.jsx
Original file line number Diff line number Diff line change
@@ -1,19 +1,98 @@
import React from 'react';
import React, { useState } from 'react';
import ChatInterface from './components/ChatInterface.jsx';
import DeviceStateComponent from "./components/DeviceStateComponent.jsx";
import { DocumentGrid } from './components/DocumentGrid.jsx';
import { AddDocumentModal } from './components/AddDocumentModal.jsx';
import { StateProvider } from "./state/StateProvider.jsx";
import { AppBar, Box, Button, Tab, Tabs, Toolbar, Typography } from '@mui/material';
import { FileText, MessageSquare, Upload } from 'lucide-react';

function TabPanel(props) {
const { children, value, index, ...other } = props;

return (
<div
role="tabpanel"
hidden={value !== index}
id={`simple-tabpanel-${index}`}
aria-labelledby={`simple-tab-${index}`}
{...other}
>
{value === index && (
<Box sx={{ p: 3 }}>
{children}
</Box>
)}
</div>
);
}

function App() {
console.log("Starting");
const [isAddDocumentOpen, setIsAddDocumentOpen] = useState(false);
const [tabValue, setTabValue] = useState(0);

const handleChange = (event, newValue) => {
setTabValue(newValue);
};

return (
<div className="min-h-screen bg-gray-100 py-8">
<div className="container mx-auto">
{/*<DeviceStateComponent></DeviceStateComponent>*/}
<h1 className="text-3xl font-bold text-center mb-8 text-gray-800">
Chat Interface
</h1>
<ChatInterface/>
<StateProvider>
<div className="min-h-screen bg-gray-100">
<AppBar position="fixed">
<Toolbar>
<Typography variant="h6" component="div" sx={{ flexGrow: 1 }}>
Chat Interface
</Typography>
</Toolbar>
</AppBar>
<div className="container mx-auto" style={{ paddingTop: '64px' }}>
<Box sx={{ width: '100%' }}>
<Box sx={{ borderBottom: 1, borderColor: 'divider' }}>
<Tabs
value={tabValue}
onChange={handleChange}
centered
aria-label="chat and documents tabs"
>
<Tab
icon={<MessageSquare className="w-4 h-4" />}
label="Chat"
iconPosition="start"
/>
<Tab
icon={<FileText className="w-4 h-4" />}
label="Documents"
iconPosition="start"
/>
</Tabs>
</Box>

<TabPanel value={tabValue} index={0}>
<ChatInterface />
</TabPanel>

<TabPanel value={tabValue} index={1}>
<div className="space-y-4">
<div className="flex justify-end mb-4">
<Button
variant="contained"
color="primary"
startIcon={<Upload className="w-4 h-4" />}
onClick={() => setIsAddDocumentOpen(true)}
>
Upload Document
</Button>
</div>
<DocumentGrid />
<AddDocumentModal
isOpen={isAddDocumentOpen}
onClose={() => setIsAddDocumentOpen(false)}
/>
</div>
</TabPanel>
</Box>
</div>
</div>
</div>
</StateProvider>
);
}

105 changes: 40 additions & 65 deletions ui/src/components/AudioController.jsx
Original file line number Diff line number Diff line change
@@ -1,111 +1,86 @@
import React, {useEffect, useRef, useState} from 'react';
import {Pause, Play, Volume2, VolumeX} from 'lucide-react';
import {useStateContext} from "../state/StateProvider.jsx";

const AudioController = ({audio}) => {
const AudioController = () => {
const [isPlaying, setIsPlaying] = useState(false);
const [isMuted, setIsMuted] = useState(false);
const [progress, setProgress] = useState(0);
const progressRef = useRef(null);

const {state, send} = useStateContext();

// Watch for audio becoming available
useEffect(() => {
console.log("Audio state changed:", state.context.audio);
if (state.context.audio) {
state.context.audio.play().catch(error => {
console.error('Error auto-playing audio:', error);
setIsPlaying(false);
});
}
}, [state.context.audio]);

// Handle audio event listeners
useEffect(() => {
const audio = state.context.audio;
if (!audio) return;

const updateProgress = () => setProgress((audio.currentTime / audio.duration) * 100);

const handlePlay = () => setIsPlaying(true);
const handlePause = () => setIsPlaying(false);
const handleEnded = () => {
setIsPlaying(false);
setProgress(0);
};

const handleTimeUpdate = () => {
const percentage = (audio.currentTime / audio.duration) * 100;
setProgress(percentage);
};

// Auto-play when audio is ready
const handleCanPlay = () => {
audio.play().catch(console.error);
};

audio.addEventListener('play', handlePlay);
audio.addEventListener('pause', handlePause);
audio.addEventListener('ended', handleEnded);
audio.addEventListener('timeupdate', handleTimeUpdate);
audio.addEventListener('canplay', handleCanPlay);
audio.addEventListener('timeupdate', updateProgress);

return () => {
audio.removeEventListener('play', handlePlay);
audio.removeEventListener('pause', handlePause);
audio.removeEventListener('ended', handleEnded);
audio.removeEventListener('timeupdate', handleTimeUpdate);
audio.removeEventListener('canplay', handleCanPlay);
audio.removeEventListener('timeupdate', updateProgress);
};
}, [audio]);
}, [state.context.audio]);

const togglePlay = () => {
if (!audio) return;

if (isPlaying) {
audio.pause();
} else {
audio.play().catch(console.error);
}
const audio = state.context.audio;
if (isPlaying) audio.pause();
else audio.play().catch(console.error);
};

const toggleMute = () => {
if (!audio) return;

const audio = state.context.audio;
audio.muted = !audio.muted;
setIsMuted(!isMuted);
};

const handleProgressClick = (e) => {
if (!audio || !progressRef.current) return;

const audio = state.context.audio;
const rect = progressRef.current.getBoundingClientRect();
const clickPosition = e.clientX - rect.left;
const percentage = (clickPosition / rect.width) * 100;
const newTime = (percentage / 100) * audio.duration;

const newTime = ((e.clientX - rect.left) / rect.width) * audio.duration;
audio.currentTime = newTime;
setProgress(percentage);
setProgress((newTime / audio.duration) * 100);
};

if (!audio) return null;

return (
<div className="flex items-center gap-2">
<button
onClick={togglePlay}
className="p-1 rounded-full hover:bg-gray-200 transition-colors"
aria-label={isPlaying ? 'Pause' : 'Play'}
>
{isPlaying ?
<Pause size={16} className="text-gray-700"/> :
<Play size={16} className="text-gray-700"/>
}
<div className="flex items-center gap-2 max-w-md">
<button onClick={togglePlay} className="p-1 rounded-full hover:bg-gray-200 transition-colors"
aria-label={isPlaying ? 'Pause' : 'Play'}>
{isPlaying ? <Pause size={16} className="text-gray-700"/> : <Play size={16} className="text-gray-700"/>}
</button>

<button
onClick={toggleMute}
className="p-1 rounded-full hover:bg-gray-200 transition-colors"
aria-label={isMuted ? 'Unmute' : 'Mute'}
>
{isMuted ?
<VolumeX size={16} className="text-gray-700"/> :
<Volume2 size={16} className="text-gray-700"/>
}
<button onClick={toggleMute} className="p-1 rounded-full hover:bg-gray-200 transition-colors"
aria-label={isMuted ? 'Unmute' : 'Mute'}>
{isMuted ? <VolumeX size={16} className="text-gray-700"/> :
<Volume2 size={16} className="text-gray-700"/>}
</button>

<div
ref={progressRef}
className="flex-1 h-1 bg-gray-200 rounded-full cursor-pointer"
onClick={handleProgressClick}
>
<div
className="h-full bg-blue-500 rounded-full"
style={{width: `${progress}%`}}
/>
<div ref={progressRef} className="flex-1 h-1 bg-gray-200 rounded-full cursor-pointer relative"
onClick={handleProgressClick}>
<div className="absolute top-0 left-0 h-full bg-blue-500 rounded-full" style={{width: `${progress}%`}}/>
</div>
</div>
);
177 changes: 100 additions & 77 deletions ui/src/components/ChatInterface.jsx
Original file line number Diff line number Diff line change
@@ -1,95 +1,118 @@
import React, {useState} from 'react';
import {LoadingSpinner} from './LoadingSpinner';
import {MessageBubble} from './MessageBubble';
import {ChatInput} from './ChatInput';
import {ModeToggle} from './ModeToggle';
import {useChat} from '../hooks/useChat';
import {AddDocumentModal} from "./AddDocumentModal.jsx";
import {Plus} from "lucide-react";
import {DocumentGrid} from "./DocumentGrid.jsx";
import React, { useState } from 'react';
import { LoadingSpinner } from './LoadingSpinner';
import { MessageBubble } from './MessageBubble';
import { ChatInput } from './ChatInput';
import { ModeToggle } from './ModeToggle';
import { useChat } from '../hooks/useChat';
import { useStateContext } from "../state/StateProvider.jsx";
import AudioController from './AudioController';
import Container from '@mui/material/Container';
import Card from '@mui/material/Card';
import CardContent from '@mui/material/CardContent';
import Stack from '@mui/material/Stack';
import Box from '@mui/material/Box';
import Typography from '@mui/material/Typography';
import Switch from '@mui/material/Switch';
import FormControlLabel from '@mui/material/FormControlLabel';
import Paper from '@mui/material/Paper';

const ChatInterface = () => {
const { state, send } = useStateContext();
const [message, setMessage] = useState('');
const [mode, setMode] = useState('openai-chat');
const [isAddDocumentOpen, setIsAddDocumentOpen] = useState(false);
const {messages, isLoading, sendMessage, audioElements} = useChat(); // Added audioElements
const [isStreaming, setIsStreaming] = useState(false);
const { isLoading, sendMessage, streamMessage } = useChat();

const handleSubmit = async (e) => {
e.preventDefault();
const success = await sendMessage(message, mode);
if (success) {
setMessage('');
if (isStreaming) {
await streamMessage(message, send);
} else {
await sendMessage(message, mode, send);
}
setMessage('');
};

// Helper function to automatically scroll to bottom
const scrollToBottom = (behavior = 'smooth') => {
window.scrollTo({
top: document.documentElement.scrollHeight,
behavior
});
};

// Scroll to bottom whenever messages change
React.useEffect(() => {
scrollToBottom();
}, [messages]);
const EmptyState = () => (
<Box className="text-center mt-4">
<Typography variant="body2" className="text-gray-400 italic">
{mode === 'openai-image'
? 'Describe an image to generate...'
: 'Start a conversation...'}
</Typography>
</Box>
);

return (
<div className="max-w-2xl mx-auto p-4 space-y-4">
<div className="flex justify-between items-center">
<ModeToggle mode={mode} setMode={setMode}/>
<button
onClick={() => setIsAddDocumentOpen(true)}
className="flex items-center gap-2 px-4 py-2 bg-green-500 text-white rounded-lg hover:bg-green-600"
>
<Plus size={16}/>
Add Document
</button>
</div>

<div className="rounded-lg bg-white p-4 shadow-md h-[600px] flex flex-col">
<div className="flex-1 overflow-y-auto space-y-2 mb-4 scroll-smooth">
{messages.length === 0 ? (
<div className="text-gray-400 italic text-center mt-4">
{mode === 'openai-image'
? 'Describe an image to generate...'
: 'Start a conversation...'}
</div>
) : (
<div className="space-y-4">
{messages.map((msg, index) => (
<MessageBubble
key={index}
message={msg}
audio={msg.hasAudio ? audioElements[msg.messageId] : null}
<Container maxWidth="md" className="py-4">
<Stack spacing={2}>
{/* Controls Bar */}
<Paper className="p-4">
<Stack direction="row" spacing={4} alignItems="center">
{!isStreaming && <ModeToggle mode={mode} setMode={setMode} />}
<FormControlLabel
control={
<Switch
checked={isStreaming}
onChange={() => setIsStreaming(!isStreaming)}
inputProps={{ 'aria-label': 'Use streaming' }}
/>
))}
{isLoading && (
<div className="flex justify-center">
<LoadingSpinner/>
</div>
)}
</div>
)}
</div>

<ChatInput
message={message}
setMessage={setMessage}
isLoading={isLoading}
onSubmit={handleSubmit}
mode={mode}
/>
</div>
}
label={
<Typography variant="body2" className="text-gray-600">
Streaming
</Typography>
}
/>
</Stack>
</Paper>

<DocumentGrid/>
{/* Chat Container */}
<Card>
<CardContent className="h-[600px] p-0">
<Stack className="h-full">
{/* Messages Area */}
<Box className="flex-1 overflow-y-auto p-4">
{Object.keys(state.context.messages).length === 0 ? (
<EmptyState />
) : (
<Stack spacing={2}>
{Object.values(state.context.messages).map((msg, index) => (
<MessageBubble
key={index}
message={{
...msg,
showAudioController: false
}}
/>
))}
{isLoading && (
<Box className="flex justify-center">
<LoadingSpinner />
</Box>
)}
</Stack>
)}
</Box>

<AddDocumentModal
isOpen={isAddDocumentOpen}
onClose={() => setIsAddDocumentOpen(false)}
/>
</div>
{/* Input Area */}
<Paper className="p-4 border-t" elevation={0}>
<Stack spacing={2}>
<ChatInput
message={message}
setMessage={setMessage}
isLoading={isLoading}
onSubmit={handleSubmit}
mode={mode}
/>
<AudioController />
</Stack>
</Paper>
</Stack>
</CardContent>
</Card>
</Stack>
</Container>
);
};

9 changes: 5 additions & 4 deletions ui/src/components/DeviceStateComponent.jsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import React, { useEffect, useState, useCallback } from 'react';
import { Card, CardContent, CardHeader, Typography, Button } from '@mui/material';
import { Alert, AlertTitle } from '@mui/material';
import React, {useCallback, useEffect, useState} from 'react';
import {Alert, AlertTitle, Button, Card, CardContent, CardHeader} from '@mui/material';

const DeviceStateComponent = () => {
const [deviceStates, setDeviceStates] = useState({});
const [connectionStatus, setConnectionStatus] = useState('disconnected');
@@ -100,7 +100,8 @@ const DeviceStateComponent = () => {
{Object.entries(deviceStates).map(([device, state]) => (
<div key={device} className="flex items-center justify-between p-2 border rounded">
<span className="font-medium">{device}</span>
<span className={`px-2 py-1 rounded ${state ? 'bg-green-100 text-green-800' : 'bg-red-100 text-red-800'}`}>
<span
className={`px-2 py-1 rounded ${state ? 'bg-green-100 text-green-800' : 'bg-red-100 text-red-800'}`}>
{state ? 'On' : 'Off'}
</span>
</div>
80 changes: 53 additions & 27 deletions ui/src/components/MessageBubble.jsx
Original file line number Diff line number Diff line change
@@ -1,12 +1,20 @@
import React from 'react';
import {Bot, ImageIcon, Sparkles} from 'lucide-react';
import AudioController from './AudioController';
import React, { useState, useEffect } from 'react';
import { Bot, ImageIcon, Sparkles } from 'lucide-react';
import { LoadingSpinner } from './LoadingSpinner';

export const MessageBubble = ({message, audio}) => {
export const MessageBubble = ({ message }) => {
const [isLoading, setIsLoading] = useState(true);
const isUser = message.type === 'user';
const isError = message.type === 'error';
const isImage = message.mode === 'openai-image' && message.type === 'ai';

useEffect(() => {
// If it's an AI message and content changes from empty to having content
if (message.type === 'ai' && message.content) {
setIsLoading(false);
}
}, [message.content, message.type]);

const isBase64 = (str) => {
try {
return str.startsWith('data:image') || btoa(atob(str)) === str;
@@ -41,6 +49,46 @@ export const MessageBubble = ({message, audio}) => {
}
};

const renderContent = () => {
if (isImage && message.type === 'ai') {
if (!message.content) {
return (
<div className="flex justify-center w-full">
<LoadingSpinner size="small" />
</div>
);
}

return (
<img
src={isBase64(message.content)
? `data:image/png;base64,${message.content}`
: message.content}
alt="Generated"
className="rounded-lg max-w-full h-auto"
onError={(e) => {
e.target.onerror = null;
e.target.src = 'data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M12 4v16m-8-8h16"/></svg>';
e.target.classList.add('bg-gray-200', 'p-4');
e.target.nextSibling.textContent = 'Error loading image';
}}
/>
);
}

return (
<div className="whitespace-pre-wrap min-h-[24px] min-w-[24px] flex items-center">
{!isUser && isLoading ? (
<div className="flex justify-center w-full">
<LoadingSpinner size="small" />
</div>
) : (
message.content
)}
</div>
);
};

return (
<div className={`flex ${isUser ? 'justify-end' : 'justify-start'} mb-4`}>
<div
@@ -59,31 +107,9 @@ export const MessageBubble = ({message, audio}) => {
</div>
)}

{isImage && message.type === 'ai' ? (
<img
src={isBase64(message.content)
? `data:image/png;base64,${message.content}`
: message.content}
alt="Generated"
className="rounded-lg max-w-full h-auto"
onError={(e) => {
e.target.onerror = null;
e.target.src = 'data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M12 4v16m-8-8h16"/></svg>';
e.target.classList.add('bg-gray-200', 'p-4');
e.target.nextSibling.textContent = 'Error loading image';
}}
/>
) : (
<div className="whitespace-pre-wrap">{message.content}</div>
)}
{renderContent()}

<div className="flex flex-col gap-2">
{!isUser && message.hasAudio && audio && (
<div className="mt-2 mb-1">
<AudioController audio={audio}/>
</div>
)}

<div className={`text-xs ${
isUser ? 'text-blue-100' : 'text-gray-500'
}`}>
177 changes: 82 additions & 95 deletions ui/src/hooks/useChat.js
Original file line number Diff line number Diff line change
@@ -1,125 +1,112 @@
import {useState} from 'react';
import {v4 as uuidv4} from 'uuid';

export const useChat = () => {
const [messages, setMessages] = useState([]);
const [isLoading, setIsLoading] = useState(false);
const [audioElements, setAudioElements] = useState({});

const getEndpoint = (mode) => {
switch (mode) {
case 'openai-chat':
return '/openai';
case 'openai-image':
return '/openai/image';
case 'anthropic':
return '/anthropic/anthropic';
default:
return '/openai';
const endpoints = {
'openai-chat': '/openai',
'openai-image': '/openai/image',
'anthropic': '/anthropic'
};
return endpoints[mode] || '/openai';
};

const generateAudioForMessage = async (messageId, text, send) => {
try {
const audioResponse = await fetch(`/openai/audio?message=${encodeURIComponent(text)}`, {
method: 'GET'
});
if (!audioResponse.ok) throw new Error('Audio stream response was not ok');

await send({
type: 'PLAYBACK',
audioResponse,
responseId: messageId
});
} catch (error) {
console.error('Audio generation error:', error);
}
};

const sendMessage = async (message, mode) => {
const sendMessage = async (message, mode, send) => {
if (!message.trim()) return;

const messageId = Date.now().toString();
const userMessage = {type: 'user', content: message, timestamp: new Date(), mode};
setMessages(msgs => [...msgs, userMessage]);
setIsLoading(true);
const messageId = uuidv4();
await send({
type: 'ASK',
message,
speaker: "user",
responder: "ai",
responseId: messageId,
mode
});

try {
// First get the text response
const endpoint = getEndpoint(mode);
const textResponse = await fetch(
`${endpoint}?message=${encodeURIComponent(message)}`,
{
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
}
);

const textResponse = await fetch(`${endpoint}?message=${encodeURIComponent(message)}`, {
method: 'GET',
headers: {'Content-Type': 'application/json'}
});
const responseText = await textResponse.text();

// Create and set up audio element
const audio = new Audio();
await send({type: 'STREAM', chunk: responseText, responseId: messageId});
await send({type: 'COMPLETE'});

// Now use that response text for the audio stream
const audioResponse = await fetch(
`/openai/stream?message=${encodeURIComponent(responseText)}`,
{method: 'GET'}
);
await generateAudioForMessage(messageId, responseText, send);
return true;

if (!audioResponse.ok) {
throw new Error('Audio stream response was not ok');
}
} catch (error) {
console.error('Error:', error);
await send({type: 'STREAM_ERROR', error: 'Failed to get response', responseId: messageId});
return false;
} finally {
setIsLoading(false);
}
};

const mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', async () => {
const sourceBuffer = mediaSource.addSourceBuffer('audio/mpeg');
const reader = audioResponse.body.getReader();
const chunks = [];

while (true) {
const {done, value} = await reader.read();
if (done) break;
chunks.push(value);
}

for (const chunk of chunks) {
await new Promise((resolve) => {
if (!sourceBuffer.updating) {
sourceBuffer.appendBuffer(chunk);
sourceBuffer.addEventListener('updateend', resolve, {once: true});
} else {
sourceBuffer.addEventListener('updateend', () => {
sourceBuffer.appendBuffer(chunk);
sourceBuffer.addEventListener('updateend', resolve, {once: true});
}, {once: true});
}
});
}

mediaSource.endOfStream();
});
const streamMessage = async (message, send) => {
if (!message.trim()) return;

const messageId = uuidv4();
let completeMessage = '';

// Store the audio element
setAudioElements(prev => ({
...prev,
[messageId]: audio
}));

// Add AI message
setMessages(msgs => [...msgs, {
type: 'ai',
content: responseText,
timestamp: new Date(),
mode,
messageId,
hasAudio: true
}]);
await send({type: 'ASK', message, speaker: "user", responder: "ai", responseId: messageId});

try {
const response = await fetch(`/openai/stream?message=${encodeURIComponent(message)}`, {
method: 'GET',
headers: {'Content-Type': 'application/json'}
});
const reader = response.body.getReader();
const decoder = new TextDecoder("utf-8");

while (true) {
const {done, value} = await reader.read();
if (done) break;
const chunk = decoder.decode(value, {stream: true});
completeMessage += chunk;
await send({
type: 'STREAM',
chunk,
responseId: messageId
});
}

await send({type: 'COMPLETE'});
if(message.mode !== 'openai-image'){
await generateAudioForMessage(messageId, completeMessage, send);
}
return true;

} catch (error) {
console.error('Audio Error:', error);
setMessages(msgs => [...msgs, {
type: 'error',
content: 'Error: Failed to get response',
timestamp: new Date(),
mode
}]);
await send({type: 'STREAM_ERROR'});
return false;
} finally {
setIsLoading(false);
}
};

return {
messages,
isLoading,
sendMessage,
audioElements
};
return {isLoading, sendMessage, streamMessage};
};
225 changes: 225 additions & 0 deletions ui/src/state/StateMachine.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,225 @@
import {assign, createMachine, fromPromise} from "xstate";
import {v4 as uuidv4} from 'uuid';

const handleAudioPlayback = fromPromise(async ({input}) => {
    // Streams the HTTP audio response in `input.audioResponse` into an <audio>
    // element via MediaSource, resolving with the element once it can play.
    // NOTE: XState v5 `fromPromise` actors receive {input, system, self, signal};
    // there is no `context` argument, so the previous `context` destructure was dead.
    const audio = new Audio();
    const mediaSource = new MediaSource();

    // Create object URL and set as audio source
    audio.src = URL.createObjectURL(mediaSource);

    return new Promise((resolve, reject) => {
        mediaSource.addEventListener('sourceopen', async () => {
            try {
                console.log("Media source opened");
                // Get the audio stream
                const response = input.audioResponse;
                const reader = response.body.getReader();

                // Try to determine content type from response headers
                const contentType = response.headers.get('content-type');
                let sourceBuffer;

                // Fallback MIME types if content-type header is missing or unsupported
                const mimeTypes = [
                    contentType,
                    'audio/mpeg',
                    'audio/mp4',
                    'audio/aac',
                    'audio/webm',
                    'audio/webm; codecs=opus'
                ].filter(Boolean); // Remove null/undefined entries

                // Try each MIME type until we find one that works
                for (const mimeType of mimeTypes) {
                    if (MediaSource.isTypeSupported(mimeType)) {
                        try {
                            sourceBuffer = mediaSource.addSourceBuffer(mimeType);
                            break;
                        } catch (e) {
                            console.warn(`Failed to create source buffer for ${mimeType}:`, e);
                        }
                    }
                }

                if (!sourceBuffer) {
                    throw new Error('No supported audio format found');
                }

                // Safely append one chunk: wait out any in-flight update, and
                // register the 'updateend' listener BEFORE appending so a fast
                // completion cannot be missed.
                const appendBuffer = (chunk) => new Promise((resolveAppend, rejectAppend) => {
                    const doAppend = () => {
                        sourceBuffer.addEventListener('updateend', resolveAppend, {once: true});
                        try {
                            sourceBuffer.appendBuffer(chunk);
                        } catch (e) {
                            // appendBuffer throws synchronously (e.g. QuotaExceededError);
                            // undo the listener and surface the error to the caller.
                            sourceBuffer.removeEventListener('updateend', resolveAppend);
                            rejectAppend(e);
                        }
                    };
                    if (sourceBuffer.updating) {
                        sourceBuffer.addEventListener('updateend', doAppend, {once: true});
                    } else {
                        doAppend();
                    }
                });

                // Evict the older half of the buffered range so a retry can fit.
                // (The old code divided by `timestampOffset`, which defaults to 0
                // and produced remove(0, Infinity).)
                const evictBufferedData = () => new Promise((resolveRemove) => {
                    const buffered = sourceBuffer.buffered;
                    if (buffered.length === 0) {
                        resolveRemove();
                        return;
                    }
                    const start = buffered.start(0);
                    const end = buffered.end(buffered.length - 1);
                    sourceBuffer.addEventListener('updateend', resolveRemove, {once: true});
                    sourceBuffer.remove(start, start + (end - start) / 2);
                });

                // Process chunks as they arrive
                while (true) {
                    const {done, value} = await reader.read();
                    if (done) break;

                    try {
                        await appendBuffer(value);
                    } catch (e) {
                        console.error('Error appending buffer:', e);
                        // If we hit a quota exceeded error, drop old data and retry once
                        if (e.name === 'QuotaExceededError') {
                            await evictBufferedData();
                            await appendBuffer(value);
                        } else {
                            throw e;
                        }
                    }
                }

                // All chunks processed, end the stream
                if (mediaSource.readyState === 'open') {
                    mediaSource.endOfStream();
                }

                // Set up audio element event handlers
                audio.addEventListener('canplay', () => {
                    resolve(audio);
                }, {once: true});

                // Media 'error' events carry no `.error` property; the MediaError
                // lives on the element itself.
                audio.addEventListener('error', () => {
                    const detail = audio.error ? audio.error.message : 'unknown';
                    reject(new Error('Audio element error: ' + detail));
                }, {once: true});

            } catch (error) {
                reject(error);
            }
        }, {once: true});

        // The event is named 'sourceclose' (the old 'sourceclosed' never fired);
        // MediaSource has no 'sourceerror' event, so that listener was removed.
        mediaSource.addEventListener('sourceclose', () => {
            reject(new Error('MediaSource was closed'));
        }, {once: true});
    });
});

// Promise actor: records the user's question and seeds an empty response entry
// (keyed by `input.responseId`) that STREAM events will fill in later.
// Resolves with the new messages map; never mutates `input.messages`.
const askQuestion = fromPromise(async ({input}) => {
    try {
        console.log(`Adding message ${input.message} by ${input.speaker}`);
        const questionEntry = {
            type: input.speaker,
            content: input.message,
            timestamp: new Date()
        };
        const pendingResponseEntry = {
            type: input.responder,
            content: "",
            mode: input.mode,
            timestamp: new Date()
        };
        return {
            ...input.messages,
            [uuidv4()]: questionEntry,
            [input.responseId]: pendingResponseEntry
        };
    } catch (error) {
        console.error('Error in askQuestion:', error);
        throw error;
    }
});

// Chat state machine: idle -> ask (question + streamed answer) -> idle, with a
// separate `playback` state that streams an audio response into an element.
export const simpleMachine = createMachine({
    initial: 'idle',
    context: {
        messages: {},      // map of messageId -> {type, content, mode?, timestamp}
        isLoading: false,
        audioElements: {},
        errorMessage: ""
    },
    states: {
        idle: {
            on: {
                ASK: 'ask',
                PLAYBACK: {
                    target: 'playback',
                }
            }
        },
        playback: {
            invoke: {
                src: handleAudioPlayback,
                input: ({event}) => ({
                    responseId: event.responseId,
                    audioResponse: event.audioResponse
                }),
                onDone: {
                    target: 'idle',
                    // XState v5 `assign` merges the returned partial; the old code
                    // mutated `context` in place and returned nothing, bypassing
                    // the immutable-update contract.
                    actions: assign({
                        audio: ({event}) => event.output
                    })
                },
                onError: {
                    target: 'idle',
                    // XState v5 exposes the rejection as `event.error`
                    // (`event.data` was the v4 field and is undefined here).
                    actions: assign({
                        errorMessage: ({event}) => event.error
                    })
                }
            }
        },
        ask: {
            invoke: {
                src: askQuestion,
                input: ({event, context}) => ({
                    messages: context.messages,
                    message: event.message,
                    responseId: event.responseId,
                    speaker: event.speaker,
                    responder: event.responder,
                    mode: event.mode
                }),
                onDone: {
                    actions: assign({
                        messages: ({event}) => event.output
                    })
                },
                onError: {
                    target: 'idle',
                    actions: assign({
                        errorMessage: ({event}) => event.error
                    })
                }
            },
            on: {
                STREAM: {
                    // Append a streamed chunk to the pending response message,
                    // rebuilding the map immutably instead of mutating context.
                    actions: assign({
                        messages: ({context, event}) => {
                            const current = context.messages[event.responseId];
                            return {
                                ...context.messages,
                                [event.responseId]: {
                                    ...current,
                                    content: current.content + event.chunk
                                }
                            };
                        }
                    })
                },
                STREAM_ERROR: {
                    target: 'idle',
                    actions: assign({
                        // TODO: Add error message to the responseId
                        errorMessage: ({event}) => event.error
                    })
                },
                COMPLETE: {
                    target: 'idle'
                }
            }
        }
    }
});
26 changes: 26 additions & 0 deletions ui/src/state/StateProvider.jsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
// StateProvider.jsx
import React, {createContext, useContext, useEffect} from 'react';
import {useMachine} from '@xstate/react';
import {simpleMachine} from "./StateMachine.js";

const StateContext = createContext();

export const StateProvider = ({children}) => {
const [state, send] = useMachine(simpleMachine);

useEffect(() => {
send(
{type: 'FETCH'}
)
}, []);

return (
<StateContext.Provider value={{state, send}}>
{children}
</StateContext.Provider>
);
};

// Hook for consuming the machine's {state, send} pair below a StateProvider.
export const useStateContext = () => useContext(StateContext);
1 change: 1 addition & 0 deletions ui/vite.config.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import {defineConfig} from 'vite';
import react from '@vitejs/plugin-react';

let chunkCounter = 0;

const autoIncrementPlugin = () => {

0 comments on commit e8fcb6c

Please sign in to comment.