From 2f6184b99d6fe69991c400a73bb0c56915aa8afa Mon Sep 17 00:00:00 2001
From: L <6723574+louisgv@users.noreply.github.com>
Date: Sun, 11 Jun 2023 23:14:48 -0400
Subject: [PATCH] chore: cleanup ux, prepare for prompt params tweak (#28)

---
 apps/desktop/package.json                     |  2 +-
 apps/desktop/src-tauri/src/inference/mod.rs   |  2 ++
 .../stop_handler.rs                           |  0
 .../mod.rs => inference/thread.rs}            |  5 ++--
 .../desktop/src-tauri/src/inference_server.rs |  2 +-
 apps/desktop/src-tauri/src/main.rs            |  2 +-
 apps/desktop/src-tauri/src/model_pool.rs      |  2 +-
 .../src-tauri/src/threads_directory.rs        |  1 +
 .../inference-server/model-list-item.tsx      |  4 +--
 .../model-downloader/model-selector.tsx       |  4 +--
 .../src/features/thread/new-thread.tsx        |  2 +-
 .../src/features/thread/prompt-textarea.tsx   |  2 +-
 apps/desktop/src/features/thread/side-bar.tsx | 13 ++++-----
 .../src/features/thread/use-active-thread.ts  |  2 +-
 .../src/features/thread/use-thread-config.ts  | 11 +++++++
 apps/desktop/src/views/chat.tsx               | 10 ++++---
 apps/desktop/src/views/model-manager.tsx      | 29 ++++++++++---------
 17 files changed, 53 insertions(+), 40 deletions(-)
 create mode 100644 apps/desktop/src-tauri/src/inference/mod.rs
 rename apps/desktop/src-tauri/src/{inference_thread => inference}/stop_handler.rs (100%)
 rename apps/desktop/src-tauri/src/{inference_thread/mod.rs => inference/thread.rs} (98%)
 create mode 100644 apps/desktop/src/features/thread/use-thread-config.ts

diff --git a/apps/desktop/package.json b/apps/desktop/package.json
index 5e5a59e..6ffeb0f 100644
--- a/apps/desktop/package.json
+++ b/apps/desktop/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@localai/desktop",
   "private": true,
-  "version": "0.2.7",
+  "version": "0.2.8",
   "scripts": {
     "dev:next": "next dev -p 1470",
     "build:next": "next build",
diff --git a/apps/desktop/src-tauri/src/inference/mod.rs b/apps/desktop/src-tauri/src/inference/mod.rs
new file mode 100644
index 0000000..145e50d
--- /dev/null
+++ b/apps/desktop/src-tauri/src/inference/mod.rs
@@ -0,0 +1,2 @@
+mod stop_handler;
+pub mod thread;
diff --git a/apps/desktop/src-tauri/src/inference_thread/stop_handler.rs b/apps/desktop/src-tauri/src/inference/stop_handler.rs
similarity index 100%
rename from apps/desktop/src-tauri/src/inference_thread/stop_handler.rs
rename to apps/desktop/src-tauri/src/inference/stop_handler.rs
diff --git a/apps/desktop/src-tauri/src/inference_thread/mod.rs b/apps/desktop/src-tauri/src/inference/thread.rs
similarity index 98%
rename from apps/desktop/src-tauri/src/inference_thread/mod.rs
rename to apps/desktop/src-tauri/src/inference/thread.rs
index eb26566..23554d2 100644
--- a/apps/desktop/src-tauri/src/inference_thread/mod.rs
+++ b/apps/desktop/src-tauri/src/inference/thread.rs
@@ -1,3 +1,4 @@
+/// Inference thread as in Machine Physical Thread
 use std::{convert::Infallible, sync::Arc};
 
 use actix_web::web::Bytes;
@@ -15,12 +16,10 @@ use serde::{Deserialize, Serialize};
 use tokio::task::JoinHandle;
 
 use crate::{
-  inference_thread::stop_handler::StopHandler,
+  inference::stop_handler::StopHandler,
   model_pool::{self, get_n_threads},
 };
 
-mod stop_handler;
-
 #[derive(Serialize, Deserialize, Debug)]
 pub struct CompletionRequest {
   prompt: String,
diff --git a/apps/desktop/src-tauri/src/inference_server.rs b/apps/desktop/src-tauri/src/inference_server.rs
index 994ffc9..8530852 100644
--- a/apps/desktop/src-tauri/src/inference_server.rs
+++ b/apps/desktop/src-tauri/src/inference_server.rs
@@ -15,7 +15,7 @@ use std::sync::{
 
 use crate::abort_stream::AbortStream;
 use crate::config::ConfigKey;
-use crate::inference_thread::{
+use crate::inference::thread::{
   start_inference, CompletionRequest, InferenceThreadRequest,
 };
 use crate::model_pool::{self, spawn_pool};
diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs
index 4376a09..9d827dc 100644
--- a/apps/desktop/src-tauri/src/main.rs
+++ b/apps/desktop/src-tauri/src/main.rs
@@ -7,8 +7,8 @@ mod abort_stream;
 mod config;
 mod db;
 mod downloader;
+mod inference;
 mod inference_server;
-mod inference_thread;
 mod kv_bucket;
 mod macros;
 mod model_integrity;
diff --git a/apps/desktop/src-tauri/src/model_pool.rs b/apps/desktop/src-tauri/src/model_pool.rs
index 6473cd8..ab81963 100644
--- a/apps/desktop/src-tauri/src/model_pool.rs
+++ b/apps/desktop/src-tauri/src/model_pool.rs
@@ -7,7 +7,7 @@ use llm::{load_progress_callback_stdout, ModelArchitecture, VocabularySource};
 
 use std::path::Path;
 
-use crate::inference_thread::ModelGuard;
+use crate::inference::thread::ModelGuard;
 use std::collections::VecDeque;
 
 pub static LOADED_MODEL_POOL: Lazy<Mutex<VecDeque<Option<ModelGuard>>>> =
diff --git a/apps/desktop/src-tauri/src/threads_directory.rs b/apps/desktop/src-tauri/src/threads_directory.rs
index 24606e7..22cda09 100644
--- a/apps/desktop/src-tauri/src/threads_directory.rs
+++ b/apps/desktop/src-tauri/src/threads_directory.rs
@@ -1,3 +1,4 @@
+/// Thread as in chat/conversation thread
 use chrono::Utc;
 use rand::Rng;
 use serde::{Deserialize, Serialize};
diff --git a/apps/desktop/src/features/inference-server/model-list-item.tsx b/apps/desktop/src/features/inference-server/model-list-item.tsx
index 2e0e69a..474261a 100644
--- a/apps/desktop/src/features/inference-server/model-list-item.tsx
+++ b/apps/desktop/src/features/inference-server/model-list-item.tsx
@@ -32,8 +32,8 @@ export const ModelListItem = ({ model }: { model: ModelMetadata }) => {
         "text-gray-11 hover:text-gray-12",
         "transition-colors group",
         activeModel?.path === model.path
-          ? "ring ring-green-7 hover:ring-green-8"
-          : "ring ring-gray-7 hover:ring-gray-8"
+          ? "border border-green-7 hover:border-green-8"
+          : "border border-gray-7 hover:border-gray-8"
       )}>
diff --git a/apps/desktop/src/features/model-downloader/model-selector.tsx b/apps/desktop/src/features/model-downloader/model-selector.tsx
index 274cf4d..3602492 100644
--- a/apps/desktop/src/features/model-downloader/model-selector.tsx
+++ b/apps/desktop/src/features/model-downloader/model-selector.tsx
@@ -17,7 +17,7 @@ import { toGB } from "~features/model-downloader/model-file"
 import { useModelsApi } from "~features/model-downloader/use-models-api"
 import { useGlobal } from "~providers/global"
 
-export const ModelSelector = () => {
+export const ModelSelector = ({ className = "" }) => {
   const {
     modelsDirectoryState: { updateModelsDirectory, modelsMap }
   } = useGlobal()
@@ -34,7 +34,7 @@ export const ModelSelector = () => {
   )
 
   return (
-
+