1 parent 743fa63 commit 4235f0a
backend/src/api/endpoints/chat.py
@@ -2,7 +2,7 @@

 from app.core.authorization.authz_user import AuthzUser
 from app.core.data.dto.chat import LLMSessionResponse
-from app.core.data.llm.chat_service import (
+from app.core.data.llm.llm_chat import (
     chat_session,
     retrieval_augmented_generation_with_session,
 )
@@ -49,7 +49,7 @@ def rag_with_session(
 @router.get(
     "/chat_session",
     response_model=LLMSessionResponse,
-    summary="TEST CHAT SESSION",
+    summary="Initiate or continue a chat session with the LLM using a prompt",
 )
 def chat_sesh(
     *,
backend/src/app/core/data/llm/chat_service.py renamed to backend/src/app/core/data/llm/llm_chat.py
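For context, here is a minimal sketch of what the endpoint might look like after this commit. Only the import path, route, response model, and summary string are taken from the diff above; the body of chat_sesh, its parameters (prompt, authz_user), and the call signature of chat_session are assumptions made for illustration, not code from the repository.

# Hypothetical sketch; see the lead-in above for which parts are assumed.
from fastapi import APIRouter, Depends

from app.core.authorization.authz_user import AuthzUser
from app.core.data.dto.chat import LLMSessionResponse
from app.core.data.llm.llm_chat import chat_session  # renamed from app.core.data.llm.chat_service


router = APIRouter()


@router.get(
    "/chat_session",
    response_model=LLMSessionResponse,
    summary="Initiate or continue a chat session with the LLM using a prompt",
)
def chat_sesh(
    *,
    prompt: str,                          # assumed query parameter carrying the user's message
    authz_user: AuthzUser = Depends(),    # assumed dependency wiring for the authorized user
) -> LLMSessionResponse:
    # Delegate to the renamed module; the exact arguments are an assumption.
    return chat_session(prompt=prompt, user=authz_user)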