Commit 445bb6a

🚑 fix max tokens not working on LLMs other than the default
Meteord committed Sep 11, 2024
1 parent da4a38a commit 445bb6a
Showing 2 changed files with 41 additions and 6 deletions.
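Background on the fix: in LangChain, configurable fields are declared per runnable. getModel builds one default LLM and collects the remaining configured models as alternatives (presumably wired together with configurable_alternatives), but the ConfigurableField declarations, including llm_max_tokens, lived only on the default runnable. A request-time token limit was therefore silently ignored whenever a non-default model was selected; the commit declares the same fields on every alternative. A minimal sketch of the mechanism, not taken from the repository (model names and the dummy API key are placeholders), assuming langchain-core and langchain-openai:

from langchain_core.runnables import ConfigurableField
from langchain_openai import ChatOpenAI

# Default model: its max_tokens is exposed under the shared id "llm_max_tokens".
default_llm = ChatOpenAI(
    model="gpt-4o-mini", api_key="sk-dummy", max_tokens=100
).configurable_fields(
    max_tokens=ConfigurableField(id="llm_max_tokens", name="LLM max Tokens"),
)

# Alternative model: before the fix it had no configurable_fields of its own,
# so "llm_max_tokens" had no effect whenever this model was selected.
alternative = ChatOpenAI(
    model="gpt-4o", api_key="sk-dummy", max_tokens=100
).configurable_fields(
    max_tokens=ConfigurableField(id="llm_max_tokens", name="LLM max Tokens"),
)

llm = default_llm.configurable_alternatives(
    ConfigurableField(id="llm"),
    default_key="gpt-4o-mini",
    **{"gpt-4o": alternative},
)

# Request-time override: select the alternative and cap its output tokens.
configured = llm.with_config(configurable={"llm": "gpt-4o", "llm_max_tokens": 50})

Because each alternative carries its own configurable_fields, the shared id "llm_max_tokens" applies no matter which model the request selects.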
46 changes: 41 additions & 5 deletions app/backend/core/llmhelper.py
@@ -63,7 +63,27 @@ def getModel(models: List[ModelsConfig],
                 n=n,
                 streaming=streaming,
                 temperature=temperature,
-            )
+            ).configurable_fields(
+                temperature=ConfigurableField(
+                    id="llm_temperature",
+                    name="LLM Temperature",
+                    description="The temperature of the LLM",
+                ),
+                max_tokens= ConfigurableField(
+                    id="llm_max_tokens",
+                    name="LLM max Tokens",
+                    description="The token Limit of the LLM",
+                ),
+                streaming = ConfigurableField(
+                    id="llm_streaming",
+                    name="Streaming",
+                    description="Should the LLM Stream"),
+                callbacks = ConfigurableField(
+                    id="llm_callbacks",
+                    name="Callbacks",
+                    description="Callbacks for the llm")
+            )
         elif model["type"] == "OPENAI":
             alternative = ChatOpenAI(
                 model=model["model_name"],
@@ -73,6 +93,26 @@ def getModel(models: List[ModelsConfig],
                 n=n,
                 streaming=streaming,
                 temperature=temperature,
-            )
+            ).configurable_fields(
+                temperature=ConfigurableField(
+                    id="llm_temperature",
+                    name="LLM Temperature",
+                    description="The temperature of the LLM",
+                ),
+                max_tokens= ConfigurableField(
+                    id="llm_max_tokens",
+                    name="LLM max Tokens",
+                    description="The token Limit of the LLM",
+                ),
+                streaming = ConfigurableField(
+                    id="llm_streaming",
+                    name="Streaming",
+                    description="Should the LLM Stream"),
+                callbacks = ConfigurableField(
+                    id="llm_callbacks",
+                    name="Callbacks",
+                    description="Callbacks for the llm")
+            )
             alternatives[model["model_name"]] = alternative
     llm = llm.configurable_fields(
@@ -86,10 +126,6 @@ def getModel(models: List[ModelsConfig],
             name="LLM max Tokens",
             description="The token Limit of the LLM",
         ),
-        openai_api_key = ConfigurableField(
-            id="llm_api_key",
-            name="The api key",
-            description="The api key"),
         streaming = ConfigurableField(
             id="llm_streaming",
             name="Streaming",
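With the fields now declared on each alternative as well, a per-request override reaches whichever model is selected. A hypothetical call, assuming llm is the configurable runnable returned by getModel and "gpt-4o" is one of the configured model names:

# Hypothetical usage; `llm` is the runnable built by getModel above.
response = llm.invoke(
    "Hello",
    # "llm" picks the alternative by model name; "llm_max_tokens" now applies to it.
    config={"configurable": {"llm": "gpt-4o", "llm_max_tokens": 256}},
)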
1 change: 0 additions & 1 deletion app/backend/core/types/LlmConfigs.py
@@ -10,6 +10,5 @@ class LlmConfigs(TypedDict, total=False):
     llm: NotRequired[str]  # one of the SupportedModels
     llm_max_tokens: NotRequired[int]
     llm_temperature: NotRequired[float]
-    llm_api_key: NotRequired[str]
     llm_streaming: NotRequired[bool]
     llm_callbacks: NotRequired[List]
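The keys of LlmConfigs mirror the ConfigurableField ids declared in llmhelper.py, which is why llm_api_key is removed from both files together. A small sketch (not repository code; the imports are assumptions) of how such a config maps onto a LangChain RunnableConfig:

from typing import List
from typing_extensions import NotRequired, TypedDict

from langchain_core.runnables import RunnableConfig

class LlmConfigs(TypedDict, total=False):
    llm: NotRequired[str]  # one of the SupportedModels
    llm_max_tokens: NotRequired[int]
    llm_temperature: NotRequired[float]
    llm_streaming: NotRequired[bool]
    llm_callbacks: NotRequired[List]

config: LlmConfigs = {"llm_max_tokens": 128, "llm_temperature": 0.2}
# The TypedDict keys become the "configurable" section of a RunnableConfig,
# where the runnable matches them against its ConfigurableField ids.
runnable_config: RunnableConfig = {"configurable": dict(config)}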
