Commit 80ca31b

Merge pull request #578 from devoxx/issue-576
Swapped LocalAIChatModel to OpenAIChatModel. Rolled back to v1.0.0-bet…
2 parents 476670e + cc3cbcd commit 80ca31b

15 files changed: +102 additions, -66 deletions

build.gradle.kts

Lines changed: 2 additions & 3 deletions
@@ -50,16 +50,15 @@ tasks.named("buildPlugin") {
 }
 
 dependencies {
-    val lg4j_version = "1.0.0-beta2"
+    val lg4j_version = "1.0.0-beta1"
 
     // Add the dependencies for the core module
     implementation(project(":core"))
 
     // Langchain4J dependencies
     implementation("dev.langchain4j:langchain4j:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-ollama:$lg4j_version")
-    implementation("dev.langchain4j:langchain4j-local-ai:$lg4j_version")
-    implementation("dev.langchain4j:langchain4j-open-ai:1.0.0-alpha2-SNAPSHOT")
+    implementation("dev.langchain4j:langchain4j-open-ai:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-anthropic:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-bedrock:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-mistral-ai:$lg4j_version")

core/build.gradle.kts

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ repositories {
 }
 
 dependencies {
-    val lg4j_version = "1.0.0-beta2"
+    val lg4j_version = "1.0.0-beta1"
     implementation("dev.langchain4j:langchain4j:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-ollama:$lg4j_version")
     implementation("dev.langchain4j:langchain4j-local-ai:$lg4j_version")

src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@
 import com.devoxx.genie.chatmodel.local.customopenai.CustomOpenAIChatModelFactory;
 import com.devoxx.genie.chatmodel.local.gpt4all.GPT4AllChatModelFactory;
 import com.devoxx.genie.chatmodel.local.jan.JanChatModelFactory;
-import com.devoxx.genie.chatmodel.local.llamaCPP.LlamaChatModelFactory;
+import com.devoxx.genie.chatmodel.local.llamacpp.LlamaChatModelFactory;
 import com.devoxx.genie.chatmodel.local.lmstudio.LMStudioChatModelFactory;
 import com.devoxx.genie.chatmodel.local.ollama.OllamaChatModelFactory;
 import org.jetbrains.annotations.NotNull;

src/main/java/com/devoxx/genie/chatmodel/local/LocalChatModelFactory.java

Lines changed: 8 additions & 6 deletions
@@ -9,8 +9,8 @@
 import com.intellij.util.concurrency.AppExecutorUtil;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
-import dev.langchain4j.model.localai.LocalAiChatModel;
-import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
+import dev.langchain4j.model.openai.OpenAiChatModel;
+import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
 import org.jetbrains.annotations.NotNull;
 
 import java.io.IOException;
@@ -40,9 +40,10 @@ protected LocalChatModelFactory(ModelProvider modelProvider) {
 
     protected abstract String getModelUrl();
 
-    protected ChatLanguageModel createLocalAiChatModel(@NotNull ChatModel chatModel) {
-        return LocalAiChatModel.builder()
+    protected ChatLanguageModel createOpenAiChatModel(@NotNull ChatModel chatModel) {
+        return OpenAiChatModel.builder()
             .baseUrl(getModelUrl())
+            .apiKey("na")
             .modelName(chatModel.getModelName())
             .maxRetries(chatModel.getMaxRetries())
             .temperature(chatModel.getTemperature())
@@ -52,9 +53,10 @@ protected ChatLanguageModel createLocalAiChatModel(@NotNull ChatModel chatModel)
             .build();
     }
 
-    protected StreamingChatLanguageModel createLocalAiStreamingChatModel(@NotNull ChatModel chatModel) {
-        return LocalAiStreamingChatModel.builder()
+    protected StreamingChatLanguageModel createOpenAiStreamingChatModel(@NotNull ChatModel chatModel) {
+        return OpenAiStreamingChatModel.builder()
             .baseUrl(getModelUrl())
+            .apiKey("na")
             .modelName(chatModel.getModelName())
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
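
In practice this means every local provider now talks to its engine through langchain4j's OpenAI client, pointed at the local server's base URL with a placeholder API key (local servers do not validate keys). Below is a minimal, self-contained sketch of that configuration; the URL and model name are illustrative, not taken from this commit, and the chat(String) convenience call is assumed to be available in the langchain4j 1.0.x chat API.

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;

public class LocalOpenAiCompatibleSketch {

    public static void main(String[] args) {
        // Hypothetical base URL and model name; any OpenAI-compatible local server
        // (GPT4All, Jan, LM Studio, llama.cpp server) exposes a similar endpoint.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .baseUrl("http://localhost:8080/v1") // assumed local endpoint
                .apiKey("na")                        // placeholder, mirrors the change above
                .modelName("local-model")            // assumed model identifier
                .temperature(0.7)
                .maxRetries(3)
                .build();

        // Convenience call from the chat API (assumed present in this langchain4j version).
        System.out.println(model.chat("Say hello"));
    }
}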

src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllChatModelFactory.java

Lines changed: 2 additions & 2 deletions
@@ -20,12 +20,12 @@ public GPT4AllChatModelFactory() {
 
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiChatModel(chatModel);
+        return createOpenAiChatModel(chatModel);
     }
 
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiStreamingChatModel(chatModel);
+        return createOpenAiStreamingChatModel(chatModel);
     }
 
     @Override

src/main/java/com/devoxx/genie/chatmodel/local/jan/JanChatModelFactory.java

Lines changed: 2 additions & 2 deletions
@@ -20,12 +20,12 @@ public JanChatModelFactory() {
 
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiChatModel(chatModel);
+        return createOpenAiChatModel(chatModel);
     }
 
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiStreamingChatModel(chatModel);
+        return createOpenAiStreamingChatModel(chatModel);
     }
 
     @Override

src/main/java/com/devoxx/genie/chatmodel/local/llamaCPP/LlamaChatModelFactory.java renamed to src/main/java/com/devoxx/genie/chatmodel/local/llamacpp/LlamaChatModelFactory.java

Lines changed: 6 additions & 5 deletions
@@ -1,12 +1,12 @@
-package com.devoxx.genie.chatmodel.local.llamaCPP;
+package com.devoxx.genie.chatmodel.local.llamacpp;
 
 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
 import com.devoxx.genie.model.LanguageModel;
 import com.devoxx.genie.model.enumarations.ModelProvider;
 import com.devoxx.genie.ui.settings.DevoxxGenieStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
-import dev.langchain4j.model.localai.LocalAiChatModel;
+import dev.langchain4j.model.openai.OpenAiChatModel;
 import org.jetbrains.annotations.NotNull;
 
 import java.time.Duration;
@@ -17,8 +17,9 @@ public class LlamaChatModelFactory implements ChatModelFactory {
 
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
-        return LocalAiChatModel.builder()
+        return OpenAiChatModel.builder()
             .baseUrl(DevoxxGenieStateService.getInstance().getLlamaCPPUrl())
+            .apiKey("na")
             .modelName(chatModel.getModelName())
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
@@ -29,7 +30,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
 
     @Override
     public List<LanguageModel> getModels() {
-        LanguageModel lmStudio = LanguageModel.builder()
+        LanguageModel model = LanguageModel.builder()
             .provider(ModelProvider.LLaMA)
             .modelName(TEST_MODEL)
             .displayName(TEST_MODEL)
@@ -40,7 +41,7 @@ public List<LanguageModel> getModels() {
             .build();
 
         List<LanguageModel> modelNames = new ArrayList<>();
-        modelNames.add(lmStudio);
+        modelNames.add(model);
         return modelNames;
     }
 }

src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModelFactory.java

Lines changed: 2 additions & 2 deletions
@@ -22,12 +22,12 @@ public LMStudioChatModelFactory() {
 
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiChatModel(chatModel);
+        return createOpenAiChatModel(chatModel);
     }
 
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
-        return createLocalAiStreamingChatModel(chatModel);
+        return createOpenAiStreamingChatModel(chatModel);
    }
 
     @Override
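
Taken together, the GPT4All, Jan and LM Studio factories above now differ only in which base URL they supply; the shared helpers in LocalChatModelFactory do the rest. The following self-contained sketch shows that delegation shape; the class names and the LM Studio endpoint are illustrative assumptions, not code from this repository.

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;

// Base class owns the OpenAI-compatible builder; each local provider only
// supplies its base URL, mirroring the factories changed in this commit.
abstract class LocalFactorySketch {

    protected abstract String getModelUrl();

    protected ChatLanguageModel createOpenAiChatModel(String modelName) {
        return OpenAiChatModel.builder()
                .baseUrl(getModelUrl())
                .apiKey("na")          // placeholder key for local servers
                .modelName(modelName)
                .build();
    }
}

class LmStudioSketch extends LocalFactorySketch {
    @Override
    protected String getModelUrl() {
        return "http://localhost:1234/v1"; // assumed LM Studio default endpoint
    }
}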

src/main/java/com/devoxx/genie/service/MessageCreationService.java

Lines changed: 7 additions & 9 deletions
@@ -129,11 +129,6 @@ private void constructUserMessageWithFullContext(@NotNull ChatMessageContext cha
             stringBuilder.append("</Context>\n");
         }
 
-        stringBuilder
-            .append("<ProjectPath>")
-            .append(chatMessageContext.getProject().getBasePath())
-            .append("</ProjectPath>\n");
-
         stringBuilder.append("<UserPrompt>");
         stringBuilder.append(chatMessageContext.getUserPrompt());
         stringBuilder.append("</UserPrompt>");
@@ -177,10 +172,13 @@ private void constructUserMessageWithCombinedContext(@NotNull ChatMessageContext
             }
         }
 
-        stringBuilder
-            .append("<ProjectPath>\n")
-            .append(chatMessageContext.getProject().getBasePath())
-            .append("</ProjectPath>");
+        if (MCPService.isMCPEnabled()) {
+            // We'll add more info about the project path so tools can use this info.
+            stringBuilder
+                .append("<ProjectPath>\n")
+                .append(chatMessageContext.getProject().getBasePath())
+                .append("</ProjectPath>");
+        }
 
         // Add the user's prompt
         stringBuilder.append("<UserPrompt>\n").append(chatMessageContext.getUserPrompt()).append("\n</UserPrompt>\n\n");
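
The effect of the second hunk is that the <ProjectPath> block is only injected when MCP is enabled, since only MCP tools consume it. Here is a self-contained sketch of that behaviour; mcpEnabled and basePath stand in for MCPService.isMCPEnabled() and project.getBasePath(), which are internal to the plugin.

public class PromptAssemblySketch {

    static String buildPrompt(String userPrompt, String basePath, boolean mcpEnabled) {
        StringBuilder sb = new StringBuilder();

        if (mcpEnabled) {
            // The project path is only useful to MCP tools, so it is added conditionally.
            sb.append("<ProjectPath>\n").append(basePath).append("</ProjectPath>");
        }

        sb.append("<UserPrompt>\n").append(userPrompt).append("\n</UserPrompt>\n\n");
        return sb.toString();
    }

    public static void main(String[] args) {
        // With MCP enabled the prompt carries the project path; without it, only the user prompt.
        System.out.println(buildPrompt("Refactor this class", "/path/to/project", true));
        System.out.println(buildPrompt("Refactor this class", "/path/to/project", false));
    }
}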

src/main/java/com/devoxx/genie/service/prompt/memory/ChatMemoryManager.java

Lines changed: 4 additions & 4 deletions
@@ -254,10 +254,10 @@ private boolean shouldIncludeSystemMessage(@NotNull ChatMessageContext context)
         }
 
         // Check for Bedrock Mistral AI model
-        if (context.getChatLanguageModel() instanceof BedrockChatModel bedrockChatModel) {
-            // TODO Test if this refactoring still works because BedrockMistralChatModel is deprecated
-            return bedrockChatModel.provider().name().startsWith("mistral.");
-        }
+        // if (context.getChatLanguageModel() instanceof BedrockChatModel bedrockChatModel) {
+        //     // TODO Test if this refactoring still works because BedrockMistralChatModel is deprecated
+        //     return bedrockChatModel.provider().name().startsWith("mistral.");
+        // }
 
         return true;
     }
