Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[LLM]Mistral implementation #190

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,4 @@ product.overrides.json
*.snap.actual
.vscode-test
.tmp/
.tool-versions
27 changes: 24 additions & 3 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@
"@google/generative-ai": "^0.21.0",
"@microsoft/1ds-core-js": "^3.2.13",
"@microsoft/1ds-post-js": "^3.2.13",
"@mistralai/mistralai": "^1.3.5",
"@parcel/watcher": "2.1.0",
"@rrweb/record": "^2.0.0-alpha.17",
"@rrweb/types": "^2.0.0-alpha.17",
Expand Down Expand Up @@ -130,7 +131,8 @@
"vscode-regexpp": "^3.1.0",
"vscode-textmate": "9.1.0",
"yauzl": "^3.0.0",
"yazl": "^2.4.3"
"yazl": "^2.4.3",
"zod": "^3.24.1"
},
"devDependencies": {
"@playwright/test": "^1.46.1",
Expand Down
62 changes: 62 additions & 0 deletions src/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

42 changes: 37 additions & 5 deletions src/vs/platform/void/common/voidSettingsTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,18 @@ export const defaultGeminiModels = modelInfoOfDefaultNames([
'gemini-1.0-pro'
])


// Default Mistral model names shown to the user (same pattern as the other
// providers above, e.g. defaultGeminiModels).
// NOTE(review): 'mistral-embed' is an embeddings model, not a chat model —
// confirm it belongs in this chat-model list.
// Quotes switched to single to match this file's convention.
export const defaultMistralModels = modelInfoOfDefaultNames([
	'open-codestral-mamba',
	'open-mistral-nemo',
	'pixtral-12b-2409',
	'mistral-large-latest',
	'pixtral-large-latest',
	'ministral-3b-latest',
	'ministral-8b-latest',
	'mistral-small-latest',
	'codestral-latest',
	'mistral-embed'
])

// export const parseMaxTokensStr = (maxTokensStr: string) => {
// // parse the string but only if the full string is a valid number, eg parseInt('100abc') should return NaN
Expand Down Expand Up @@ -137,6 +148,9 @@ export const defaultProviderSettings = {
apiKey: '',
},
groq: {
apiKey: '',
},
mistral: {
apiKey: ''
}
} as const
Expand Down Expand Up @@ -216,6 +230,11 @@ export const displayInfoOfProviderName = (providerName: ProviderName): DisplayIn
title: 'Groq',
}
}
else if (providerName === 'mistral') {
return {
title: 'Mistral',
}
}

throw new Error(`descOfProviderName: Unknown provider name: "${providerName}"`)
}
Expand All @@ -234,16 +253,18 @@ export const displayInfoOfSettingName = (providerName: ProviderName, settingName
providerName === 'openRouter' ? 'sk-or-key...' : // sk-or-v1-key
providerName === 'gemini' ? 'key...' :
providerName === 'groq' ? 'gsk_key...' :
providerName === 'openAICompatible' ? 'sk-key...' :
'(never)',
providerName === 'mistral' ? 'api-key...' :
providerName === 'openAICompatible' ? 'sk-key...' :
'(never)',

subTextMd: providerName === 'anthropic' ? 'Get your [API Key here](https://console.anthropic.com/settings/keys).' :
providerName === 'openAI' ? 'Get your [API Key here](https://platform.openai.com/api-keys).' :
providerName === 'openRouter' ? 'Get your [API Key here](https://openrouter.ai/settings/keys).' :
providerName === 'gemini' ? 'Get your [API Key here](https://aistudio.google.com/apikey).' :
providerName === 'groq' ? 'Get your [API Key here](https://console.groq.com/keys).' :
providerName === 'openAICompatible' ? 'Add any OpenAI-Compatible endpoint.' :
undefined,
providerName === 'mistral' ? 'Get your [API Key here](https://console.mistral.ai/api-keys/).' :
providerName === 'openAICompatible' ? undefined :
undefined,
}
}
else if (settingName === 'endpoint') {
Expand Down Expand Up @@ -285,6 +306,8 @@ const defaultCustomSettings: Record<CustomSettingName, undefined> = {
endpoint: undefined,
}



export const voidInitModelOptions = {
anthropic: {
models: defaultAnthropicModels,
Expand All @@ -307,6 +330,9 @@ export const voidInitModelOptions = {
groq: {
models: defaultGroqModels,
},
mistral: {
models: defaultMistralModels,
}
}


Expand Down Expand Up @@ -354,6 +380,12 @@ export const defaultSettingsOfProvider: SettingsOfProvider = {
...voidInitModelOptions.openAICompatible,
_enabled: undefined,
},
mistral: {
...defaultCustomSettings,
...defaultProviderSettings.mistral,
...voidInitModelOptions.mistral,
_enabled: undefined,
}
}


Expand Down
109 changes: 109 additions & 0 deletions src/vs/platform/void/electron-main/llmMessage/mistral.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Glass Devtools, Inc. All rights reserved.
* Mistral implementation by Jérôme Commaret (https://github.com/jcommaret)
* Void Editor additions licensed under the AGPL 3.0 License.
*--------------------------------------------------------------------------------------------*/

import { Mistral } from '@mistralai/mistralai';
import { _InternalSendLLMMessageFnType } from '../../common/llmMessageTypes.js';

// Shape of a chat message this file sends to the Mistral SDK.
// NOTE(review): the Mistral chat API also accepts a 'system' role; this local
// type only models the roles currently produced by sendMistralMsg below.
interface MistralMessage {
	role: 'user' | 'assistant';
	content: string;
}

// Minimal local typing for one streamed event yielded by mistral.chat.stream().
// The SDK wraps each OpenAI-style completion chunk in a { data: ... } envelope;
// only the fields read by sendMistralMsg are guaranteed-accurate here.
interface MistralChunk {
	data: {
		id: string;
		object: string;
		created: number;
		model: string;
		choices: Array<{
			index: number;
			// Incremental payload: 'content' is absent on role-only deltas.
			delta: {
				content?: string;
				role?: string;
			};
			finishReason: string | null;
		}>;
	};
}

// Mistral — streams a chat completion via the official @mistralai/mistralai SDK,
// forwarding incremental text to onText and the final concatenation to
// onFinalMessage. All failures are reported through onError (never thrown).
export const sendMistralMsg: _InternalSendLLMMessageFnType = async ({ messages, onText, onFinalMessage, onError, settingsOfProvider, modelName, _setAborter }) => {
	let fullText = '';

	const thisConfig = settingsOfProvider.mistral;

	if (!thisConfig.apiKey) {
		onError({ message: 'Mistral API key not configured.', fullError: new Error('No API key') });
		return;
	}

	const mistral = new Mistral({
		apiKey: thisConfig.apiKey
	});

	try {
		// Check if there are messages to process
		if (!messages || messages.length === 0) {
			onError({ message: 'No messages to process.', fullError: new Error('No messages provided') });
			return;
		}

		// Convert messages for Mistral. The Mistral chat API accepts the
		// 'system' role directly, so system messages are forwarded rather than
		// dropped (dropping them would silently lose the system prompt).
		const mistralMessages = messages.map(msg => ({
			role: (msg.role === 'system' ? 'system' : msg.role === 'assistant' ? 'assistant' : 'user') as 'system' | 'user' | 'assistant',
			content: msg.content.trim()
		}));

		// Mistral requires the conversation to end with a user turn; append a
		// nudge if the last message is an assistant or system message.
		if (mistralMessages[mistralMessages.length - 1].role !== 'user') {
			mistralMessages.push({
				role: 'user',
				content: 'Continue.'
			});
		}

		// Real cancellation: the SDK forwards fetchOptions to the underlying
		// fetch, so aborting the controller tears down the HTTP stream.
		const controller = new AbortController();
		_setAborter(() => controller.abort());

		const stream = await mistral.chat.stream({
			model: modelName,
			messages: mistralMessages,
			// NOTE(review): sampling params are hard-coded; consider surfacing
			// them in settings like other providers — confirm desired defaults.
			temperature: 0.7,
			maxTokens: 2048
		}, { fetchOptions: { signal: controller.signal } });

		for await (const chunk of stream) {
			// Guard against non-object keep-alive events before reading .data.
			if (typeof chunk === 'object' && chunk && 'data' in chunk) {
				const { data } = chunk as MistralChunk;
				const newText = data.choices?.[0]?.delta?.content;
				if (newText) {
					fullText += newText;
					onText({ newText, fullText });
				}
			}
		}

		if (!fullText) {
			onError({ message: 'No response received from Mistral.', fullError: new Error('No response content') });
			return;
		}

		onFinalMessage({ fullText });
	} catch (error: any) {
		// A deliberate abort (via _setAborter) is not an error condition.
		if (error?.name === 'AbortError') return;
		const errorMessage = error.message || JSON.stringify(error);
		onError({
			message: `Mistral Error: ${errorMessage}`,
			fullError: error
		});
	}
};
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { sendOllamaMsg } from './ollama.js';
import { sendOpenAIMsg } from './openai.js';
import { sendGeminiMsg } from './gemini.js';
import { sendGroqMsg } from './groq.js';
import { sendMistralMsg } from './mistral.js';

export const sendLLMMessage = ({
messages,
Expand Down Expand Up @@ -96,6 +97,9 @@ export const sendLLMMessage = ({
case 'groq':
sendGroqMsg({ messages, onText, onFinalMessage, onError, settingsOfProvider, modelName, _setAborter, providerName });
break;
case 'mistral':
sendMistralMsg({ messages, onText, onFinalMessage, onError, settingsOfProvider, modelName, _setAborter, providerName });
break;
default:
onError({ message: `Error: Void provider was "${providerName}", which is not recognized.`, fullError: null })
break;
Expand Down