Skip to content

Commit 98c1dfc

Browse files
authored
Consume changed API shape (#910)
* Consume updated API
* Bump engine
1 parent d71a4e8 commit 98c1dfc

File tree

10 files changed

+57
-58
lines changed

10 files changed

+57
-58
lines changed

package-lock.json

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"icon": "assets/copilot.png",
2525
"pricing": "Trial",
2626
"engines": {
27-
"vscode": "^1.104.0-20250828",
27+
"vscode": "^1.104.0-20250905",
2828
"npm": ">=9.0.0",
2929
"node": ">=22.14.0"
3030
},
@@ -3969,7 +3969,6 @@
39693969
"web": "vscode-test-web --headless --extensionDevelopmentPath=. .",
39703970
"test:prompt": "mocha \"src/extension/completions-core/prompt/**/test/**/*.test.{ts,tsx}\"",
39713971
"test:lib": "mocha \"src/extension/completions-core/lib/src/**/*.test.{ts,tsx}\""
3972-
39733972
},
39743973
"devDependencies": {
39753974
"@azure/identity": "4.9.1",
@@ -4033,8 +4032,8 @@
40334032
"mobx": "^6.13.7",
40344033
"mobx-react-lite": "^4.1.0",
40354034
"mocha": "^11.7.1",
4036-
"mocha-junit-reporter": "^2.2.1",
4037-
"mocha-multi-reporters": "^1.5.1",
4035+
"mocha-junit-reporter": "^2.2.1",
4036+
"mocha-multi-reporters": "^1.5.1",
40384037
"monaco-editor": "0.44.0",
40394038
"npm-run-all": "^4.1.5",
40404039
"open": "^10.1.2",
@@ -4092,4 +4091,4 @@
40924091
"string_decoder": "npm:[email protected]",
40934092
"node-gyp": "npm:[email protected]"
40944093
}
4095-
}
4094+
}

src/extension/byok/vscode-node/anthropicProvider.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
*--------------------------------------------------------------------------------------------*/
55

66
import Anthropic from '@anthropic-ai/sdk';
7-
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelChatRequestHandleOptions, LanguageModelResponsePart2, LanguageModelTextPart, LanguageModelToolCallPart, Progress } from 'vscode';
7+
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelResponsePart2, LanguageModelTextPart, LanguageModelToolCallPart, Progress, ProvideLanguageModelChatResponseOptions } from 'vscode';
88
import { ChatFetchResponseType, ChatLocation } from '../../../platform/chat/common/commonTypes';
99
import { ILogService } from '../../../platform/log/common/logService';
1010
import { IResponseDelta, OpenAiFunctionTool } from '../../../platform/networking/common/fetch';
@@ -57,7 +57,7 @@ export class AnthropicLMProvider implements BYOKModelProvider<LanguageModelChatI
5757
}
5858
}
5959

60-
async prepareLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<LanguageModelChatInformation[]> {
60+
async provideLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<LanguageModelChatInformation[]> {
6161
if (!this._apiKey) { // If we don't have the API key it might just be in storage, so we try to read it first
6262
this._apiKey = await this._byokStorageService.getAPIKey(AnthropicLMProvider.providerName);
6363
}
@@ -79,7 +79,7 @@ export class AnthropicLMProvider implements BYOKModelProvider<LanguageModelChatI
7979
}
8080
}
8181

82-
async provideLanguageModelChatResponse(model: LanguageModelChatInformation, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: LanguageModelChatRequestHandleOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
82+
async provideLanguageModelChatResponse(model: LanguageModelChatInformation, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: ProvideLanguageModelChatResponseOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
8383
if (!this._anthropicAPIClient) {
8484
return;
8585
}

src/extension/byok/vscode-node/baseOpenAICompatibleProvider.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
* Licensed under the MIT License. See License.txt in the project root for license information.
44
*--------------------------------------------------------------------------------------------*/
55

6-
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelChatRequestHandleOptions, LanguageModelResponsePart2, Progress } from 'vscode';
6+
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelResponsePart2, Progress, ProvideLanguageModelChatResponseOptions } from 'vscode';
77
import { IChatModelInformation, ModelSupportedEndpoint } from '../../../platform/endpoint/common/endpointProvider';
88
import { ILogService } from '../../../platform/log/common/logService';
99
import { IFetcherService } from '../../../platform/networking/common/fetcherService';
@@ -62,7 +62,7 @@ export abstract class BaseOpenAICompatibleLMProvider implements BYOKModelProvide
6262
}
6363
}
6464

65-
async prepareLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<LanguageModelChatInformation[]> {
65+
async provideLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<LanguageModelChatInformation[]> {
6666
if (!this._apiKey && this.authType === BYOKAuthType.GlobalApiKey) { // If we don't have the API key it might just be in storage, so we try to read it first
6767
this._apiKey = await this._byokStorageService.getAPIKey(this._name);
6868
}
@@ -83,7 +83,7 @@ export abstract class BaseOpenAICompatibleLMProvider implements BYOKModelProvide
8383
return [];
8484
}
8585
}
86-
async provideLanguageModelChatResponse(model: LanguageModelChatInformation, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: LanguageModelChatRequestHandleOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
86+
async provideLanguageModelChatResponse(model: LanguageModelChatInformation, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: ProvideLanguageModelChatResponseOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
8787
const openAIChatEndpoint = await this.getEndpointImpl(model);
8888
return this._lmWrapper.provideLanguageModelResponse(openAIChatEndpoint, messages, options, options.requestInitiator, progress, token);
8989
}

src/extension/byok/vscode-node/customOAIProvider.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
* Licensed under the MIT License. See License.txt in the project root for license information.
44
*--------------------------------------------------------------------------------------------*/
55

6-
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelChatRequestHandleOptions, LanguageModelResponsePart2, Progress, QuickPickItem, window } from 'vscode';
6+
import { CancellationToken, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelResponsePart2, Progress, ProvideLanguageModelChatResponseOptions, QuickPickItem, window } from 'vscode';
77
import { ConfigKey, IConfigurationService } from '../../../platform/configuration/common/configurationService';
88
import { ILogService } from '../../../platform/log/common/logService';
99
import { IExperimentationService } from '../../../platform/telemetry/common/nullExperimentationService';
@@ -134,7 +134,7 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
134134
return baseInfo;
135135
}
136136

137-
async prepareLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<CustomOAIModelInfo[]> {
137+
async provideLanguageModelChatInformation(options: { silent: boolean }, token: CancellationToken): Promise<CustomOAIModelInfo[]> {
138138
try {
139139
let knownModels = await this.getModelsWithAPIKeys(options.silent);
140140
if (Object.keys(knownModels).length === 0 && !options.silent) {
@@ -149,7 +149,7 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
149149
}
150150
}
151151

152-
async provideLanguageModelChatResponse(model: CustomOAIModelInfo, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: LanguageModelChatRequestHandleOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
152+
async provideLanguageModelChatResponse(model: CustomOAIModelInfo, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: ProvideLanguageModelChatResponseOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<any> {
153153
const requireAPIKey = this.requiresAPIKey(model.id);
154154
let apiKey: string | undefined;
155155
if (requireAPIKey) {

src/extension/conversation/vscode-node/languageModelAccess.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -86,15 +86,15 @@ export class LanguageModelAccess extends Disposable implements IExtensionContrib
8686

8787
private async _registerChatProvider(): Promise<void> {
8888
const provider: vscode.LanguageModelChatProvider = {
89-
onDidChangeLanguageModelInformation: this._onDidChange.event,
90-
prepareLanguageModelChatInformation: this._prepareLanguageModelChat.bind(this),
89+
onDidChangeLanguageModelChatInformation: this._onDidChange.event,
90+
provideLanguageModelChatInformation: this._provideLanguageModelChatInfo.bind(this),
9191
provideLanguageModelChatResponse: this._provideLanguageModelChatResponse.bind(this),
9292
provideTokenCount: this._provideTokenCount.bind(this)
9393
};
9494
this._register(vscode.lm.registerLanguageModelChatProvider('copilot', provider));
9595
}
9696

97-
private async _prepareLanguageModelChat(options: { silent: boolean }, token: vscode.CancellationToken): Promise<vscode.LanguageModelChatInformation[]> {
97+
private async _provideLanguageModelChatInfo(options: { silent: boolean }, token: vscode.CancellationToken): Promise<vscode.LanguageModelChatInformation[]> {
9898
const session = await this._getAuthSession();
9999
if (!session) {
100100
this._currentModels = [];
@@ -175,7 +175,7 @@ export class LanguageModelAccess extends Disposable implements IExtensionContrib
175175
private async _provideLanguageModelChatResponse(
176176
model: vscode.LanguageModelChatInformation,
177177
messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>,
178-
options: vscode.LanguageModelChatRequestHandleOptions,
178+
options: vscode.ProvideLanguageModelChatResponseOptions,
179179
progress: vscode.Progress<vscode.LanguageModelResponsePart2>,
180180
token: vscode.CancellationToken
181181
): Promise<any> {
@@ -272,7 +272,7 @@ export class CopilotLanguageModelWrapper extends Disposable {
272272
super();
273273
}
274274

275-
private async _provideLanguageModelResponse(_endpoint: IChatEndpoint, _messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, _options: vscode.LanguageModelChatRequestHandleOptions, extensionId: string, callback: FinishedCallback, token: vscode.CancellationToken): Promise<any> {
275+
private async _provideLanguageModelResponse(_endpoint: IChatEndpoint, _messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, _options: vscode.ProvideLanguageModelChatResponseOptions, extensionId: string, callback: FinishedCallback, token: vscode.CancellationToken): Promise<any> {
276276

277277
const extensionInfo = extensionId === 'core' ? { packageJSON: { version: this._envService.vscodeVersion } } : vscode.extensions.getExtension(extensionId, true);
278278
if (!extensionInfo || typeof extensionInfo.packageJSON.version !== 'string') {
@@ -407,7 +407,7 @@ export class CopilotLanguageModelWrapper extends Disposable {
407407
);
408408
}
409409

410-
async provideLanguageModelResponse(endpoint: IChatEndpoint, messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, options: vscode.LanguageModelChatRequestHandleOptions, extensionId: string, progress: vscode.Progress<LMResponsePart>, token: vscode.CancellationToken): Promise<any> {
410+
async provideLanguageModelResponse(endpoint: IChatEndpoint, messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, options: vscode.ProvideLanguageModelChatResponseOptions, extensionId: string, progress: vscode.Progress<LMResponsePart>, token: vscode.CancellationToken): Promise<any> {
411411
const finishCallback: FinishedCallback = async (_text, index, delta): Promise<undefined> => {
412412
if (delta.text) {
413413
progress.report(new vscode.LanguageModelTextPart(delta.text));

src/extension/conversation/vscode-node/test/languageModelAccess.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ suite('CopilotLanguageModelWrapper', () => {
3939

4040
const runTest = async (messages: vscode.LanguageModelChatMessage[], tools?: vscode.LanguageModelChatTool[], errMsg?: string) => {
4141
await assert.rejects(
42-
() => wrapper.provideLanguageModelResponse(endpoint, messages, { tools, requestInitiator: 'unknown' }, vscode.extensions.all[0].id, null!, null!),
42+
() => wrapper.provideLanguageModelResponse(endpoint, messages, { tools, requestInitiator: 'unknown', toolMode: vscode.LanguageModelChatToolMode.Auto }, vscode.extensions.all[0].id, null!, null!),
4343
err => {
4444
errMsg ??= 'Invalid request';
4545
assert.ok(err instanceof Error, 'expected an Error');
@@ -66,7 +66,7 @@ suite('CopilotLanguageModelWrapper', () => {
6666
wrapper = instaService.createInstance(CopilotLanguageModelWrapper);
6767
});
6868
const runTest = async (messages: vscode.LanguageModelChatMessage[], tools?: vscode.LanguageModelChatTool[]) => {
69-
await wrapper.provideLanguageModelResponse(endpoint, messages, { tools, requestInitiator: 'unknown' }, vscode.extensions.all[0].id, null!, null!);
69+
await wrapper.provideLanguageModelResponse(endpoint, messages, { tools, requestInitiator: 'unknown', toolMode: vscode.LanguageModelChatToolMode.Auto }, vscode.extensions.all[0].id, null!, null!);
7070
};
7171

7272
test('simple', async () => {

src/extension/test/vscode-node/sanity.sanity-test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ suite('Copilot Chat Sanity Test', function () {
157157
await realInstaAccessor.invokeFunction(async (accessor) => {
158158

159159
const r = vscode.lm.registerLanguageModelChatProvider('test', new class implements vscode.LanguageModelChatProvider {
160-
async prepareLanguageModelChatInformation(options: { silent: boolean }, token: vscode.CancellationToken): Promise<vscode.LanguageModelChatInformation[]> {
160+
async provideLanguageModelChatInformation(options: { silent: boolean }, token: vscode.CancellationToken): Promise<vscode.LanguageModelChatInformation[]> {
161161
return [{
162162
id: 'test',
163163
name: 'test',
@@ -169,7 +169,7 @@ suite('Copilot Chat Sanity Test', function () {
169169
capabilities: {}
170170
}];
171171
}
172-
async provideLanguageModelChatResponse(model: vscode.LanguageModelChatInformation, messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, options: vscode.LanguageModelChatRequestHandleOptions, progress: vscode.Progress<vscode.LanguageModelResponsePart2>, token: vscode.CancellationToken): Promise<any> {
172+
async provideLanguageModelChatResponse(model: vscode.LanguageModelChatInformation, messages: Array<vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2>, options: vscode.ProvideLanguageModelChatResponseOptions, progress: vscode.Progress<vscode.LanguageModelResponsePart2>, token: vscode.CancellationToken): Promise<any> {
173173
throw new Error('Method not implemented.');
174174
}
175175
async provideTokenCount(model: vscode.LanguageModelChatInformation, text: string | vscode.LanguageModelChatMessage | vscode.LanguageModelChatMessage2, token: vscode.CancellationToken): Promise<number> {

0 commit comments

Comments (0)