/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import type { RequestMetadata } from '@vscode/copilot-api';
import { ChatMessage } from '@vscode/prompt-tsx/dist/base/output/rawTypes';
import type { CancellationToken } from 'vscode';
import { ITokenizer, TokenizerType } from '../../../util/common/tokenizer';
import { AsyncIterableObject } from '../../../util/vs/base/common/async';
import { IntentParams, Source } from '../../chat/common/chatMLFetcher';
import { ChatLocation, ChatResponse } from '../../chat/common/commonTypes';
import { IEnvService } from '../../env/common/envService';
import { ILogService } from '../../log/common/logService';
import { FinishedCallback, OptionalChatRequestParams } from '../../networking/common/fetch';
import { Response } from '../../networking/common/fetcherService';
import { IChatEndpoint } from '../../networking/common/networking';
import { ChatCompletion } from '../../networking/common/openai';
import { IExperimentationService } from '../../telemetry/common/nullExperimentationService';
import { ITelemetryService, TelemetryProperties } from '../../telemetry/common/telemetry';
import { TelemetryData } from '../../telemetry/common/telemetryData';

/**
 * This endpoint represents the "Auto" model in the model picker.
 * It effectively wraps another endpoint and adds the auto-mode behavior on top of it.
 */
export class AutoChatEndpoint implements IChatEndpoint {
	public static readonly id = 'auto';
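	// Capabilities and metadata are mirrored from the wrapped endpoint; only `model` and
	// `showInModelPicker` are overridden for the "Auto" entry.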
	maxOutputTokens: number = this._wrappedEndpoint.maxOutputTokens;
	model: string = AutoChatEndpoint.id;
	supportsToolCalls: boolean = this._wrappedEndpoint.supportsToolCalls;
	supportsVision: boolean = this._wrappedEndpoint.supportsVision;
	supportsPrediction: boolean = this._wrappedEndpoint.supportsPrediction;
	showInModelPicker: boolean = true;
	isPremium?: boolean | undefined = this._wrappedEndpoint.isPremium;
	multiplier?: number | undefined = this._wrappedEndpoint.multiplier;
	restrictedToSkus?: string[] | undefined = this._wrappedEndpoint.restrictedToSkus;
	isDefault: boolean = this._wrappedEndpoint.isDefault;
	isFallback: boolean = this._wrappedEndpoint.isFallback;
	policy: 'enabled' | { terms: string } = this._wrappedEndpoint.policy;
	urlOrRequestMetadata: string | RequestMetadata = this._wrappedEndpoint.urlOrRequestMetadata;
	modelMaxPromptTokens: number = this._wrappedEndpoint.modelMaxPromptTokens;
	name: string = this._wrappedEndpoint.name;
	version: string = this._wrappedEndpoint.version;
	family: string = this._wrappedEndpoint.family;
	tokenizer: TokenizerType = this._wrappedEndpoint.tokenizer;

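	/**
	 * @param _wrappedEndpoint The concrete endpoint that actually serves the requests.
	 * @param _sessionToken The session token attached to requests via the `Copilot-Session-Token` header.
	 */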
	constructor(
		private readonly _wrappedEndpoint: IChatEndpoint,
		private readonly _sessionToken: string
	) { }

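	/**
	 * Merges the wrapped endpoint's extra headers with the auto-mode session token header.
	 */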
	getExtraHeaders(): Record<string, string> {
		return {
			...(this._wrappedEndpoint.getExtraHeaders?.() || {}),
			'Copilot-Session-Token': this._sessionToken
		};
	}

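	/**
	 * Delegates streaming response processing to the wrapped endpoint.
	 */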
	processResponseFromChatEndpoint(telemetryService: ITelemetryService, logService: ILogService, response: Response, expectedNumChoices: number, finishCallback: FinishedCallback, telemetryData: TelemetryData, cancellationToken?: CancellationToken): Promise<AsyncIterableObject<ChatCompletion>> {
		return this._wrappedEndpoint.processResponseFromChatEndpoint(telemetryService, logService, response, expectedNumChoices, finishCallback, telemetryData, cancellationToken);
	}
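	/**
	 * Delegates chat policy acceptance to the wrapped endpoint.
	 */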
	acceptChatPolicy(): Promise<boolean> {
		return this._wrappedEndpoint.acceptChatPolicy();
	}
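	/**
	 * Clones the wrapped endpoint with the given prompt token budget. Note that this returns the
	 * wrapped endpoint's clone rather than a new AutoChatEndpoint.
	 */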
	cloneWithTokenOverride(modelMaxPromptTokens: number): IChatEndpoint {
		return this._wrappedEndpoint.cloneWithTokenOverride(modelMaxPromptTokens);
	}
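	/**
	 * Delegates tokenizer acquisition to the wrapped endpoint.
	 */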
	acquireTokenizer(): ITokenizer {
		return this._wrappedEndpoint.acquireTokenizer();
	}

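	/**
	 * Forwards the chat request to the wrapped endpoint unchanged.
	 */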
	async makeChatRequest(debugName: string, messages: ChatMessage[], finishedCb: FinishedCallback | undefined, token: CancellationToken, location: ChatLocation, source?: Source, requestOptions?: Omit<OptionalChatRequestParams, 'n'>, userInitiatedRequest?: boolean, telemetryProperties?: TelemetryProperties, intentParams?: IntentParams): Promise<ChatResponse> {
		return this._wrappedEndpoint.makeChatRequest(debugName, messages, finishedCb, token, location, source, requestOptions, userInitiatedRequest, telemetryProperties, intentParams);
	}
}
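// Example (hypothetical usage; in practice the wrapped endpoint and session token come from the
// surrounding endpoint/auth services, and the names below are illustrative only):
//
//   const autoEndpoint = new AutoChatEndpoint(baseEndpoint, sessionToken);
//   const headers = autoEndpoint.getExtraHeaders(); // includes 'Copilot-Session-Token'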

/**
 * Checks whether the auto chat mode is enabled.
 * @param expService The experimentation service used to read the auto mode treatment variable
 * @param envService The environment service used to check for a pre-release build
 * @returns True if the auto mode is enabled, false otherwise
 */
export function isAutoModeEnabled(expService: IExperimentationService, envService: IEnvService): boolean {
	return !!expService.getTreatmentVariable<boolean>('vscode', 'copilotchatcapiautomode') || envService.isPreRelease();
}