@@ -48,6 +48,11 @@ export interface MarkEdit {
    */
   editorAPI: TextEditable;
 
+  /**
+   * Retrieves a generative language model by name.
+   */
+  languageModel(name: LanguageModelName): LanguageModel | undefined;
+
   /**
    * CodeMirror modules used by MarkEdit.
    */
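The new `languageModel` accessor on `MarkEdit` is the entry point to the API added below. A minimal sketch of how an extension might look the model up and check whether it can be used, assuming the `MarkEdit` object exposed to extensions by the markedit-api package and the declarations added in this diff:

```ts
import { MarkEdit } from 'markedit-api'; // assumption: MarkEdit is exposed this way to extensions

async function checkModel(): Promise<void> {
  // Returns undefined when the named model is not supported on this host.
  const model = MarkEdit.languageModel('Apple-Foundation-Models');
  if (model === undefined) {
    return;
  }

  // `availability` resolves with a flag plus an optional reason string.
  const { isAvailable, unavailableReason } = await model.availability();
  if (!isAvailable) {
    console.log(`Language model unavailable: ${unavailableReason ?? 'unknown'}`);
  }
}
```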
@@ -247,6 +252,85 @@ export interface TextEditable {
   redo(): void;
 }
 
+/**
+ * Unique identifier of a generative language model.
+ *
+ * The only supported model at this time is [Apple's Foundation Models](https://developer.apple.com/documentation/foundationmodels).
+ */
+export type LanguageModelName = 'Apple-Foundation-Models';
+
+/**
+ * Response of a language model content generation.
+ *
+ * For non-streaming scenarios, `done` is always true.
+ */
+export type LanguageModelResponse = {
+  content?: string;
+  error?: string;
+  done: boolean;
+};
+
+/**
+ * Response notifier for language model streaming responses.
+ */
+export type LanguageModelStream = (response: LanguageModelResponse) => void;
+
+/**
+ * Interface for a generative language model.
+ */
+export interface LanguageModel {
+  /**
+   * Checks the language model availability.
+   */
+  availability(): Promise<LanguageModelAvailability>;
+
+  /**
+   * Creates a new language model session.
+   */
+  createSession(options?: { instructions?: string }): Promise<LanguageModelSession | undefined>;
+}
+
+/**
+ * Generative language model session.
+ */
+export interface LanguageModelSession {
+  /**
+   * Indicates whether a response is being generated.
+   */
+  isResponding(): Promise<boolean>;
+
+  /**
+   * Produces a response to a prompt.
+   */
+  respondTo(prompt: string, options?: LanguageModelGenerationOptions): Promise<LanguageModelResponse>;
+
+  /**
+   * Produces a response stream to a prompt.
+   *
+   * Each update delivers the latest snapshot of the content, not partial chunks.
+   */
+  streamResponseTo(
+    prompt: string,
+    options: LanguageModelGenerationOptions | LanguageModelStream,
+    stream?: LanguageModelStream,
+  ): void;
+}
+
+export type LanguageModelAvailability = {
+  isAvailable: boolean;
+  unavailableReason?: string;
+};
+
+export type LanguageModelGenerationOptions = {
+  sampling?: LanguageModelSampling;
+  temperature?: number;
+  maximumResponseTokens?: number;
+};
+
+export type LanguageModelSampling =
+  | { mode: 'greedy' }
+  | { mode: 'top-k' | 'top-p'; value: number; seed?: number };
+
 /**
  * Information of a file in the file system.
  */
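Putting the session API together: a sketch of one-shot and streaming generation against the declarations above, again assuming the `MarkEdit` object from markedit-api; the prompts, instructions, and option values are purely illustrative.

```ts
import { MarkEdit } from 'markedit-api'; // assumption: MarkEdit is exposed this way to extensions

async function generateText(): Promise<void> {
  // Assumption: availability was already confirmed as shown earlier.
  const model = MarkEdit.languageModel('Apple-Foundation-Models');
  const session = await model?.createSession({
    instructions: 'You are a concise writing assistant.',
  });
  if (session === undefined) {
    return;
  }

  // One-shot generation: the promise resolves with the full response,
  // so `done` is always true and either `content` or `error` is set.
  const reply = await session.respondTo('Write a haiku about Markdown.', {
    temperature: 0.7,
    maximumResponseTokens: 256,
    sampling: { mode: 'top-p', value: 0.9 },
  });
  console.log(reply.error ?? reply.content);

  // Streaming generation: each update carries the latest snapshot of the
  // content, not a partial chunk; `done` turns true on the final update.
  session.streamResponseTo('Write a limerick about CodeMirror.', (response) => {
    if (response.done) {
      console.log(response.error ?? response.content);
    }
  });
}
```

Per the declaration, `streamResponseTo` accepts either generation options or the stream callback in its second slot, so both `streamResponseTo(prompt, callback)` and `streamResponseTo(prompt, options, callback)` are valid call shapes.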