diff --git a/ml.md b/ml.md
index bce656f..10c963e 100644
--- a/ml.md
+++ b/ml.md
@@ -13,6 +13,7 @@ Then, the user passes tensor inputs to the graph, computes the
     - interface `wasi:nn/errors@0.2.0-rc-2024-06-25`
     - interface `wasi:nn/inference@0.2.0-rc-2024-06-25`
     - interface `wasi:nn/graph@0.2.0-rc-2024-06-25`
+    - interface `wasi:nn/prompt@0.2.0-rc-2024-06-25`
@@ -301,3 +302,36 @@ range from simple to complex (e.g., URLs?) and caching mechanisms of various kin
+## Import interface `wasi:nn/prompt@0.2.0-rc-2024-06-25`
+
+A prompt "session" is encapsulated by a `context`.
+
+### Types
+
+#### `type graph`
+`graph`
+
+#### `resource context`
+
+A prompt "session."
+
+### Functions
+
+#### `init: func`
+
+Initialize a prompt session with a graph.
+
+Note that not all graphs are prompt-ready (see `inference`); this
+function may fail in this case.
+
+##### Params
+
+##### Return values
+
+#### `[method]context.compute: func`
+
+Compute an inference request with the given inputs.
+
+##### Params
+
+##### Return values
diff --git a/wit/wasi-nn.wit b/wit/wasi-nn.wit
index 872e8cd..67ec492 100644
--- a/wit/wasi-nn.wit
+++ b/wit/wasi-nn.wit
@@ -12,6 +12,7 @@ world ml {
   import tensor;
   import graph;
   import inference;
+  import prompt;
   import errors;
 }
 
@@ -132,6 +133,23 @@ interface inference {
   }
 }
 
+/// A prompt "session" is encapsulated by a `context`.
+interface prompt {
+  use graph.{graph};
+
+  /// Initialize a prompt session with a graph.
+  ///
+  /// Note that not all graphs are prompt-ready (see `inference`); this
+  /// function may fail in this case.
+  init: func(graph: graph) -> result;
+
+  /// A prompt "session."
+  resource context {
+    /// Compute an inference request with the given inputs.
+    compute: func(prompt: string) -> result;
+  }
+}
+
 /// TODO: create function-specific errors (https://github.com/WebAssembly/wasi-nn/issues/42)
 interface errors {
   enum error-code {
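For orientation, here is a minimal sketch of a consumer world that pulls the new interface into a component, written in WIT like the rest of this change. The `example:prompt-user` package name and the `chat` world are invented for illustration; only the interface names and versions come from this diff.

```wit
// Hypothetical consumer world, for illustration only: the package and
// world names are invented, while the imported interface names and
// versions are the ones added in this change.
package example:prompt-user;

world chat {
  // `prompt` uses `graph.{graph}`, so a consumer will typically want both.
  import wasi:nn/graph@0.2.0-rc-2024-06-25;
  import wasi:nn/prompt@0.2.0-rc-2024-06-25;
}
```

As far as this diff specifies it, the intended flow is: obtain a `graph` from `wasi:nn/graph` (for example with `load` or `load-by-name`), pass it to `init` to open a prompt session, and call `context.compute` with the prompt text. Note that both functions return a bare `result` in this draft, so `init` may fail for graphs that are not prompt-ready, and the shape of `compute`'s output is left open.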