Commit 17ce636

Add timeout params for PUT inference and POST inference/_stream
1 parent: 5263910

File tree

7 files changed: +114 -6 lines changed


output/openapi/elasticsearch-openapi.json

Lines changed: 32 additions & 0 deletions

output/openapi/elasticsearch-serverless-openapi.json

Lines changed: 16 additions & 0 deletions

output/schema/schema-serverless.json

Lines changed: 16 additions & 2 deletions

output/schema/schema.json

Lines changed: 32 additions & 4 deletions

output/typescript/types.ts

Lines changed: 2 additions & 0 deletions

specification/inference/put/PutRequest.ts

Lines changed: 8 additions & 0 deletions
@@ -21,6 +21,7 @@ import { InferenceEndpoint } from '@inference/_types/Services'
 import { TaskType } from '@inference/_types/TaskType'
 import { RequestBase } from '@_types/Base'
 import { Id } from '@_types/common'
+import { Duration } from '@_types/Time'

 /**
  * Create an inference endpoint.
@@ -60,6 +61,13 @@ export interface Request extends RequestBase {
      */
     inference_id: Id
   }
+  query_parameters: {
+    /**
+     * Specifies the amount of time to wait for the inference endpoint to be created.
+     * @server_default 30s
+     */
+    timeout?: Duration
+  }
   /** @codegen_name inference_config */
   body: InferenceEndpoint
 }
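
For reference, here is a minimal sketch (not part of this commit) of how the new timeout query parameter could be supplied when creating an inference endpoint over plain HTTP. The host, endpoint ID, and ELSER-style service settings are illustrative assumptions; the PUT /_inference/{task_type}/{inference_id} path shape and the 30s server default follow the PutRequest.ts definition above.

// Minimal sketch, assuming a local cluster and an ELSER-style endpoint; not part of this commit.
const ES_URL = 'http://localhost:9200' // assumed host

async function createInferenceEndpoint(): Promise<void> {
  // `timeout=45s` overrides the 30s server default documented in PutRequest.ts.
  const response = await fetch(
    `${ES_URL}/_inference/sparse_embedding/my-elser-endpoint?timeout=45s`,
    {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        // Hypothetical inference_config body; adjust to your service.
        service: 'elser',
        service_settings: { num_allocations: 1, num_threads: 1 }
      })
    }
  )
  if (!response.ok) {
    throw new Error(`PUT inference failed: ${response.status}`)
  }
  console.log(await response.json())
}

createInferenceEndpoint().catch(console.error)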

specification/inference/stream_inference/StreamInferenceRequest.ts

Lines changed: 8 additions & 0 deletions
@@ -20,6 +20,7 @@
 import { TaskType } from '@inference/_types/TaskType'
 import { RequestBase } from '@_types/Base'
 import { Id } from '@_types/common'
+import { Duration } from '@_types/Time'

 /**
  * Perform streaming inference.
@@ -55,6 +56,13 @@ export interface Request extends RequestBase {
      */
     task_type?: TaskType
   }
+  query_parameters: {
+    /**
+     * The amount of time to wait for the inference request to complete.
+     * @server_default 30s
+     */
+    timeout?: Duration
+  }
   body: {
     /**
      * The text on which you want to perform the inference task.
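
Similarly, a hedged sketch (again not part of this commit) of calling the streaming endpoint with the new timeout query parameter. The POST /_inference/{task_type}/{inference_id}/_stream path shape mirrors StreamInferenceRequest.ts; the host, endpoint ID, and the `input` body field name are assumptions for illustration.

// Minimal sketch, assuming a local cluster and a completion-type endpoint; not part of this commit.
const ES_URL = 'http://localhost:9200' // assumed host

async function streamInference(text: string): Promise<void> {
  // `timeout=60s` overrides the 30s server default documented in StreamInferenceRequest.ts.
  const response = await fetch(
    `${ES_URL}/_inference/completion/my-chat-endpoint/_stream?timeout=60s`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ input: text }) // assumed body field carrying the inference text
    }
  )
  if (!response.ok || response.body === null) {
    throw new Error(`streaming inference failed: ${response.status}`)
  }
  // Read the streamed response chunks as they arrive.
  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  for (;;) {
    const { value, done } = await reader.read()
    if (done) break
    process.stdout.write(decoder.decode(value, { stream: true }))
  }
}

streamInference('Write a haiku about request timeouts.').catch(console.error)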
