@@ -1203,8 +1203,58 @@ class IngestionClient(
     execute[ListTransformationsResponse](request, requestOptions)
   }
 
-  /** Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the
-    * observability endpoints.
+  /** Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the
+    * `watch` parameter; for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to
+    * see the status of your task. If you want to leverage the [pre-indexing data
+    * transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/),
+    * this is the recommended way of ingesting your records. This method is similar to `pushTask`, but requires an
+    * `indexName` instead of a `taskID`. If zero or many tasks are found, an error will be returned.
+    *
+    * Required API Key ACLs:
+    *   - addObject
+    *   - deleteIndex
+    *   - editSettings
+    *
+    * @param indexName
+    *   Name of the index on which to perform the operation.
+    * @param watch
+    *   When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished
+    *   before responding.
+    */
+  def push(
+      indexName: String,
+      pushTaskPayload: PushTaskPayload,
+      watch: Option[Boolean] = None,
+      requestOptions: Option[RequestOptions] = None
+  )(implicit ec: ExecutionContext): Future[WatchResponse] = Future {
+    requireNotNull(indexName, "Parameter `indexName` is required when calling `push`.")
+    requireNotNull(pushTaskPayload, "Parameter `pushTaskPayload` is required when calling `push`.")
+
+    val request = HttpRequest
+      .builder()
+      .withMethod("POST")
+      .withPath(s"/1/push/${escape(indexName)}")
+      .withBody(pushTaskPayload)
+      .withQueryParameter("watch", watch)
+      .build()
+    execute[WatchResponse](
+      request,
+      Some(
+        RequestOptions(
+          writeTimeout = Some(Duration(180000, TimeUnit.MILLISECONDS)),
+          readTimeout = Some(Duration(180000, TimeUnit.MILLISECONDS)),
+          connectTimeout = Some(Duration(180000, TimeUnit.MILLISECONDS))
+        ) + requestOptions
+      )
+    )
+  }
+
+  /** Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the
+    * `watch` parameter; for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to
+    * see the status of your task. If you want to leverage the [pre-indexing data
+    * transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/),
+    * this is the recommended way of ingesting your records. This method is similar to `push`, but requires a `taskID`
+    * instead of an `indexName`, which is useful when many `destinations` target the same `indexName`.
     *
     * Required API Key ACLs:
     *   - addObject
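
For orientation, here is a minimal usage sketch of the new `push` method. It is not part of the diff; the client construction, the region value, and the pre-built `payload` are illustrative assumptions.

  import scala.concurrent.{Await, ExecutionContext}
  import scala.concurrent.duration.Duration

  implicit val ec: ExecutionContext = ExecutionContext.global

  // Assumed client setup; credentials and region are placeholders.
  val client = IngestionClient(appId = "YOUR_APP_ID", apiKey = "YOUR_API_KEY", region = "us")

  // `payload` is assumed to be a PushTaskPayload built elsewhere (the Search API
  // `batch`-style body described in the docstring).
  val pushed = client.push(
    indexName = "movies",
    pushTaskPayload = payload,
    watch = Some(true) // synchronous: the Future completes once ingestion has finished
  )

  val watchResponse: WatchResponse = Await.result(pushed, Duration.Inf)

Leaving `watch` out returns as soon as the task is accepted, in which case the status can be followed through the observability endpoints mentioned in the docstring.
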
@@ -1213,8 +1263,6 @@ class IngestionClient(
     *
     * @param taskID
     *   Unique identifier of a task.
-    * @param pushTaskPayload
-    *   Request body of a Search API `batch` request that will be pushed in the Connectors pipeline.
     * @param watch
     *   When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished
     *   before responding.
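
When several task `destinations` write to the same `indexName`, the docstring above points to `pushTask` instead. A hypothetical call, reusing the client and payload from the sketch above and assuming the signature mirrors `push` with a `taskID` in place of the `indexName` (the UUID is a placeholder):

  // Assumed to mirror `push`: a taskID replaces the indexName.
  val byTask = client.pushTask(
    taskID = "a1b2c3d4-0000-0000-0000-000000000000",
    pushTaskPayload = payload
    // no `watch`: the call stays asynchronous; check progress via the observability endpoints
  )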