@@ -86,8 +86,10 @@ describe("RealtimeClient", () => {
 
     const headers = Object.fromEntries(response.headers.entries());
 
-    const shapeId = headers["electric-shape-id"];
-    const chunkOffset = headers["electric-chunk-last-offset"];
+    console.log(headers);
+
+    const shapeId = headers["electric-handle"];
+    const chunkOffset = headers["electric-offset"];
 
     expect(response.status).toBe(200);
     expect(response2.status).toBe(200);
@@ -96,15 +98,15 @@ describe("RealtimeClient", () => {
 
     // Okay, now we will do two live requests, and the second one should fail because of the concurrency limit
     const liveResponsePromise = client.streamRun(
-      `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`,
+      `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`,
       environment,
       run.id
     );
 
     const liveResponsePromise2 = new Promise<Response>((resolve) => {
       setTimeout(async () => {
         const response = await client.streamRun(
-          `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`,
+          `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`,
           environment,
           run.id
         );
@@ -200,8 +202,8 @@ describe("RealtimeClient", () => {
 
     const headers = Object.fromEntries(response.headers.entries());
 
-    const shapeId = headers["electric-shape-id"];
-    const chunkOffset = headers["electric-chunk-last-offset"];
+    const shapeId = headers["electric-handle"];
+    const chunkOffset = headers["electric-offset"];
 
     expect(response.status).toBe(200);
     expect(shapeId).toBeDefined();
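
Not part of the diff above, but a minimal sketch of how the renamed Electric pieces fit together: the server now reports the shape identity in the electric-handle response header and the last offset in electric-offset, and the live request echoes the handle back via the handle query parameter (previously shape_id). The header and parameter names come from the test itself; the fetch target, helper name, and fallback offset are illustrative assumptions.

async function followShape(shapeUrl: string): Promise<Response> {
  // Initial request: read the shape handle and last offset from the response headers.
  const initial = await fetch(`${shapeUrl}?offset=0_0`);
  const handle = initial.headers.get("electric-handle");
  const offset = initial.headers.get("electric-offset");

  // Live request: pass the handle (and last offset) back as query parameters,
  // mirroring the `offset=0_0&live=true&handle=...` URL built in the test above.
  return fetch(`${shapeUrl}?offset=${offset ?? "0_0"}&live=true&handle=${handle}`);
}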