@@ -167,6 +167,15 @@ def generate_write_requests(pyarrow_table):
167167 # To be safe, we'll aim for a soft limit of 7 MB.
168168 max_request_bytes = 7 * 1024 * 1024 # 7 MB
169169
170+ def _create_request(batches):
171+ """Helper to create an AppendRowsRequest from a list of batches."""
172+ combined_table = pa.Table.from_batches(batches)
173+ request = gapic_types.AppendRowsRequest()
174+ request.arrow_rows.rows.serialized_record_batch = (
175+ combined_table.serialize().to_pybytes()
176+ )
177+ return request
178+
170179 batches_in_request = []
171180 current_size = 0
172181
@@ -186,12 +195,7 @@ def generate_write_requests(pyarrow_table):
186195
187196 if current_size + batch_size > max_request_bytes and batches_in_request:
188197 # Combine collected batches and yield request
189- combined_table = pa.Table.from_batches(batches_in_request)
190- request = gapic_types.AppendRowsRequest()
191- request.arrow_rows.rows.serialized_record_batch = (
192- combined_table.serialize().to_pybytes()
193- )
194- yield request
198+ yield _create_request(batches_in_request)
195199
196200 # Reset for next request.
197201 batches_in_request = []
@@ -202,12 +206,7 @@ def generate_write_requests(pyarrow_table):
202206
203207 # Yield any remaining batches
204208 if batches_in_request:
205- combined_table = pa.Table.from_batches(batches_in_request)
206- request = gapic_types.AppendRowsRequest()
207- request.arrow_rows.rows.serialized_record_batch = (
208- combined_table.serialize().to_pybytes()
209- )
210- yield request
209+ yield _create_request(batches_in_request)
211210
212211
213212 def verify_result(client, table, futures):
0 commit comments