
Commit 243f704

aws: compress: Add error logs

Signed-off-by: Hiroshi Hatake <[email protected]>

1 parent 0afb495

1 file changed: +4 −0 lines

src/aws/compression/arrow/compress.c

Lines changed: 4 additions & 0 deletions
@@ -185,6 +185,7 @@ static GArrowResizableBuffer* table_to_parquet_buffer(GArrowTable *table)
                                            &error);
     g_object_unref(schema);
     if (writer == NULL) {
+        flb_error("[aws][compress] Failed to create parquet writer: %s", error->message);
         g_error_free(error);
         g_object_unref(buffer);
         g_object_unref(sink);
@@ -194,6 +195,7 @@ static GArrowResizableBuffer* table_to_parquet_buffer(GArrowTable *table)
     /* Write the entire table to the Parquet file buffer */
     success = gparquet_arrow_file_writer_write_table(writer, table, 0, &error);
     if (!success) {
+        flb_error("[aws][compress] Failed to write table to parquet buffer: %s", error->message);
         g_error_free(error);
         g_object_unref(buffer);
         g_object_unref(sink);
@@ -228,12 +230,14 @@ int out_s3_compress_parquet(void *json, size_t size, void **out_buf, size_t *out

     table = parse_json((uint8_t *) json, size);
     if (table == NULL) {
+        flb_error("[aws][compress] Failed to parse JSON into Arrow Table for Parquet conversion");
         return -1;
     }

     buffer = table_to_parquet_buffer(table);
     g_object_unref(table);
     if (buffer == NULL) {
+        flb_error("[aws][compress] Failed to convert Arrow Table into Parquet buffer");
         return -1;
     }

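The added calls follow the standard GLib GError convention that the Arrow GLib / Parquet GLib APIs use: failure is signalled through the return value (a NULL object or FALSE), the human-readable reason is carried in error->message, and the caller logs it and then releases the GError with g_error_free(). Below is a minimal, self-contained sketch of that pattern, not the commit's code: g_file_get_contents() stands in for the Arrow/Parquet calls, and g_printerr() stands in for Fluent Bit's flb_error() macro.

/* Sketch of the GError "check, log error->message, g_error_free" pattern.
 * Assumption: built against GLib only, e.g.
 *   gcc sketch.c $(pkg-config --cflags --libs glib-2.0)
 */
#include <glib.h>

static gchar *read_file_or_log(const gchar *path)
{
    GError *error = NULL;
    gchar  *contents = NULL;

    /* GLib-style call: returns FALSE on failure and fills *error. */
    if (!g_file_get_contents(path, &contents, NULL, &error)) {
        /* Log the reason from error->message before freeing the GError,
         * mirroring the flb_error() calls added in this commit. */
        g_printerr("[aws][compress] Failed to read %s: %s\n",
                   path, error->message);
        g_error_free(error);
        return NULL;
    }
    return contents;
}

int main(void)
{
    gchar *data = read_file_or_log("/nonexistent/path");

    if (data == NULL) {
        return 1;   /* error already logged above */
    }
    g_free(data);
    return 0;
}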