@@ -1,7 +1,8 @@
 import pyarrow as pa

 from cloudquery.plugin_v3 import plugin_pb2, plugin_pb2_grpc
-from cloudquery.sdk.plugin.plugin import Plugin
+from cloudquery.sdk.message import SyncInsertMessage, SyncMigrateTableMessage
+from cloudquery.sdk.plugin.plugin import Plugin, SyncOptions
 from cloudquery.sdk.schema import tables_to_arrow_schemas


@@ -33,8 +34,35 @@ def GetTables(self, request: plugin_pb2.GetTables.Request, context):
         return plugin_pb2.GetTables.Response(tables=tablesBytes)

     def Sync(self, request, context):
-        plugin_pb2.Sync.Response()
-        return plugin_pb2.Sync.Response()
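+        # Build the SDK-level sync options from the incoming gRPC request.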
+        options = SyncOptions(
+            deterministic_cq_id=False,  # TODO
+            skip_dependent_tables=request.skip_dependent_tables,
+            skip_tables=request.skip_tables,
+            tables=request.tables,
+            backend_options=None,
+        )
+
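+        # Stream messages from the plugin and encode each one as a Sync response.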
+        for msg in self._plugin.sync(options):
+            if isinstance(msg, SyncInsertMessage):
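+                # Serialize the record batch into Arrow IPC stream bytes.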
+                sink = pa.BufferOutputStream()
+                writer = pa.ipc.new_stream(sink, msg.record.schema)
+                writer.write_batch(msg.record)
+                writer.close()
+                buf = sink.getvalue().to_pybytes()
+                yield plugin_pb2.Sync.Response(insert=plugin_pb2.Sync.MessageInsert(
+                    record=buf
+                ))
+            elif isinstance(msg, SyncMigrateTableMessage):
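+                # Encode the table definition as a serialized Arrow schema.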
+                yield plugin_pb2.Sync.Response(migrate_table=plugin_pb2.Sync.MessageMigrateTable(
+                    table=msg.table.to_arrow_schema().serialize().to_pybytes()
+                ))
+            else:
+                # unknown sync message type
+                raise NotImplementedError()

     def Read(self, request, context):
         raise NotImplementedError()
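
For reference, a minimal sketch of how a consumer of this stream could decode the two payloads produced above. It assumes only pyarrow; the helper names decode_insert and decode_schema are illustrative, not part of the commit.

import pyarrow as pa

def decode_insert(buf: bytes) -> pa.RecordBatch:
    # An insert message carries a single record batch in Arrow IPC stream format.
    reader = pa.ipc.open_stream(buf)
    return reader.read_next_batch()

def decode_schema(buf: bytes) -> pa.Schema:
    # A migrate-table message carries a serialized Arrow schema.
    return pa.ipc.read_schema(pa.py_buffer(buf))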