diff --git a/.gitignore b/.gitignore index dfe71b44..95fba1e8 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ pubspec_overrides.yaml .flutter-plugins-dependencies .flutter-plugins build +**/doc/api .build # Shared assets diff --git a/demos/supabase-todolist/lib/attachments/local_storage_native.dart b/demos/supabase-todolist/lib/attachments/local_storage_native.dart new file mode 100644 index 00000000..31c35b96 --- /dev/null +++ b/demos/supabase-todolist/lib/attachments/local_storage_native.dart @@ -0,0 +1,8 @@ +import 'package:path_provider/path_provider.dart'; +import 'package:powersync_core/attachments/attachments.dart'; +import 'package:powersync_core/attachments/io.dart'; + +Future localAttachmentStorage() async { + final appDocDir = await getApplicationDocumentsDirectory(); + return IOLocalStorage(appDocDir); +} diff --git a/demos/supabase-todolist/lib/attachments/local_storage_unsupported.dart b/demos/supabase-todolist/lib/attachments/local_storage_unsupported.dart new file mode 100644 index 00000000..811fa7d3 --- /dev/null +++ b/demos/supabase-todolist/lib/attachments/local_storage_unsupported.dart @@ -0,0 +1,7 @@ +import 'package:powersync_core/attachments/attachments.dart'; + +Future localAttachmentStorage() async { + // This file is imported on the web, where we don't currently have a + // persistent local storage implementation. + return LocalStorage.inMemory(); +} diff --git a/demos/supabase-todolist/lib/attachments/photo_capture_widget.dart b/demos/supabase-todolist/lib/attachments/photo_capture_widget.dart index 38838dd7..89660808 100644 --- a/demos/supabase-todolist/lib/attachments/photo_capture_widget.dart +++ b/demos/supabase-todolist/lib/attachments/photo_capture_widget.dart @@ -1,11 +1,9 @@ import 'dart:async'; - +import 'dart:io'; import 'package:camera/camera.dart'; import 'package:flutter/material.dart'; -import 'package:powersync/powersync.dart' as powersync; +import 'package:logging/logging.dart'; import 'package:powersync_flutter_demo/attachments/queue.dart'; -import 'package:powersync_flutter_demo/models/todo_item.dart'; -import 'package:powersync_flutter_demo/powersync.dart'; class TakePhotoWidget extends StatefulWidget { final String todoId; @@ -23,6 +21,7 @@ class TakePhotoWidget extends StatefulWidget { class _TakePhotoWidgetState extends State { late CameraController _cameraController; late Future _initializeControllerFuture; + final log = Logger('TakePhotoWidget'); @override void initState() { @@ -37,7 +36,6 @@ class _TakePhotoWidgetState extends State { } @override - // Dispose of the camera controller when the widget is disposed void dispose() { _cameraController.dispose(); super.dispose(); @@ -45,25 +43,26 @@ class _TakePhotoWidgetState extends State { Future _takePhoto(context) async { try { - // Ensure the camera is initialized before taking a photo + log.info('Taking photo for todo: ${widget.todoId}'); await _initializeControllerFuture; - final XFile photo = await _cameraController.takePicture(); - // copy photo to new directory with ID as name - String photoId = powersync.uuid.v4(); - String storageDirectory = await attachmentQueue.getStorageDirectory(); - await attachmentQueue.localStorage - .copyFile(photo.path, '$storageDirectory/$photoId.jpg'); - int photoSize = await photo.length(); + // Read the photo data as bytes + final photoFile = File(photo.path); + if (!await photoFile.exists()) { + log.warning('Photo file does not exist: ${photo.path}'); + return; + } + + final photoData = photoFile.openRead(); - TodoItem.addPhoto(photoId, 
widget.todoId); - attachmentQueue.saveFile(photoId, photoSize); + // Save the photo attachment with the byte data + final attachment = await savePhotoAttachment(photoData, widget.todoId); + + log.info('Photo attachment saved with ID: ${attachment.id}'); } catch (e) { - log.info('Error taking photo: $e'); + log.severe('Error taking photo: $e'); } - - // After taking the photo, navigate back to the previous screen Navigator.pop(context); } diff --git a/demos/supabase-todolist/lib/attachments/photo_widget.dart b/demos/supabase-todolist/lib/attachments/photo_widget.dart index f034ef5b..f41bc0b0 100644 --- a/demos/supabase-todolist/lib/attachments/photo_widget.dart +++ b/demos/supabase-todolist/lib/attachments/photo_widget.dart @@ -1,12 +1,14 @@ import 'dart:io'; +import 'package:path_provider/path_provider.dart'; +import 'package:path/path.dart' as p; import 'package:flutter/material.dart'; -import 'package:powersync_attachments_helper/powersync_attachments_helper.dart'; +import 'package:powersync_core/attachments/attachments.dart'; import 'package:powersync_flutter_demo/attachments/camera_helpers.dart'; import 'package:powersync_flutter_demo/attachments/photo_capture_widget.dart'; -import 'package:powersync_flutter_demo/attachments/queue.dart'; import '../models/todo_item.dart'; +import '../powersync.dart'; class PhotoWidget extends StatefulWidget { final TodoItem todo; @@ -37,11 +39,12 @@ class _PhotoWidgetState extends State { if (photoId == null) { return _ResolvedPhotoState(photoPath: null, fileExists: false); } - photoPath = await attachmentQueue.getLocalUri('$photoId.jpg'); + final appDocDir = await getApplicationDocumentsDirectory(); + photoPath = p.join(appDocDir.path, '$photoId.jpg'); bool fileExists = await File(photoPath).exists(); - final row = await attachmentQueue.db + final row = await db .getOptional('SELECT * FROM attachments_queue WHERE id = ?', [photoId]); if (row != null) { @@ -98,7 +101,7 @@ class _PhotoWidgetState extends State { String? 
filePath = data.photoPath; bool fileIsDownloading = !data.fileExists; bool fileArchived = - data.attachment?.state == AttachmentState.archived.index; + data.attachment?.state == AttachmentState.archived; if (fileArchived) { return Column( diff --git a/demos/supabase-todolist/lib/attachments/queue.dart b/demos/supabase-todolist/lib/attachments/queue.dart index 2a8dd9ca..80460daf 100644 --- a/demos/supabase-todolist/lib/attachments/queue.dart +++ b/demos/supabase-todolist/lib/attachments/queue.dart @@ -1,90 +1,64 @@ import 'dart:async'; +import 'package:logging/logging.dart'; import 'package:powersync/powersync.dart'; -import 'package:powersync_attachments_helper/powersync_attachments_helper.dart'; -import 'package:powersync_flutter_demo/app_config.dart'; +import 'package:powersync_core/attachments/attachments.dart'; + import 'package:powersync_flutter_demo/attachments/remote_storage_adapter.dart'; -import 'package:powersync_flutter_demo/models/schema.dart'; +import 'local_storage_unsupported.dart' + if (dart.library.io) 'local_storage_native.dart'; -/// Global reference to the queue -late final PhotoAttachmentQueue attachmentQueue; +late AttachmentQueue attachmentQueue; final remoteStorage = SupabaseStorageAdapter(); +final logger = Logger('AttachmentQueue'); -/// Function to handle errors when downloading attachments -/// Return false if you want to archive the attachment -Future onDownloadError(Attachment attachment, Object exception) async { - if (exception.toString().contains('Object not found')) { - return false; - } - return true; -} - -class PhotoAttachmentQueue extends AbstractAttachmentQueue { - PhotoAttachmentQueue(db, remoteStorage) - : super( - db: db, - remoteStorage: remoteStorage, - onDownloadError: onDownloadError); - - @override - init() async { - if (AppConfig.supabaseStorageBucket.isEmpty) { - log.info( - 'No Supabase bucket configured, skip setting up PhotoAttachmentQueue watches'); - return; - } - - await super.init(); - } - - @override - Future saveFile(String fileId, int size, - {mediaType = 'image/jpeg'}) async { - String filename = '$fileId.jpg'; +Future initializeAttachmentQueue(PowerSyncDatabase db) async { + attachmentQueue = AttachmentQueue( + db: db, + remoteStorage: remoteStorage, + logger: logger, + localStorage: await localAttachmentStorage(), + watchAttachments: () => db.watch(''' + SELECT photo_id as id FROM todos WHERE photo_id IS NOT NULL + ''').map( + (results) => [ + for (final row in results) + WatchedAttachmentItem( + id: row['id'] as String, + fileExtension: 'jpg', + ) + ], + ), + ); - Attachment photoAttachment = Attachment( - id: fileId, - filename: filename, - state: AttachmentState.queuedUpload.index, - mediaType: mediaType, - localUri: getLocalFilePathSuffix(filename), - size: size, - ); - - return attachmentsService.saveAttachment(photoAttachment); - } - - @override - Future deleteFile(String fileId) async { - String filename = '$fileId.jpg'; - - Attachment photoAttachment = Attachment( - id: fileId, - filename: filename, - state: AttachmentState.queuedDelete.index); - - return attachmentsService.saveAttachment(photoAttachment); - } + await attachmentQueue.startSync(); +} - @override - StreamSubscription watchIds({String fileExtension = 'jpg'}) { - log.info('Watching photos in $todosTable...'); - return db.watch(''' - SELECT photo_id FROM $todosTable - WHERE photo_id IS NOT NULL - ''').map((results) { - return results.map((row) => row['photo_id'] as String).toList(); - }).listen((ids) async { - List idsInQueue = await 
attachmentsService.getAttachmentIds(); - List relevantIds = - ids.where((element) => !idsInQueue.contains(element)).toList(); - syncingService.processIds(relevantIds, fileExtension); - }); - } +Future savePhotoAttachment( + Stream> photoData, String todoId, + {String mediaType = 'image/jpeg'}) async { + // Save the file using the AttachmentQueue API + return await attachmentQueue.saveFile( + data: photoData, + mediaType: mediaType, + fileExtension: 'jpg', + metaData: 'Photo attachment for todo: $todoId', + updateHook: (context, attachment) async { + // Update the todo item to reference this attachment + await context.execute( + 'UPDATE todos SET photo_id = ? WHERE id = ?', + [attachment.id, todoId], + ); + }, + ); } -initializeAttachmentQueue(PowerSyncDatabase db) async { - attachmentQueue = PhotoAttachmentQueue(db, remoteStorage); - await attachmentQueue.init(); +Future deletePhotoAttachment(String fileId) async { + return await attachmentQueue.deleteFile( + attachmentId: fileId, + updateHook: (context, attachment) async { + // Optionally update relationships in the same transaction + }, + ); } diff --git a/demos/supabase-todolist/lib/attachments/remote_storage_adapter.dart b/demos/supabase-todolist/lib/attachments/remote_storage_adapter.dart index 596c5da5..5b711b83 100644 --- a/demos/supabase-todolist/lib/attachments/remote_storage_adapter.dart +++ b/demos/supabase-todolist/lib/attachments/remote_storage_adapter.dart @@ -1,49 +1,96 @@ import 'dart:io'; import 'dart:typed_data'; -import 'package:powersync_attachments_helper/powersync_attachments_helper.dart'; + +import 'package:powersync_core/attachments/attachments.dart'; import 'package:powersync_flutter_demo/app_config.dart'; import 'package:supabase_flutter/supabase_flutter.dart'; -import 'package:image/image.dart' as img; +import 'package:logging/logging.dart'; + +class SupabaseStorageAdapter implements RemoteStorage { + static final _log = Logger('SupabaseStorageAdapter'); -class SupabaseStorageAdapter implements AbstractRemoteStorageAdapter { @override - Future uploadFile(String filename, File file, - {String mediaType = 'text/plain'}) async { + Future uploadFile( + Stream> fileData, Attachment attachment) async { _checkSupabaseBucketIsConfigured(); + // Check if attachment size is specified (required for buffer allocation) + final byteSize = attachment.size; + if (byteSize == null) { + throw Exception('Cannot upload a file with no byte size specified'); + } + + _log.info('uploadFile: ${attachment.filename} (size: $byteSize bytes)'); + + // Collect all stream data into a single Uint8List buffer + final buffer = Uint8List(byteSize); + var position = 0; + + await for (final chunk in fileData) { + if (position + chunk.length > byteSize) { + throw Exception('File data exceeds specified size'); + } + buffer.setRange(position, position + chunk.length, chunk); + position += chunk.length; + } + + if (position != byteSize) { + throw Exception( + 'File data size ($position) does not match specified size ($byteSize)'); + } + + // Create a temporary file from the buffer for upload + final tempFile = + File('${Directory.systemTemp.path}/${attachment.filename}'); try { + await tempFile.writeAsBytes(buffer); + await Supabase.instance.client.storage .from(AppConfig.supabaseStorageBucket) - .upload(filename, file, - fileOptions: FileOptions(contentType: mediaType)); + .upload(attachment.filename, tempFile, + fileOptions: FileOptions( + contentType: + attachment.mediaType ?? 
'application/octet-stream')); + + _log.info('Successfully uploaded ${attachment.filename}'); } catch (error) { + _log.severe('Error uploading ${attachment.filename}', error); throw Exception(error); + } finally { + if (await tempFile.exists()) { + await tempFile.delete(); + } } } @override - Future downloadFile(String filePath) async { + Future>> downloadFile(Attachment attachment) async { _checkSupabaseBucketIsConfigured(); try { + _log.info('downloadFile: ${attachment.filename}'); + Uint8List fileBlob = await Supabase.instance.client.storage .from(AppConfig.supabaseStorageBucket) - .download(filePath); - final image = img.decodeImage(fileBlob); - Uint8List blob = img.JpegEncoder().encode(image!); - return blob; + .download(attachment.filename); + + _log.info( + 'Successfully downloaded ${attachment.filename} (${fileBlob.length} bytes)'); + + // Return the raw file data as a stream + return Stream.value(fileBlob); } catch (error) { + _log.severe('Error downloading ${attachment.filename}', error); throw Exception(error); } } @override - Future deleteFile(String filename) async { + Future deleteFile(Attachment attachment) async { _checkSupabaseBucketIsConfigured(); - try { await Supabase.instance.client.storage .from(AppConfig.supabaseStorageBucket) - .remove([filename]); + .remove([attachment.filename]); } catch (error) { throw Exception(error); } diff --git a/demos/supabase-todolist/lib/models/schema.dart b/demos/supabase-todolist/lib/models/schema.dart index 89b69b0c..5a6a261b 100644 --- a/demos/supabase-todolist/lib/models/schema.dart +++ b/demos/supabase-todolist/lib/models/schema.dart @@ -1,9 +1,9 @@ import 'package:powersync/powersync.dart'; -import 'package:powersync_attachments_helper/powersync_attachments_helper.dart'; +import 'package:powersync_core/attachments/attachments.dart'; const todosTable = 'todos'; -Schema schema = Schema(([ +Schema schema = Schema([ const Table(todosTable, [ Column.text('list_id'), Column.text('photo_id'), @@ -22,6 +22,5 @@ Schema schema = Schema(([ Column.text('name'), Column.text('owner_id') ]), - AttachmentsQueueTable( - attachmentsQueueTableName: defaultAttachmentsQueueTableName) -])); + AttachmentsQueueTable() +]); diff --git a/demos/supabase-todolist/lib/widgets/todo_item_widget.dart b/demos/supabase-todolist/lib/widgets/todo_item_widget.dart index a59812ed..700a869a 100644 --- a/demos/supabase-todolist/lib/widgets/todo_item_widget.dart +++ b/demos/supabase-todolist/lib/widgets/todo_item_widget.dart @@ -23,7 +23,13 @@ class TodoItemWidget extends StatelessWidget { Future deleteTodo(TodoItem todo) async { if (todo.photoId != null) { - attachmentQueue.deleteFile(todo.photoId!); + await attachmentQueue.deleteFile( + attachmentId: todo.photoId!, + updateHook: (context, attachment) async { + await context.execute( + "UPDATE todos SET photo_id = NULL WHERE id = ?", [todo.id]); + }, + ); } await todo.delete(); } diff --git a/demos/supabase-todolist/pubspec.lock b/demos/supabase-todolist/pubspec.lock index a16ca13c..24fad347 100644 --- a/demos/supabase-todolist/pubspec.lock +++ b/demos/supabase-todolist/pubspec.lock @@ -1,6 +1,22 @@ # Generated by pub # See https://dart.dev/tools/pub/glossary#lockfile packages: + _fe_analyzer_shared: + dependency: transitive + description: + name: _fe_analyzer_shared + sha256: da0d9209ca76bde579f2da330aeb9df62b6319c834fa7baae052021b0462401f + url: "https://pub.dev" + source: hosted + version: "85.0.0" + analyzer: + dependency: transitive + description: + name: analyzer + sha256: 
"974859dc0ff5f37bc4313244b3218c791810d03ab3470a579580279ba971a48d" + url: "https://pub.dev" + source: hosted + version: "7.7.1" app_links: dependency: transitive description: @@ -121,6 +137,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.0.4" + cli_config: + dependency: transitive + description: + name: cli_config + sha256: ac20a183a07002b700f0c25e61b7ee46b23c309d76ab7b7640a028f18e4d99ec + url: "https://pub.dev" + source: hosted + version: "0.2.0" clock: dependency: transitive description: @@ -137,6 +161,22 @@ packages: url: "https://pub.dev" source: hosted version: "1.19.1" + convert: + dependency: transitive + description: + name: convert + sha256: b30acd5944035672bc15c6b7a8b47d773e41e2f17de064350988c5d02adb1c68 + url: "https://pub.dev" + source: hosted + version: "3.1.2" + coverage: + dependency: transitive + description: + name: coverage + sha256: "5da775aa218eaf2151c721b16c01c7676fbfdd99cebba2bf64e8b807a28ff94d" + url: "https://pub.dev" + source: hosted + version: "1.15.0" cross_file: dependency: transitive description: @@ -216,6 +256,14 @@ packages: description: flutter source: sdk version: "0.0.0" + frontend_server_client: + dependency: transitive + description: + name: frontend_server_client + sha256: f64a0333a82f30b0cca061bc3d143813a486dc086b574bfb233b7c1372427694 + url: "https://pub.dev" + source: hosted + version: "4.0.0" functions_client: dependency: transitive description: @@ -224,6 +272,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.4.4" + glob: + dependency: transitive + description: + name: glob + sha256: c3f1ee72c96f8f78935e18aa8cecced9ab132419e8625dc187e1c2408efc20de + url: "https://pub.dev" + source: hosted + version: "2.1.3" gotrue: dependency: transitive description: @@ -248,6 +304,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.5.0" + http_multi_server: + dependency: transitive + description: + name: http_multi_server + sha256: aa6199f908078bb1c5efb8d8638d4ae191aac11b311132c3ef48ce352fb52ef8 + url: "https://pub.dev" + source: hosted + version: "3.2.2" http_parser: dependency: transitive description: @@ -264,6 +328,22 @@ packages: url: "https://pub.dev" source: hosted version: "4.5.4" + io: + dependency: transitive + description: + name: io + sha256: dfd5a80599cf0165756e3181807ed3e77daf6dd4137caaad72d0b7931597650b + url: "https://pub.dev" + source: hosted + version: "1.0.5" + js: + dependency: transitive + description: + name: js + sha256: "53385261521cc4a0c4658fd0ad07a7d14591cf8fc33abbceae306ddb974888dc" + url: "https://pub.dev" + source: hosted + version: "0.7.2" json_annotation: dependency: transitive description: @@ -360,6 +440,22 @@ packages: url: "https://pub.dev" source: hosted version: "3.1.0" + node_preamble: + dependency: transitive + description: + name: node_preamble + sha256: "6e7eac89047ab8a8d26cf16127b5ed26de65209847630400f9aefd7cd5c730db" + url: "https://pub.dev" + source: hosted + version: "2.0.2" + package_config: + dependency: transitive + description: + name: package_config + sha256: f096c55ebb7deb7e384101542bfba8c52696c1b56fca2eb62827989ef2353bbc + url: "https://pub.dev" + source: hosted + version: "2.2.0" path: dependency: "direct main" description: @@ -440,6 +536,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.8" + pool: + dependency: transitive + description: + name: pool + sha256: "20fe868b6314b322ea036ba325e6fc0711a22948856475e2c2b6306e8ab39c2a" + url: "https://pub.dev" + source: hosted + version: "1.5.1" posix: dependency: transitive description: @@ -464,14 +568,14 @@ 
packages: source: path version: "1.15.2" powersync_attachments_helper: - dependency: "direct main" + dependency: "direct overridden" description: path: "../../packages/powersync_attachments_helper" relative: true source: path version: "0.6.19" powersync_core: - dependency: "direct overridden" + dependency: "direct main" description: path: "../../packages/powersync_core" relative: true @@ -580,11 +684,59 @@ packages: url: "https://pub.dev" source: hosted version: "2.4.1" + shelf: + dependency: transitive + description: + name: shelf + sha256: e7dd780a7ffb623c57850b33f43309312fc863fb6aa3d276a754bb299839ef12 + url: "https://pub.dev" + source: hosted + version: "1.4.2" + shelf_packages_handler: + dependency: transitive + description: + name: shelf_packages_handler + sha256: "89f967eca29607c933ba9571d838be31d67f53f6e4ee15147d5dc2934fee1b1e" + url: "https://pub.dev" + source: hosted + version: "3.0.2" + shelf_static: + dependency: transitive + description: + name: shelf_static + sha256: c87c3875f91262785dade62d135760c2c69cb217ac759485334c5857ad89f6e3 + url: "https://pub.dev" + source: hosted + version: "1.1.3" + shelf_web_socket: + dependency: transitive + description: + name: shelf_web_socket + sha256: "3632775c8e90d6c9712f883e633716432a27758216dfb61bd86a8321c0580925" + url: "https://pub.dev" + source: hosted + version: "3.0.0" sky_engine: dependency: transitive description: flutter source: sdk version: "0.0.0" + source_map_stack_trace: + dependency: transitive + description: + name: source_map_stack_trace + sha256: c0713a43e323c3302c2abe2a1cc89aa057a387101ebd280371d6a6c9fa68516b + url: "https://pub.dev" + source: hosted + version: "2.1.2" + source_maps: + dependency: transitive + description: + name: source_maps + sha256: "190222579a448b03896e0ca6eca5998fa810fda630c1d65e2f78b3f638f54812" + url: "https://pub.dev" + source: hosted + version: "0.10.13" source_span: dependency: transitive description: @@ -697,6 +849,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.2.2" + test: + dependency: "direct dev" + description: + name: test + sha256: "65e29d831719be0591f7b3b1a32a3cda258ec98c58c7b25f7b84241bc31215bb" + url: "https://pub.dev" + source: hosted + version: "1.26.2" test_api: dependency: transitive description: @@ -705,6 +865,14 @@ packages: url: "https://pub.dev" source: hosted version: "0.7.6" + test_core: + dependency: transitive + description: + name: test_core + sha256: "80bf5a02b60af04b09e14f6fe68b921aad119493e26e490deaca5993fef1b05a" + url: "https://pub.dev" + source: hosted + version: "0.6.11" typed_data: dependency: transitive description: @@ -809,6 +977,14 @@ packages: url: "https://pub.dev" source: hosted version: "15.0.2" + watcher: + dependency: transitive + description: + name: watcher + sha256: "5bf046f41320ac97a469d506261797f35254fa61c641741ef32dacda98b7d39c" + url: "https://pub.dev" + source: hosted + version: "1.1.3" web: dependency: transitive description: @@ -833,6 +1009,14 @@ packages: url: "https://pub.dev" source: hosted version: "3.0.3" + webkit_inspection_protocol: + dependency: transitive + description: + name: webkit_inspection_protocol + sha256: "87d3f2333bb240704cd3f1c6b5b7acd8a10e7f0bc28c28dcf14e782014f4a572" + url: "https://pub.dev" + source: hosted + version: "1.2.1" xdg_directories: dependency: transitive description: diff --git a/demos/supabase-todolist/pubspec.yaml b/demos/supabase-todolist/pubspec.yaml index ddb02717..53d61b7a 100644 --- a/demos/supabase-todolist/pubspec.yaml +++ b/demos/supabase-todolist/pubspec.yaml @@ -10,8 +10,8 @@ 
environment: dependencies: flutter: sdk: flutter - powersync_attachments_helper: ^0.6.19 powersync: ^1.15.2 + powersync_core: ^1.5.2 path_provider: ^2.1.1 supabase_flutter: ^2.0.1 path: ^1.8.3 @@ -26,6 +26,7 @@ dev_dependencies: sdk: flutter flutter_lints: ^3.0.1 + test: ^1.25.15 flutter: uses-material-design: true diff --git a/packages/powersync_attachments_stream/pubspec.lock b/packages/powersync_attachments_stream/pubspec.lock deleted file mode 100644 index ae003e13..00000000 --- a/packages/powersync_attachments_stream/pubspec.lock +++ /dev/null @@ -1,669 +0,0 @@ -# Generated by pub -# See https://dart.dev/tools/pub/glossary#lockfile -packages: - _fe_analyzer_shared: - dependency: transitive - description: - name: _fe_analyzer_shared - sha256: da0d9209ca76bde579f2da330aeb9df62b6319c834fa7baae052021b0462401f - url: "https://pub.dev" - source: hosted - version: "85.0.0" - analyzer: - dependency: transitive - description: - name: analyzer - sha256: b1ade5707ab7a90dfd519eaac78a7184341d19adb6096c68d499b59c7c6cf880 - url: "https://pub.dev" - source: hosted - version: "7.7.0" - args: - dependency: transitive - description: - name: args - sha256: d0481093c50b1da8910eb0bb301626d4d8eb7284aa739614d2b394ee09e3ea04 - url: "https://pub.dev" - source: hosted - version: "2.7.0" - async: - dependency: transitive - description: - name: async - sha256: "758e6d74e971c3e5aceb4110bfd6698efc7f501675bcfe0c775459a8140750eb" - url: "https://pub.dev" - source: hosted - version: "2.13.0" - boolean_selector: - dependency: transitive - description: - name: boolean_selector - sha256: "8aab1771e1243a5063b8b0ff68042d67334e3feab9e95b9490f9a6ebf73b42ea" - url: "https://pub.dev" - source: hosted - version: "2.1.2" - characters: - dependency: transitive - description: - name: characters - sha256: f71061c654a3380576a52b451dd5532377954cf9dbd272a78fc8479606670803 - url: "https://pub.dev" - source: hosted - version: "1.4.0" - checked_yaml: - dependency: transitive - description: - name: checked_yaml - sha256: "959525d3162f249993882720d52b7e0c833978df229be20702b33d48d91de70f" - url: "https://pub.dev" - source: hosted - version: "2.0.4" - cli_config: - dependency: transitive - description: - name: cli_config - sha256: ac20a183a07002b700f0c25e61b7ee46b23c309d76ab7b7640a028f18e4d99ec - url: "https://pub.dev" - source: hosted - version: "0.2.0" - clock: - dependency: transitive - description: - name: clock - sha256: fddb70d9b5277016c77a80201021d40a2247104d9f4aa7bab7157b7e3f05b84b - url: "https://pub.dev" - source: hosted - version: "1.1.2" - collection: - dependency: transitive - description: - name: collection - sha256: "2f5709ae4d3d59dd8f7cd309b4e023046b57d8a6c82130785d2b0e5868084e76" - url: "https://pub.dev" - source: hosted - version: "1.19.1" - convert: - dependency: transitive - description: - name: convert - sha256: b30acd5944035672bc15c6b7a8b47d773e41e2f17de064350988c5d02adb1c68 - url: "https://pub.dev" - source: hosted - version: "3.1.2" - coverage: - dependency: transitive - description: - name: coverage - sha256: "5da775aa218eaf2151c721b16c01c7676fbfdd99cebba2bf64e8b807a28ff94d" - url: "https://pub.dev" - source: hosted - version: "1.15.0" - crypto: - dependency: transitive - description: - name: crypto - sha256: "1e445881f28f22d6140f181e07737b22f1e099a5e1ff94b0af2f9e4a463f4855" - url: "https://pub.dev" - source: hosted - version: "3.0.6" - fake_async: - dependency: transitive - description: - name: fake_async - sha256: "5368f224a74523e8d2e7399ea1638b37aecfca824a3cc4dfdf77bf1fa905ac44" - url: "https://pub.dev" - source: 
hosted - version: "1.3.3" - ffi: - dependency: transitive - description: - name: ffi - sha256: "289279317b4b16eb2bb7e271abccd4bf84ec9bdcbe999e278a94b804f5630418" - url: "https://pub.dev" - source: hosted - version: "2.1.4" - file: - dependency: transitive - description: - name: file - sha256: a3b4f84adafef897088c160faf7dfffb7696046cb13ae90b508c2cbc95d3b8d4 - url: "https://pub.dev" - source: hosted - version: "7.0.1" - fixnum: - dependency: transitive - description: - name: fixnum - sha256: b6dc7065e46c974bc7c5f143080a6764ec7a4be6da1285ececdc37be96de53be - url: "https://pub.dev" - source: hosted - version: "1.1.1" - flutter: - dependency: "direct main" - description: flutter - source: sdk - version: "0.0.0" - flutter_lints: - dependency: "direct dev" - description: - name: flutter_lints - sha256: "5398f14efa795ffb7a33e9b6a08798b26a180edac4ad7db3f231e40f82ce11e1" - url: "https://pub.dev" - source: hosted - version: "5.0.0" - flutter_test: - dependency: "direct dev" - description: flutter - source: sdk - version: "0.0.0" - frontend_server_client: - dependency: transitive - description: - name: frontend_server_client - sha256: f64a0333a82f30b0cca061bc3d143813a486dc086b574bfb233b7c1372427694 - url: "https://pub.dev" - source: hosted - version: "4.0.0" - glob: - dependency: transitive - description: - name: glob - sha256: c3f1ee72c96f8f78935e18aa8cecced9ab132419e8625dc187e1c2408efc20de - url: "https://pub.dev" - source: hosted - version: "2.1.3" - http: - dependency: transitive - description: - name: http - sha256: "2c11f3f94c687ee9bad77c171151672986360b2b001d109814ee7140b2cf261b" - url: "https://pub.dev" - source: hosted - version: "1.4.0" - http_multi_server: - dependency: transitive - description: - name: http_multi_server - sha256: aa6199f908078bb1c5efb8d8638d4ae191aac11b311132c3ef48ce352fb52ef8 - url: "https://pub.dev" - source: hosted - version: "3.2.2" - http_parser: - dependency: transitive - description: - name: http_parser - sha256: "178d74305e7866013777bab2c3d8726205dc5a4dd935297175b19a23a2e66571" - url: "https://pub.dev" - source: hosted - version: "4.1.2" - io: - dependency: transitive - description: - name: io - sha256: dfd5a80599cf0165756e3181807ed3e77daf6dd4137caaad72d0b7931597650b - url: "https://pub.dev" - source: hosted - version: "1.0.5" - js: - dependency: transitive - description: - name: js - sha256: "53385261521cc4a0c4658fd0ad07a7d14591cf8fc33abbceae306ddb974888dc" - url: "https://pub.dev" - source: hosted - version: "0.7.2" - json_annotation: - dependency: transitive - description: - name: json_annotation - sha256: "1ce844379ca14835a50d2f019a3099f419082cfdd231cd86a142af94dd5c6bb1" - url: "https://pub.dev" - source: hosted - version: "4.9.0" - leak_tracker: - dependency: transitive - description: - name: leak_tracker - sha256: "6bb818ecbdffe216e81182c2f0714a2e62b593f4a4f13098713ff1685dfb6ab0" - url: "https://pub.dev" - source: hosted - version: "10.0.9" - leak_tracker_flutter_testing: - dependency: transitive - description: - name: leak_tracker_flutter_testing - sha256: f8b613e7e6a13ec79cfdc0e97638fddb3ab848452eff057653abd3edba760573 - url: "https://pub.dev" - source: hosted - version: "3.0.9" - leak_tracker_testing: - dependency: transitive - description: - name: leak_tracker_testing - sha256: "6ba465d5d76e67ddf503e1161d1f4a6bc42306f9d66ca1e8f079a47290fb06d3" - url: "https://pub.dev" - source: hosted - version: "3.0.1" - lints: - dependency: transitive - description: - name: lints - sha256: c35bb79562d980e9a453fc715854e1ed39e24e7d0297a880ef54e17f9874a9d7 - url: 
"https://pub.dev" - source: hosted - version: "5.1.1" - logging: - dependency: "direct main" - description: - name: logging - sha256: c8245ada5f1717ed44271ed1c26b8ce85ca3228fd2ffdb75468ab01979309d61 - url: "https://pub.dev" - source: hosted - version: "1.3.0" - matcher: - dependency: transitive - description: - name: matcher - sha256: dc58c723c3c24bf8d3e2d3ad3f2f9d7bd9cf43ec6feaa64181775e60190153f2 - url: "https://pub.dev" - source: hosted - version: "0.12.17" - material_color_utilities: - dependency: transitive - description: - name: material_color_utilities - sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec - url: "https://pub.dev" - source: hosted - version: "0.11.1" - meta: - dependency: transitive - description: - name: meta - sha256: e3641ec5d63ebf0d9b41bd43201a66e3fc79a65db5f61fc181f04cd27aab950c - url: "https://pub.dev" - source: hosted - version: "1.16.0" - mime: - dependency: transitive - description: - name: mime - sha256: "41a20518f0cb1256669420fdba0cd90d21561e560ac240f26ef8322e45bb7ed6" - url: "https://pub.dev" - source: hosted - version: "2.0.0" - mutex: - dependency: transitive - description: - name: mutex - sha256: "8827da25de792088eb33e572115a5eb0d61d61a3c01acbc8bcbe76ed78f1a1f2" - url: "https://pub.dev" - source: hosted - version: "3.1.0" - node_preamble: - dependency: transitive - description: - name: node_preamble - sha256: "6e7eac89047ab8a8d26cf16127b5ed26de65209847630400f9aefd7cd5c730db" - url: "https://pub.dev" - source: hosted - version: "2.0.2" - package_config: - dependency: transitive - description: - name: package_config - sha256: f096c55ebb7deb7e384101542bfba8c52696c1b56fca2eb62827989ef2353bbc - url: "https://pub.dev" - source: hosted - version: "2.2.0" - path: - dependency: "direct main" - description: - name: path - sha256: "75cca69d1490965be98c73ceaea117e8a04dd21217b37b292c9ddbec0d955bc5" - url: "https://pub.dev" - source: hosted - version: "1.9.1" - path_provider: - dependency: "direct main" - description: - name: path_provider - sha256: "50c5dd5b6e1aaf6fb3a78b33f6aa3afca52bf903a8a5298f53101fdaee55bbcd" - url: "https://pub.dev" - source: hosted - version: "2.1.5" - path_provider_android: - dependency: transitive - description: - name: path_provider_android - sha256: d0d310befe2c8ab9e7f393288ccbb11b60c019c6b5afc21973eeee4dda2b35e9 - url: "https://pub.dev" - source: hosted - version: "2.2.17" - path_provider_foundation: - dependency: transitive - description: - name: path_provider_foundation - sha256: "4843174df4d288f5e29185bd6e72a6fbdf5a4a4602717eed565497429f179942" - url: "https://pub.dev" - source: hosted - version: "2.4.1" - path_provider_linux: - dependency: transitive - description: - name: path_provider_linux - sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279 - url: "https://pub.dev" - source: hosted - version: "2.2.1" - path_provider_platform_interface: - dependency: transitive - description: - name: path_provider_platform_interface - sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334" - url: "https://pub.dev" - source: hosted - version: "2.1.2" - path_provider_windows: - dependency: transitive - description: - name: path_provider_windows - sha256: bd6f00dbd873bfb70d0761682da2b3a2c2fccc2b9e84c495821639601d81afe7 - url: "https://pub.dev" - source: hosted - version: "2.3.0" - platform: - dependency: transitive - description: - name: platform - sha256: "5d6b1b0036a5f331ebc77c850ebc8506cbc1e9416c27e59b439f917a902a4984" - url: "https://pub.dev" - source: hosted - version: "3.1.6" - 
plugin_platform_interface: - dependency: transitive - description: - name: plugin_platform_interface - sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02" - url: "https://pub.dev" - source: hosted - version: "2.1.8" - pool: - dependency: transitive - description: - name: pool - sha256: "20fe868b6314b322ea036ba325e6fc0711a22948856475e2c2b6306e8ab39c2a" - url: "https://pub.dev" - source: hosted - version: "1.5.1" - powersync_core: - dependency: "direct main" - description: - name: powersync_core - sha256: d8ae292bc77f0a96f44c6cc2911d1b781760a87a919e5045e75458cba83bb759 - url: "https://pub.dev" - source: hosted - version: "1.5.0" - pub_semver: - dependency: transitive - description: - name: pub_semver - sha256: "5bfcf68ca79ef689f8990d1160781b4bad40a3bd5e5218ad4076ddb7f4081585" - url: "https://pub.dev" - source: hosted - version: "2.2.0" - pubspec_parse: - dependency: transitive - description: - name: pubspec_parse - sha256: "0560ba233314abbed0a48a2956f7f022cce7c3e1e73df540277da7544cad4082" - url: "https://pub.dev" - source: hosted - version: "1.5.0" - shelf: - dependency: transitive - description: - name: shelf - sha256: e7dd780a7ffb623c57850b33f43309312fc863fb6aa3d276a754bb299839ef12 - url: "https://pub.dev" - source: hosted - version: "1.4.2" - shelf_packages_handler: - dependency: transitive - description: - name: shelf_packages_handler - sha256: "89f967eca29607c933ba9571d838be31d67f53f6e4ee15147d5dc2934fee1b1e" - url: "https://pub.dev" - source: hosted - version: "3.0.2" - shelf_static: - dependency: transitive - description: - name: shelf_static - sha256: c87c3875f91262785dade62d135760c2c69cb217ac759485334c5857ad89f6e3 - url: "https://pub.dev" - source: hosted - version: "1.1.3" - shelf_web_socket: - dependency: transitive - description: - name: shelf_web_socket - sha256: "3632775c8e90d6c9712f883e633716432a27758216dfb61bd86a8321c0580925" - url: "https://pub.dev" - source: hosted - version: "3.0.0" - sky_engine: - dependency: transitive - description: flutter - source: sdk - version: "0.0.0" - source_map_stack_trace: - dependency: transitive - description: - name: source_map_stack_trace - sha256: c0713a43e323c3302c2abe2a1cc89aa057a387101ebd280371d6a6c9fa68516b - url: "https://pub.dev" - source: hosted - version: "2.1.2" - source_maps: - dependency: transitive - description: - name: source_maps - sha256: "190222579a448b03896e0ca6eca5998fa810fda630c1d65e2f78b3f638f54812" - url: "https://pub.dev" - source: hosted - version: "0.10.13" - source_span: - dependency: transitive - description: - name: source_span - sha256: "254ee5351d6cb365c859e20ee823c3bb479bf4a293c22d17a9f1bf144ce86f7c" - url: "https://pub.dev" - source: hosted - version: "1.10.1" - sprintf: - dependency: transitive - description: - name: sprintf - sha256: "1fc9ffe69d4df602376b52949af107d8f5703b77cda567c4d7d86a0693120f23" - url: "https://pub.dev" - source: hosted - version: "7.0.0" - sqlite3: - dependency: transitive - description: - name: sqlite3 - sha256: "608b56d594e4c8498c972c8f1507209f9fd74939971b948ddbbfbfd1c9cb3c15" - url: "https://pub.dev" - source: hosted - version: "2.7.7" - sqlite3_web: - dependency: transitive - description: - name: sqlite3_web - sha256: "967e076442f7e1233bd7241ca61f3efe4c7fc168dac0f38411bdb3bdf471eb3c" - url: "https://pub.dev" - source: hosted - version: "0.3.1" - sqlite_async: - dependency: "direct main" - description: - name: sqlite_async - sha256: "9332aedd311a19dd215dcb55729bc68dc587dc7655b569ab8819b68ee0be0082" - url: "https://pub.dev" - source: hosted - version: "0.11.7" 
- stack_trace: - dependency: transitive - description: - name: stack_trace - sha256: "8b27215b45d22309b5cddda1aa2b19bdfec9df0e765f2de506401c071d38d1b1" - url: "https://pub.dev" - source: hosted - version: "1.12.1" - stream_channel: - dependency: transitive - description: - name: stream_channel - sha256: "969e04c80b8bcdf826f8f16579c7b14d780458bd97f56d107d3950fdbeef059d" - url: "https://pub.dev" - source: hosted - version: "2.1.4" - string_scanner: - dependency: transitive - description: - name: string_scanner - sha256: "921cd31725b72fe181906c6a94d987c78e3b98c2e205b397ea399d4054872b43" - url: "https://pub.dev" - source: hosted - version: "1.4.1" - term_glyph: - dependency: transitive - description: - name: term_glyph - sha256: "7f554798625ea768a7518313e58f83891c7f5024f88e46e7182a4558850a4b8e" - url: "https://pub.dev" - source: hosted - version: "1.2.2" - test: - dependency: "direct dev" - description: - name: test - sha256: "301b213cd241ca982e9ba50266bd3f5bd1ea33f1455554c5abb85d1be0e2d87e" - url: "https://pub.dev" - source: hosted - version: "1.25.15" - test_api: - dependency: transitive - description: - name: test_api - sha256: fb31f383e2ee25fbbfe06b40fe21e1e458d14080e3c67e7ba0acfde4df4e0bbd - url: "https://pub.dev" - source: hosted - version: "0.7.4" - test_core: - dependency: transitive - description: - name: test_core - sha256: "84d17c3486c8dfdbe5e12a50c8ae176d15e2a771b96909a9442b40173649ccaa" - url: "https://pub.dev" - source: hosted - version: "0.6.8" - typed_data: - dependency: transitive - description: - name: typed_data - sha256: f9049c039ebfeb4cf7a7104a675823cd72dba8297f264b6637062516699fa006 - url: "https://pub.dev" - source: hosted - version: "1.4.0" - universal_io: - dependency: transitive - description: - name: universal_io - sha256: "1722b2dcc462b4b2f3ee7d188dad008b6eb4c40bbd03a3de451d82c78bba9aad" - url: "https://pub.dev" - source: hosted - version: "2.2.2" - uuid: - dependency: transitive - description: - name: uuid - sha256: a5be9ef6618a7ac1e964353ef476418026db906c4facdedaa299b7a2e71690ff - url: "https://pub.dev" - source: hosted - version: "4.5.1" - vector_math: - dependency: transitive - description: - name: vector_math - sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803" - url: "https://pub.dev" - source: hosted - version: "2.1.4" - vm_service: - dependency: transitive - description: - name: vm_service - sha256: ddfa8d30d89985b96407efce8acbdd124701f96741f2d981ca860662f1c0dc02 - url: "https://pub.dev" - source: hosted - version: "15.0.0" - watcher: - dependency: transitive - description: - name: watcher - sha256: "0b7fd4a0bbc4b92641dbf20adfd7e3fd1398fe17102d94b674234563e110088a" - url: "https://pub.dev" - source: hosted - version: "1.1.2" - web: - dependency: transitive - description: - name: web - sha256: "868d88a33d8a87b18ffc05f9f030ba328ffefba92d6c127917a2ba740f9cfe4a" - url: "https://pub.dev" - source: hosted - version: "1.1.1" - web_socket: - dependency: transitive - description: - name: web_socket - sha256: "34d64019aa8e36bf9842ac014bb5d2f5586ca73df5e4d9bf5c936975cae6982c" - url: "https://pub.dev" - source: hosted - version: "1.0.1" - web_socket_channel: - dependency: transitive - description: - name: web_socket_channel - sha256: d645757fb0f4773d602444000a8131ff5d48c9e47adfe9772652dd1a4f2d45c8 - url: "https://pub.dev" - source: hosted - version: "3.0.3" - webkit_inspection_protocol: - dependency: transitive - description: - name: webkit_inspection_protocol - sha256: "87d3f2333bb240704cd3f1c6b5b7acd8a10e7f0bc28c28dcf14e782014f4a572" - url: 
"https://pub.dev" - source: hosted - version: "1.2.1" - xdg_directories: - dependency: transitive - description: - name: xdg_directories - sha256: "7a3f37b05d989967cdddcbb571f1ea834867ae2faa29725fd085180e0883aa15" - url: "https://pub.dev" - source: hosted - version: "1.1.0" - yaml: - dependency: transitive - description: - name: yaml - sha256: b9da305ac7c39faa3f030eccd175340f968459dae4af175130b3fc47e40d76ce - url: "https://pub.dev" - source: hosted - version: "3.1.3" -sdks: - dart: ">=3.8.1 <4.0.0" - flutter: ">=3.27.0" diff --git a/packages/powersync_core/dartdoc_options.yaml b/packages/powersync_core/dartdoc_options.yaml new file mode 100644 index 00000000..4fe2c4e0 --- /dev/null +++ b/packages/powersync_core/dartdoc_options.yaml @@ -0,0 +1,5 @@ +dartdoc: + categories: + attachments: + name: Attachments + markdown: doc/attachments.md diff --git a/packages/powersync_core/doc/attachments.md b/packages/powersync_core/doc/attachments.md new file mode 100644 index 00000000..aad9b5cb --- /dev/null +++ b/packages/powersync_core/doc/attachments.md @@ -0,0 +1,122 @@ +## Attachments + +In many cases, you might want to sync large binary data (like images) along with the data synced by +PowerSync. +Embedding this data directly in your source databases is [inefficient and not recommended](https://docs.powersync.com/usage/use-case-examples/attachments). + +Instead, the PowerSync SDK for Dart and Flutter provides utilities you can use to _reference_ this binary data +in your primary data model, and then download it from a secondary data store such as S3. +Because binary data is not directly stored in the source database in this model, we call these files _attachments_. + +## Alpha release + +The attachment helpers described in this document are currently in an alpha state, intended for testing. +Expect breaking changes and instability as development continues. +The attachments API is marked as `@experimental` for this reason. + +Do not rely on these libraries for production use. + +## Usage + +An `AttachmentQueue` instance is used to manage and sync attachments in your app. +The attachments' state is stored in a local-only attachments table. + +### Key assumptions + +- Each attachment is identified by a unique id. +- Attachments are immutable once created. +- Relational data should reference attachments using a foreign key column. +- Relational data should reflect the holistic state of attachments at any given time. Any existing local attachment + will be deleted locally if no relational data references it. + +### Example implementation + +See the [supabase todolist](https://github.com/powersync-ja/powersync.dart/tree/main/demos/supabase-todolist) demo for +a basic example of attachment syncing. + +### Setup + +First, add a table storing local attachment state to your database schema. + +```dart +final schema = Schema([ + AttachmentsQueueTable(), + // In this document, we assume the photo_id column of the todos table references an optional photo + // stored as an attachment. + Table('todos', [ + Column.text('list_id'), + Column.text('photo_id'), + Column.text('description'), + Column.integer('completed'), + ]), +]); +``` + +Next, create an `AttachmentQueue` instance. This class provides default syncing utilities and implements a default +sync strategy. This class can be extended for custom functionality, if needed. 
+```dart
+final directory = await getApplicationDocumentsDirectory();
+
+final attachmentQueue = AttachmentQueue(
+  db: db,
+  remoteStorage: SupabaseStorageAdapter(), // instance responsible for uploads and downloads
+  logger: logger,
+  localStorage: IOLocalStorage(directory), // IOLocalStorage requires `dart:io` and is not available on the web
+  watchAttachments: () => db.watch('''
+    SELECT photo_id as id FROM todos WHERE photo_id IS NOT NULL
+  ''').map((results) => [
+        for (final row in results)
+          WatchedAttachmentItem(
+            id: row['id'] as String,
+            fileExtension: 'jpg',
+          )
+      ],
+  ),
+);
+```
+
+Here,
+
+ - An instance of `LocalStorage`, such as the `IOLocalStorage` provided by the SDK, is responsible for storing
+   attachment contents locally.
+ - An instance of `RemoteStorage` is responsible for downloading and uploading attachment contents to the secondary
+   service, such as S3, Firebase Cloud Storage or Supabase Storage.
+ - `watchAttachments` is a function emitting a stream of attachment items that are considered to be referenced from
+   the current database state. In this example, `todos.photo_id` is the only column referencing attachments.
+
+Next, start the sync process by calling `attachmentQueue.startSync()`.
+
+## Storing attachments
+
+To create a new attachment locally, call `AttachmentQueue.saveFile`. To represent the attachment, this method takes
+the contents to store, the media type, and an optional file extension and id.
+The queue will store the contents in a local file and mark it as queued for upload. It also invokes a callback
+responsible for referencing the id of the generated attachment in the primary data model:
+
+```dart
+Future<Attachment> savePhotoAttachment(
+    Stream<List<int>> photoData, String todoId,
+    {String mediaType = 'image/jpeg'}) async {
+  // Save the file using the AttachmentQueue API
+  return await attachmentQueue.saveFile(
+    data: photoData,
+    mediaType: mediaType,
+    fileExtension: 'jpg',
+    metaData: 'Photo attachment for todo: $todoId',
+    updateHook: (context, attachment) async {
+      // Update the todo item to reference this attachment
+      await context.execute(
+        'UPDATE todos SET photo_id = ? WHERE id = ?',
+        [attachment.id, todoId],
+      );
+    },
+  );
+}
+```
+
+## Deleting attachments
+
+To delete attachments, it is sufficient to stop referencing them in the data model, e.g. via
+`UPDATE todos SET photo_id = NULL` in this example. The attachment sync implementation will eventually
+delete orphaned attachments from local storage.
diff --git a/packages/powersync_core/lib/attachments/attachments.dart b/packages/powersync_core/lib/attachments/attachments.dart
new file mode 100644
index 00000000..a69f9409
--- /dev/null
+++ b/packages/powersync_core/lib/attachments/attachments.dart
@@ -0,0 +1,12 @@
+/// Imports for attachments that are available on all platforms.
+///
+/// For more details on using attachments, see the documentation for the topic.
+///
+/// {@category attachments}
+library;
+
+export '../src/attachments/attachment.dart';
+export '../src/attachments/attachment_queue_service.dart';
+export '../src/attachments/local_storage.dart';
+export '../src/attachments/remote_storage.dart';
+export '../src/attachments/sync_error_handler.dart';
diff --git a/packages/powersync_core/lib/attachments/io.dart b/packages/powersync_core/lib/attachments/io.dart
new file mode 100644
index 00000000..142abb26
--- /dev/null
+++ b/packages/powersync_core/lib/attachments/io.dart
@@ -0,0 +1,12 @@
+/// A platform-specific import supporting attachments on native platforms.
+/// +/// This library exports the [IOLocalStorage] class, implementing the +/// [LocalStorage] interface by storing files under a root directory. +/// +/// {@category attachments} +library; + +import '../src/attachments/io_local_storage.dart'; +import '../src/attachments/local_storage.dart'; + +export '../src/attachments/io_local_storage.dart'; diff --git a/packages/powersync_core/lib/src/attachments/attachment.dart b/packages/powersync_core/lib/src/attachments/attachment.dart new file mode 100644 index 00000000..00f0032a --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/attachment.dart @@ -0,0 +1,197 @@ +/// Defines attachment states and the Attachment model for the PowerSync +/// attachments system. +/// +/// Includes metadata, state, and utility methods for working with attachments. +library; + +import 'package:meta/meta.dart'; +import 'package:powersync_core/sqlite3_common.dart' show Row; +import 'package:powersync_core/powersync_core.dart'; + +/// Represents the state of an attachment. +/// +/// {@category attachments} +@experimental +enum AttachmentState { + /// The attachment is queued for download from the remote storage. + queuedDownload, + + /// The attachment is queued for upload to the remote storage. + queuedUpload, + + /// The attachment is queued for deletion from the remote storage. + queuedDelete, + + /// The attachment is fully synchronized with the remote storage. + synced, + + /// The attachment is archived and no longer actively synchronized. + archived; + + /// Constructs an [AttachmentState] from the corresponding integer value. + /// + /// Throws [ArgumentError] if the value does not match any [AttachmentState]. + static AttachmentState fromInt(int value) { + if (value < 0 || value >= AttachmentState.values.length) { + throw ArgumentError('Invalid value for AttachmentState: $value'); + } + return AttachmentState.values[value]; + } + + /// Returns the ordinal value of this [AttachmentState]. + int toInt() => index; +} + +/// Represents an attachment with metadata and state information. +/// +/// {@category Attachments} +/// +/// Properties: +/// - [id]: Unique identifier for the attachment. +/// - [timestamp]: Timestamp of the last record update. +/// - [filename]: Name of the attachment file, e.g., `[id].jpg`. +/// - [state]: Current state of the attachment, represented as an ordinal of [AttachmentState]. +/// - [localUri]: Local URI pointing to the attachment file, if available. +/// - [mediaType]: Media type of the attachment, typically represented as a MIME type. +/// - [size]: Size of the attachment in bytes, if available. +/// - [hasSynced]: Indicates whether the attachment has been synced locally before. +/// - [metaData]: Additional metadata associated with the attachment. +/// +/// {@category attachments} +@experimental +final class Attachment { + /// Unique identifier for the attachment. + final String id; + + /// Timestamp of the last record update. + final int timestamp; + + /// Name of the attachment file, e.g., `[id].jpg`. + final String filename; + + /// Current state of the attachment, represented as an ordinal of [AttachmentState]. + final AttachmentState state; + + /// Local URI pointing to the attachment file, if available. + final String? localUri; + + /// Media type of the attachment, typically represented as a MIME type. + final String? mediaType; + + /// Size of the attachment in bytes, if available. + final int? size; + + /// Indicates whether the attachment has been synced locally before. 
+ final bool hasSynced; + + /// Additional metadata associated with the attachment. + final String? metaData; + + /// Creates an [Attachment] instance. + const Attachment({ + required this.id, + this.timestamp = 0, + required this.filename, + this.state = AttachmentState.queuedDownload, + this.localUri, + this.mediaType, + this.size, + this.hasSynced = false, + this.metaData, + }); + + /// Creates an [Attachment] instance from a database row. + /// + /// [row]: The [Row] containing attachment data. + /// Returns an [Attachment] instance populated with data from the row. + factory Attachment.fromRow(Row row) { + return Attachment( + id: row['id'] as String, + timestamp: row['timestamp'] as int? ?? 0, + filename: row['filename'] as String, + localUri: row['local_uri'] as String?, + mediaType: row['media_type'] as String?, + size: row['size'] as int?, + state: AttachmentState.fromInt(row['state'] as int), + hasSynced: (row['has_synced'] as int? ?? 0) > 0, + metaData: row['meta_data']?.toString(), + ); + } + + /// Returns a copy of this attachment with the given fields replaced. + Attachment copyWith({ + String? id, + int? timestamp, + String? filename, + AttachmentState? state, + String? localUri, + String? mediaType, + int? size, + bool? hasSynced, + String? metaData, + }) { + return Attachment( + id: id ?? this.id, + timestamp: timestamp ?? this.timestamp, + filename: filename ?? this.filename, + state: state ?? this.state, + localUri: localUri ?? this.localUri, + mediaType: mediaType ?? this.mediaType, + size: size ?? this.size, + hasSynced: hasSynced ?? this.hasSynced, + metaData: metaData ?? this.metaData, + ); + } + + Attachment markAsUnavailableLocally(AttachmentState newState) { + return Attachment( + id: id, + timestamp: timestamp, + filename: filename, + state: newState, + localUri: null, + mediaType: mediaType, + size: size, + hasSynced: false, + metaData: metaData, + ); + } + + @override + String toString() { + return 'Attachment(id: $id, state: $state, localUri: $localUri, metadata: $metaData)'; + } +} + +/// Table definition for the attachments queue. +/// +/// The columns in this table are used by the attachments implementation to +/// store which attachments have been download and tracks metadata for state. +/// +/// {@category attachments} +@experimental +final class AttachmentsQueueTable extends Table { + AttachmentsQueueTable({ + String attachmentsQueueTableName = defaultTableName, + List additionalColumns = const [], + List indexes = const [], + String? viewName, + }) : super.localOnly( + attachmentsQueueTableName, + [ + const Column.text('filename'), + const Column.text('local_uri'), + const Column.integer('timestamp'), + const Column.integer('size'), + const Column.text('media_type'), + const Column.integer('state'), + const Column.integer('has_synced'), + const Column.text('meta_data'), + ...additionalColumns, + ], + viewName: viewName, + indexes: indexes, + ); + + static const defaultTableName = 'attachments_queue'; +} diff --git a/packages/powersync_core/lib/src/attachments/attachment_queue_service.dart b/packages/powersync_core/lib/src/attachments/attachment_queue_service.dart new file mode 100644 index 00000000..0ec673e0 --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/attachment_queue_service.dart @@ -0,0 +1,443 @@ +// Implements the attachment queue for PowerSync attachments. 
+// +// This class manages the lifecycle of attachment records, including watching for new attachments, +// syncing with remote storage, handling uploads, downloads, and deletes, and managing local storage. +// It provides hooks for error handling, cache management, and custom filename resolution. + +import 'dart:async'; + +import 'package:logging/logging.dart'; +import 'package:meta/meta.dart'; +import 'package:powersync_core/powersync_core.dart'; +import 'package:sqlite_async/sqlite_async.dart'; + +import 'attachment.dart'; +import 'implementations/attachment_context.dart'; +import 'local_storage.dart'; +import 'remote_storage.dart'; +import 'sync_error_handler.dart'; +import 'implementations/attachment_service.dart'; +import 'sync/syncing_service.dart'; + +/// A watched attachment record item. +/// +/// This is usually returned from watching all relevant attachment IDs. +/// +/// - [id]: Id for the attachment record. +/// - [fileExtension]: File extension used to determine an internal filename for storage if no [filename] is provided. +/// - [filename]: Filename to store the attachment with. +/// - [metaData]: Optional metadata for the attachment record. +/// +/// {@category attachments} +@experimental +final class WatchedAttachmentItem { + /// Id for the attachment record. + final String id; + + /// File extension used to determine an internal filename for storage if no [filename] is provided. + final String? fileExtension; + + /// Filename to store the attachment with. + final String? filename; + + /// Optional metadata for the attachment record. + final String? metaData; + + /// Creates a [WatchedAttachmentItem]. + /// + /// Either [fileExtension] or [filename] must be provided. + const WatchedAttachmentItem({ + required this.id, + this.fileExtension, + this.filename, + this.metaData, + }) : assert( + fileExtension != null || filename != null, + 'Either fileExtension or filename must be provided.', + ); +} + +/// Class used to implement the attachment queue. +/// +/// Manages the lifecycle of attachment records, including watching for new attachments, +/// syncing with remote storage, handling uploads, downloads, and deletes, and managing local storage. +/// +/// {@category attachments} +@experimental +base class AttachmentQueue { + final PowerSyncDatabase _db; + final Stream> Function() _watchAttachments; + final LocalStorage _localStorage; + final bool _downloadAttachments; + final Logger _logger; + + final Mutex _mutex = Mutex(); + bool _closed = false; + StreamSubscription? _syncStatusSubscription; + StreamSubscription? _watchedAttachmentsSubscription; + final AttachmentService _attachmentsService; + final SyncingService _syncingService; + + AttachmentQueue._( + {required PowerSyncDatabase db, + required Stream> Function() watchAttachments, + required LocalStorage localStorage, + required bool downloadAttachments, + required Logger logger, + required AttachmentService attachmentsService, + required SyncingService syncingService}) + : _db = db, + _watchAttachments = watchAttachments, + _localStorage = localStorage, + _downloadAttachments = downloadAttachments, + _logger = logger, + _attachmentsService = attachmentsService, + _syncingService = syncingService; + + /// Creates a new attachment queue. + /// + /// Parameters: + /// + /// - [db]: PowerSync database client. + /// - [remoteStorage]: Adapter which interfaces with the remote storage backend. + /// - [watchAttachments]: A stream generator for the current state of local attachments. 
+  /// - [localStorage]: Provides access to local filesystem storage methods.
+  /// - [attachmentsQueueTableName]: SQLite table where attachment state will be recorded.
+  /// - [errorHandler]: Attachment operation error handler. Specifies whether failed attachment operations should be retried.
+  /// - [syncInterval]: Periodic interval to trigger attachment sync operations.
+  /// - [archivedCacheLimit]: Defines how many archived records are retained as a cache.
+  /// - [syncThrottleDuration]: Throttles how frequently remote sync operations are triggered.
+  /// - [downloadAttachments]: Whether attachments should be downloaded.
+  /// - [logger]: Logging interface used for all log operations.
+  factory AttachmentQueue({
+    required PowerSyncDatabase db,
+    required RemoteStorage remoteStorage,
+    required Stream<List<WatchedAttachmentItem>> Function() watchAttachments,
+    required LocalStorage localStorage,
+    String attachmentsQueueTableName = AttachmentsQueueTable.defaultTableName,
+    AttachmentErrorHandler? errorHandler,
+    Duration syncInterval = const Duration(seconds: 30),
+    int archivedCacheLimit = 100,
+    Duration syncThrottleDuration = const Duration(seconds: 1),
+    bool downloadAttachments = true,
+    Logger? logger,
+  }) {
+    final resolvedLogger = logger ?? db.logger;
+
+    final attachmentsService = AttachmentService(
+      db: db,
+      logger: resolvedLogger,
+      maxArchivedCount: archivedCacheLimit,
+      attachmentsQueueTableName: attachmentsQueueTableName,
+    );
+    final syncingService = SyncingService(
+      remoteStorage: remoteStorage,
+      localStorage: localStorage,
+      attachmentsService: attachmentsService,
+      errorHandler: errorHandler,
+      syncThrottle: syncThrottleDuration,
+      period: syncInterval,
+      logger: resolvedLogger,
+    );
+
+    return AttachmentQueue._(
+      db: db,
+      watchAttachments: watchAttachments,
+      localStorage: localStorage,
+      downloadAttachments: downloadAttachments,
+      logger: resolvedLogger,
+      attachmentsService: attachmentsService,
+      syncingService: syncingService,
+    );
+  }
+
+  /// Initializes the attachment queue by:
+  /// 1. Creating the attachments directory.
+  /// 2. Adding watches for uploads, downloads, and deletes.
+  /// 3. Adding a trigger to run uploads, downloads, and deletes when the device is online after being offline.
+  Future startSync() async {
+    await _mutex.lock(() async {
+      if (_closed) {
+        throw StateError('Attachment queue has been closed');
+      }
+
+      await _stopSyncingInternal();
+
+      await _localStorage.initialize();
+
+      await _attachmentsService.withContext((context) async {
+        await _verifyAttachments(context);
+      });
+
+      // Listen for connectivity changes and watched attachments
+      await _syncingService.startSync();
+
+      _watchedAttachmentsSubscription =
+          _watchAttachments().listen((items) async {
+        await _processWatchedAttachments(items);
+      });
+
+      var previouslyConnected = _db.currentStatus.connected;
+      _syncStatusSubscription = _db.statusStream.listen((status) {
+        if (!previouslyConnected && status.connected) {
+          _syncingService.triggerSync();
+        }
+
+        previouslyConnected = status.connected;
+      });
+
+      _logger.info('AttachmentQueue started syncing.');
+    });
+  }
+
+  /// Stops syncing. Syncing may be resumed with [startSync].
+ Future stopSyncing() async { + await _mutex.lock(() async { + await _stopSyncingInternal(); + }); + } + + Future _stopSyncingInternal() async { + if (_closed || + _syncStatusSubscription == null || + _watchedAttachmentsSubscription == null) { + return; + } + + await ( + _syncStatusSubscription!.cancel(), + _watchedAttachmentsSubscription!.cancel(), + ).wait; + + _syncStatusSubscription = null; + _watchedAttachmentsSubscription = null; + await _syncingService.stopSync(); + + _logger.info('AttachmentQueue stopped syncing.'); + } + + /// Closes the queue. The queue cannot be used after closing. + Future close() async { + await _mutex.lock(() async { + if (_closed) return; + + await _stopSyncingInternal(); + _closed = true; + _logger.info('AttachmentQueue closed.'); + }); + } + + /// Resolves the filename for new attachment items. + /// Concatenates the attachment ID and extension by default. + Future resolveNewAttachmentFilename( + String attachmentId, + String? fileExtension, + ) async { + return '$attachmentId.${fileExtension ?? 'dat'}'; + } + + /// Processes attachment items returned from `watchAttachments`. + /// + /// The default implementation asserts the items returned from + /// `watchAttachments` as the definitive state for local attachments. + Future _processWatchedAttachments( + List items, + ) async { + await _attachmentsService.withContext((context) async { + final currentAttachments = await context.getAttachments(); + final List attachmentUpdates = []; + + for (final item in items) { + final existingQueueItem = + currentAttachments.where((a) => a.id == item.id).firstOrNull; + + if (existingQueueItem == null) { + if (!_downloadAttachments) continue; + + // This item should be added to the queue. + // This item is assumed to be coming from an upstream sync. + final String filename = item.filename ?? + await resolveNewAttachmentFilename(item.id, item.fileExtension); + + attachmentUpdates.add( + Attachment( + id: item.id, + filename: filename, + state: AttachmentState.queuedDownload, + metaData: item.metaData, + ), + ); + } else if (existingQueueItem.state == AttachmentState.archived) { + // The attachment is present again. Need to queue it for sync. + if (existingQueueItem.hasSynced) { + // No remote action required, we can restore the record (avoids deletion). + attachmentUpdates.add( + existingQueueItem.copyWith(state: AttachmentState.synced), + ); + } else { + // The localURI should be set if the record was meant to be downloaded + // and has been synced. If it's missing and hasSynced is false then + // it must be an upload operation. + attachmentUpdates.add( + existingQueueItem.copyWith( + state: existingQueueItem.localUri == null + ? AttachmentState.queuedDownload + : AttachmentState.queuedUpload, + ), + ); + } + } + } + + // Archive any items not specified in the watched items. + // For queuedDelete or queuedUpload states, archive only if hasSynced is true. + // For other states, archive if the record is not found in the items. 
+ for (final attachment in currentAttachments) { + final notInWatchedItems = items.every( + (update) => update.id != attachment.id, + ); + + if (notInWatchedItems) { + switch (attachment.state) { + case AttachmentState.queuedDelete: + case AttachmentState.queuedUpload: + if (attachment.hasSynced) { + attachmentUpdates.add( + attachment.copyWith(state: AttachmentState.archived), + ); + } + default: + attachmentUpdates.add( + attachment.copyWith(state: AttachmentState.archived), + ); + } + } + } + + await context.saveAttachments(attachmentUpdates); + }); + } + + /// Generates a random attachment id. + Future generateAttachmentId() async { + final row = await _db.get('SELECT uuid() as id'); + return row['id'] as String; + } + + /// Creates a new attachment locally and queues it for upload. + /// The filename is resolved using [resolveNewAttachmentFilename]. + Future saveFile({ + required Stream> data, + required String mediaType, + String? fileExtension, + String? metaData, + String? id, + required Future Function( + SqliteWriteContext context, Attachment attachment) + updateHook, + }) async { + final resolvedId = id ?? await generateAttachmentId(); + + final filename = await resolveNewAttachmentFilename( + resolvedId, + fileExtension, + ); + + // Write the file to the filesystem. + final fileSize = await _localStorage.saveFile(filename, data); + + return await _attachmentsService.withContext((attachmentContext) async { + return await _db.writeTransaction((tx) async { + final attachment = Attachment( + id: resolvedId, + filename: filename, + size: fileSize, + mediaType: mediaType, + state: AttachmentState.queuedUpload, + localUri: filename, + metaData: metaData, + ); + + // Allow consumers to set relationships to this attachment ID. + await updateHook(tx, attachment); + + return await attachmentContext.upsertAttachment(attachment, tx); + }); + }); + } + + /// Queues an attachment for delete. + /// The default implementation assumes the attachment record already exists locally. + Future deleteFile({ + required String attachmentId, + required Future Function( + SqliteWriteContext context, Attachment attachment) + updateHook, + }) async { + return await _attachmentsService.withContext((attachmentContext) async { + final attachment = await attachmentContext.getAttachment(attachmentId); + if (attachment == null) { + throw Exception( + 'Attachment record with id $attachmentId was not found.', + ); + } + + return await _db.writeTransaction((tx) async { + await updateHook(tx, attachment); + return await attachmentContext.upsertAttachment( + attachment.copyWith( + state: AttachmentState.queuedDelete, + hasSynced: false, + ), + tx, + ); + }); + }); + } + + /// Removes all archived items. + Future expireCache() async { + await _attachmentsService.withContext((context) async { + bool done; + do { + done = await _syncingService.deleteArchivedAttachments(context); + } while (!done); + }); + } + + /// Clears the attachment queue and deletes all attachment files. + Future clearQueue() async { + await _attachmentsService.withContext((context) async { + await context.clearQueue(); + }); + await _localStorage.clear(); + } + + /// Cleans up stale attachments. 
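+  ///
+  /// Attachments that are expected to have a local file (synced or queued for
+  /// upload) but whose file no longer exists are archived and their local URI
+  /// is cleared.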
+  Future _verifyAttachments(AttachmentContext context) async {
+    final attachments = await context.getActiveAttachments();
+    final List<Attachment> updates = [];
+
+    for (final attachment in attachments) {
+      // Only check attachments that should have local files
+      if (attachment.localUri == null) {
+        // Skip attachments that don't have localUri (like queued downloads)
+        continue;
+      }
+
+      final exists = await _localStorage.fileExists(attachment.localUri!);
+      if ((attachment.state == AttachmentState.synced ||
+              attachment.state == AttachmentState.queuedUpload) &&
+          !exists) {
+        updates.add(
+          attachment.markAsUnavailableLocally(AttachmentState.archived),
+        );
+      }
+    }
+
+    await context.saveAttachments(updates);
+  }
+}
diff --git a/packages/powersync_core/lib/src/attachments/implementations/attachment_context.dart b/packages/powersync_core/lib/src/attachments/implementations/attachment_context.dart
new file mode 100644
index 00000000..9428d79c
--- /dev/null
+++ b/packages/powersync_core/lib/src/attachments/implementations/attachment_context.dart
@@ -0,0 +1,160 @@
+import 'package:powersync_core/powersync_core.dart';
+import 'package:powersync_core/sqlite3_common.dart';
+import 'package:logging/logging.dart';
+import 'package:sqlite_async/sqlite_async.dart';
+import 'package:meta/meta.dart';
+
+import '../attachment.dart';
+
+@internal
+final class AttachmentContext {
+  final PowerSyncDatabase db;
+  final Logger log;
+  final int maxArchivedCount;
+  final String attachmentsQueueTableName;
+
+  AttachmentContext(
+    this.db,
+    this.log,
+    this.maxArchivedCount,
+    this.attachmentsQueueTableName,
+  );
+
+  /// Table used for storing attachments in the attachment queue.
+  String get table {
+    return attachmentsQueueTableName;
+  }
+
+  Future deleteAttachment(String id) async {
+    log.info('deleteAttachment: $id');
+    await db.writeTransaction((tx) async {
+      await tx.execute('DELETE FROM $table WHERE id = ?', [id]);
+    });
+  }
+
+  Future ignoreAttachment(String id) async {
+    await db.execute(
+      'UPDATE $table SET state = ${AttachmentState.archived.index} WHERE id = ?',
+      [id],
+    );
+  }
+
+  Future getAttachment(String id) async {
+    final row =
+        await db.getOptional('SELECT * FROM $table WHERE id = ?', [id]);
+    if (row == null) {
+      return null;
+    }
+    return Attachment.fromRow(row);
+  }
+
+  Future saveAttachment(Attachment attachment) async {
+    return await db.writeLock((ctx) async {
+      return await upsertAttachment(attachment, ctx);
+    });
+  }
+
+  Future saveAttachments(List<Attachment> attachments) async {
+    if (attachments.isEmpty) {
+      log.finer('No attachments to save.');
+      return;
+    }
+    await db.writeTransaction((tx) async {
+      for (final attachment in attachments) {
+        await upsertAttachment(attachment, tx);
+      }
+    });
+  }
+
+  Future<List<String>> getAttachmentIds() async {
+    ResultSet results = await db.getAll(
+      'SELECT id FROM $table WHERE id IS NOT NULL',
+    );
+
+    List<String> ids = results.map((row) => row['id'] as String).toList();
+
+    return ids;
+  }
+
+  Future<List<Attachment>> getAttachments() async {
+    final results = await db.getAll('SELECT * FROM $table');
+    return results.map((row) => Attachment.fromRow(row)).toList();
+  }
+
+  Future<List<Attachment>> getActiveAttachments() async {
+    // Return all attachments that are not archived (i.e., state != AttachmentState.archived)
+    final results = await db.getAll('SELECT * FROM $table WHERE state != ?', [
+      AttachmentState.archived.index,
+    ]);
+    return results.map((row) => Attachment.fromRow(row)).toList();
+  }
+
+  Future clearQueue() async {
+    log.info('Clearing attachment queue...');
+    await db.execute('DELETE FROM $table');
+  }
+ + Future deleteArchivedAttachments( + Future Function(List) callback, + ) async { + // Only delete archived attachments exceeding the maxArchivedCount, ordered by timestamp DESC + const limit = 1000; + + final results = await db.getAll( + 'SELECT * FROM $table WHERE state = ? ORDER BY timestamp DESC LIMIT ? OFFSET ?', + [ + AttachmentState.archived.index, + limit, + maxArchivedCount, + ], + ); + final archivedAttachments = + results.map((row) => Attachment.fromRow(row)).toList(); + + if (archivedAttachments.isEmpty) { + return false; + } + + log.info( + 'Deleting ${archivedAttachments.length} archived attachments (exceeding maxArchivedCount=$maxArchivedCount)...'); + // Call the callback with the list of archived attachments before deletion + await callback(archivedAttachments); + + // Delete the archived attachments from the table + final ids = archivedAttachments.map((a) => a.id).toList(); + if (ids.isNotEmpty) { + await db.executeBatch('DELETE FROM $table WHERE id = ?', [ + for (final id in ids) [id], + ]); + } + + log.info('Deleted ${archivedAttachments.length} archived attachments.'); + return archivedAttachments.length < limit; + } + + Future upsertAttachment( + Attachment attachment, + SqliteWriteContext context, + ) async { + log.finest('Updating attachment ${attachment.id}: ${attachment.state}'); + + await context.execute( + '''INSERT OR REPLACE INTO + $table (id, timestamp, filename, local_uri, media_type, size, state, has_synced, meta_data) + VALUES + (?, ?, ?, ?, ?, ?, ?, ?, ?)''', + [ + attachment.id, + attachment.timestamp, + attachment.filename, + attachment.localUri, + attachment.mediaType, + attachment.size, + attachment.state.index, + attachment.hasSynced ? 1 : 0, + attachment.metaData, + ], + ); + + return attachment; + } +} diff --git a/packages/powersync_core/lib/src/attachments/implementations/attachment_service.dart b/packages/powersync_core/lib/src/attachments/implementations/attachment_service.dart new file mode 100644 index 00000000..0ccafd75 --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/implementations/attachment_service.dart @@ -0,0 +1,75 @@ +import 'dart:async'; + +import 'package:meta/meta.dart'; +import 'package:logging/logging.dart'; +import 'package:powersync_core/powersync_core.dart'; +import 'package:sqlite_async/sqlite_async.dart'; + +import '../attachment.dart'; +import 'attachment_context.dart'; + +@internal +final class AttachmentService { + final PowerSyncDatabase db; + final Logger logger; + final int maxArchivedCount; + final String attachmentsQueueTableName; + final Mutex _mutex = Mutex(); + + late final AttachmentContext _context; + + AttachmentService({ + required this.db, + required this.logger, + required this.maxArchivedCount, + required this.attachmentsQueueTableName, + }) { + _context = AttachmentContext( + db, + logger, + maxArchivedCount, + attachmentsQueueTableName, + ); + } + + Stream watchActiveAttachments({Duration? throttle}) async* { + logger.info('Watching attachments...'); + + // Watch for attachments with active states (queued for upload, download, or delete) + final stream = db.watch( + ''' + SELECT + id + FROM + $attachmentsQueueTableName + WHERE + state = ? + OR state = ? + OR state = ? + ORDER BY + timestamp ASC + ''', + parameters: [ + AttachmentState.queuedUpload.index, + AttachmentState.queuedDownload.index, + AttachmentState.queuedDelete.index, + ], + throttle: throttle ?? 
const Duration(milliseconds: 30), + ); + + yield* stream; + } + + Future withContext( + Future Function(AttachmentContext ctx) action, + ) async { + return await _mutex.lock(() async { + try { + return await action(_context); + } catch (e, stackTrace) { + // Re-throw the error to be handled by the caller + Error.throwWithStackTrace(e, stackTrace); + } + }); + } +} diff --git a/packages/powersync_core/lib/src/attachments/io_local_storage.dart b/packages/powersync_core/lib/src/attachments/io_local_storage.dart new file mode 100644 index 00000000..67d1b578 --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/io_local_storage.dart @@ -0,0 +1,93 @@ +import 'dart:async'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:meta/meta.dart'; +import 'package:path/path.dart' as p; + +import 'local_storage.dart'; + +/// Implements [LocalStorage] for device filesystem using Dart IO. +/// +/// Handles file and directory operations for attachments. The database only +/// stores relative paths for attachments that this implementation resolves +/// against the root path provided as a constructor argument. For that reason, +/// it's important that the root directory stays consistent, as data may be lost +/// otherwise. +/// +/// {@category attachments} +@experimental +final class IOLocalStorage implements LocalStorage { + final Directory _root; + + const IOLocalStorage(this._root); + + File _fileFor(String filePath) => File(p.join(_root.path, filePath)); + + @override + Future saveFile(String filePath, Stream> data) async { + final file = _fileFor(filePath); + await file.parent.create(recursive: true); + return (await data.pipe(_LengthTrackingSink(file.openWrite()))) as int; + } + + @override + Stream readFile(String filePath, {String? mediaType}) async* { + final file = _fileFor(filePath); + if (!await file.exists()) { + throw FileSystemException('File does not exist', filePath); + } + final source = file.openRead(); + await for (final chunk in source) { + yield chunk is Uint8List ? chunk : Uint8List.fromList(chunk); + } + } + + @override + Future deleteFile(String filePath) async { + final file = _fileFor(filePath); + if (await file.exists()) { + await file.delete(); + } + } + + @override + Future fileExists(String filePath) async { + return await _fileFor(filePath).exists(); + } + + /// Creates a directory and all necessary parent directories dynamically if they do not exist. 
+ @override + Future initialize() async { + await _root.create(recursive: true); + } + + @override + Future clear() async { + if (await _root.exists()) { + await _root.delete(recursive: true); + } + await _root.create(recursive: true); + } +} + +final class _LengthTrackingSink implements StreamConsumer> { + final StreamConsumer> inner; + var bytesWritten = 0; + + _LengthTrackingSink(this.inner); + + @override + Future addStream(Stream> stream) { + return inner.addStream(stream.map((event) { + bytesWritten += event.length; + return event; + })); + } + + @override + Future close() async { + await inner.close(); + return bytesWritten; + } +} diff --git a/packages/powersync_core/lib/src/attachments/local_storage.dart b/packages/powersync_core/lib/src/attachments/local_storage.dart new file mode 100644 index 00000000..c43db1ef --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/local_storage.dart @@ -0,0 +1,102 @@ +/// @docImport 'package:powersync_core/attachments/io.dart'; +library; + +import 'dart:typed_data'; + +import 'package:meta/meta.dart'; +import 'package:path/path.dart' as p; + +/// An interface responsible for storing attachment data locally. +/// +/// This interface is only responsible for storing attachment content, +/// essentially acting as a key-value store of virtual paths to blobs. +/// +/// On native platforms, you can use the [IOLocalStorage] implemention. On the +/// web, no default implementation is available at the moment. +/// +/// {@category attachments} +@experimental +abstract interface class LocalStorage { + /// Returns an in-memory [LocalStorage] implementation, suitable for testing. + factory LocalStorage.inMemory() = _InMemoryStorage; + + /// Saves binary data stream to storage at the specified file path + /// + /// [filePath] - Path where the file will be stored + /// [data] - List of binary data to store + /// Returns the total size of the written data in bytes + Future saveFile(String filePath, Stream> data); + + /// Retrieves binary data stream from storage at the specified file path + /// + /// [filePath] - Path of the file to read + /// + /// Returns a stream of binary data + Stream readFile(String filePath); + + /// Deletes a file at the specified path + /// + /// [filePath] - Path of the file to delete + Future deleteFile(String filePath); + + /// Checks if a file exists at the specified path + /// + /// [filePath] - Path to check + /// + /// Returns true if the file exists, false otherwise + Future fileExists(String filePath); + + /// Initializes the storage, performing any necessary setup. + Future initialize(); + + /// Clears all data from the storage. 
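+  ///
+  /// Implementations should leave the storage ready for reuse afterwards; for
+  /// example, [IOLocalStorage] recreates its base directory after deleting it.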
+ Future clear(); +} + +final class _InMemoryStorage implements LocalStorage { + final Map content = {}; + + String _keyForPath(String path) { + return p.normalize(path); + } + + @override + Future clear() async { + content.clear(); + } + + @override + Future deleteFile(String filePath) async { + content.remove(_keyForPath(filePath)); + } + + @override + Future fileExists(String filePath) async { + return content.containsKey(_keyForPath(filePath)); + } + + @override + Future initialize() async {} + + @override + Stream readFile(String filePath) { + return switch (content[_keyForPath(filePath)]) { + null => + Stream.error('file at $filePath does not exist in in-memory storage'), + final contents => Stream.value(contents), + }; + } + + @override + Future saveFile(String filePath, Stream> data) async { + var length = 0; + final builder = BytesBuilder(copy: false); + await for (final chunk in data) { + length += chunk.length; + builder.add(chunk); + } + + content[_keyForPath(filePath)] = builder.takeBytes(); + return length; + } +} diff --git a/packages/powersync_core/lib/src/attachments/remote_storage.dart b/packages/powersync_core/lib/src/attachments/remote_storage.dart new file mode 100644 index 00000000..35818b9b --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/remote_storage.dart @@ -0,0 +1,34 @@ +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:meta/meta.dart'; + +import 'attachment.dart'; + +/// An interface responsible for uploading and downloading attachments from a +/// remote source, like e.g. S3 or Firebase cloud storage. +/// +/// {@category attachments} +@experimental +abstract interface class RemoteStorage { + /// Uploads a file to remote storage. + /// + /// [fileData] is a stream of byte arrays representing the file data. + /// [attachment] is the attachment record associated with the file. + Future uploadFile( + Stream fileData, + Attachment attachment, + ); + + /// Downloads a file from remote storage. + /// + /// [attachment] is the attachment record associated with the file. + /// + /// Returns a stream of byte arrays representing the file data. + Future>> downloadFile(Attachment attachment); + + /// Deletes a file from remote storage. + /// + /// [attachment] is the attachment record associated with the file. + Future deleteFile(Attachment attachment); +} diff --git a/packages/powersync_core/lib/src/attachments/sync/syncing_service.dart b/packages/powersync_core/lib/src/attachments/sync/syncing_service.dart new file mode 100644 index 00000000..4bc1a266 --- /dev/null +++ b/packages/powersync_core/lib/src/attachments/sync/syncing_service.dart @@ -0,0 +1,281 @@ +import 'dart:async'; + +import 'package:meta/meta.dart'; +import 'package:logging/logging.dart'; +import 'package:async/async.dart'; + +import '../attachment.dart'; +import '../implementations/attachment_context.dart'; +import '../implementations/attachment_service.dart'; +import '../local_storage.dart'; +import '../remote_storage.dart'; +import '../sync_error_handler.dart'; + +/// SyncingService is responsible for syncing attachments between local and remote storage. +/// +/// This service handles downloading, uploading, and deleting attachments, as well as +/// periodically syncing attachment states. It ensures proper lifecycle management +/// of sync operations and provides mechanisms for error handling and retries. +/// +/// Properties: +/// - [remoteStorage]: The remote storage implementation for handling file operations. 
+/// - [localStorage]: The local storage implementation for managing files locally. +/// - [attachmentsService]: The service for managing attachment states and operations. +/// - [errorHandler]: Optional error handler for managing sync-related errors. +@internal +final class SyncingService { + final RemoteStorage remoteStorage; + final LocalStorage localStorage; + final AttachmentService attachmentsService; + final AttachmentErrorHandler? errorHandler; + final Duration syncThrottle; + final Duration period; + final Logger logger; + + StreamSubscription? _syncSubscription; + StreamSubscription? _periodicSubscription; + bool _isClosed = false; + final _syncTriggerController = StreamController.broadcast(); + + SyncingService({ + required this.remoteStorage, + required this.localStorage, + required this.attachmentsService, + this.errorHandler, + this.syncThrottle = const Duration(seconds: 5), + this.period = const Duration(seconds: 30), + required this.logger, + }); + + /// Starts the syncing process, including periodic and event-driven sync operations. + Future startSync() async { + if (_isClosed) return; + + _syncSubscription?.cancel(); + _periodicSubscription?.cancel(); + + // Create a merged stream of manual triggers and attachment changes + final attachmentChanges = attachmentsService.watchActiveAttachments( + throttle: syncThrottle, + ); + final manualTriggers = _syncTriggerController.stream; + + late StreamSubscription sub; + final syncStream = + StreamGroup.merge([attachmentChanges, manualTriggers]) + .takeWhile((_) => sub == _syncSubscription) + .asyncMap((_) async { + await attachmentsService.withContext((context) async { + final attachments = await context.getActiveAttachments(); + logger.info('Found ${attachments.length} active attachments'); + await handleSync(attachments, context); + await deleteArchivedAttachments(context); + }); + }); + + _syncSubscription = sub = syncStream.listen(null); + + // Start periodic sync using instance period + _periodicSubscription = Stream.periodic(period, (_) {}).listen(( + _, + ) { + logger.info('Periodically syncing attachments'); + triggerSync(); + }); + } + + /// Enqueues a sync operation (manual trigger). + void triggerSync() { + if (!_isClosed) _syncTriggerController.add(null); + } + + /// Stops all ongoing sync operations. + Future stopSync() async { + await _periodicSubscription?.cancel(); + + final subscription = _syncSubscription; + // Add a trigger event after clearing the subscription, which will make + // the takeWhile() callback cancel. This allows us to use asFuture() here, + // ensuring that we only complete this future when the stream is actually + // done. + _syncSubscription = null; + _syncTriggerController.add(null); + await subscription?.asFuture(); + } + + /// Closes the syncing service, stopping all operations and releasing resources. + Future close() async { + _isClosed = true; + await stopSync(); + await _syncTriggerController.close(); + } + + /// Handles syncing operations for a list of attachments, including downloading, + /// uploading, and deleting files based on their states. + /// + /// [attachments]: The list of attachments to process. + /// [context]: The attachment context used for managing attachment states. 
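+  ///
+  /// Errors while processing an individual attachment are logged and do not
+  /// abort processing of the remaining attachments in the batch.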
+ Future handleSync( + List attachments, + AttachmentContext context, + ) async { + logger.info('Starting handleSync with ${attachments.length} attachments'); + final updatedAttachments = []; + + for (final attachment in attachments) { + logger.info( + 'Processing attachment ${attachment.id} with state: ${attachment.state}', + ); + try { + switch (attachment.state) { + case AttachmentState.queuedDownload: + logger.info('Downloading [${attachment.filename}]'); + updatedAttachments.add(await downloadAttachment(attachment)); + break; + case AttachmentState.queuedUpload: + logger.info('Uploading [${attachment.filename}]'); + updatedAttachments.add(await uploadAttachment(attachment)); + break; + case AttachmentState.queuedDelete: + logger.info('Deleting [${attachment.filename}]'); + updatedAttachments.add(await deleteAttachment(attachment, context)); + break; + case AttachmentState.synced: + logger.info('Attachment ${attachment.id} is already synced'); + break; + case AttachmentState.archived: + logger.info('Attachment ${attachment.id} is archived'); + break; + } + } catch (e, st) { + logger.warning('Error during sync for ${attachment.id}', e, st); + } + } + + if (updatedAttachments.isNotEmpty) { + logger.info('Saving ${updatedAttachments.length} updated attachments'); + await context.saveAttachments(updatedAttachments); + } + } + + /// Uploads an attachment from local storage to remote storage. + /// + /// [attachment]: The attachment to upload. + /// Returns the updated attachment with its new state. + Future uploadAttachment(Attachment attachment) async { + logger.info('Starting upload for attachment ${attachment.id}'); + try { + if (attachment.localUri == null) { + throw Exception('No localUri for attachment $attachment'); + } + await remoteStorage.uploadFile( + localStorage.readFile(attachment.localUri!), + attachment, + ); + logger.info( + 'Successfully uploaded attachment "${attachment.id}" to Cloud Storage', + ); + return attachment.copyWith( + state: AttachmentState.synced, + hasSynced: true, + ); + } catch (e, st) { + logger.warning( + 'Upload attachment error for attachment $attachment', + e, + st, + ); + if (errorHandler != null) { + final shouldRetry = + await errorHandler!.onUploadError(attachment, e, st); + if (!shouldRetry) { + logger.info('Attachment with ID ${attachment.id} has been archived'); + return attachment.copyWith(state: AttachmentState.archived); + } + } + return attachment; + } + } + + /// Downloads an attachment from remote storage and saves it to local storage. + /// + /// [attachment]: The attachment to download. + /// Returns the updated attachment with its new state. 
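+  ///
+  /// If the download fails and the [errorHandler] requests a retry (or no
+  /// handler is configured), the attachment keeps its queued state so that the
+  /// next sync pass will retry it.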
+  Future<Attachment> downloadAttachment(Attachment attachment) async {
+    logger.info('Starting download for attachment ${attachment.id}');
+    final attachmentPath = attachment.filename;
+    try {
+      final fileStream = await remoteStorage.downloadFile(attachment);
+      await localStorage.saveFile(attachmentPath, fileStream);
+      logger.info('Successfully downloaded file "${attachment.id}"');
+
+      return attachment.copyWith(
+        localUri: attachmentPath,
+        state: AttachmentState.synced,
+        hasSynced: true,
+      );
+    } catch (e, st) {
+      if (errorHandler != null) {
+        final shouldRetry =
+            await errorHandler!.onDownloadError(attachment, e, st);
+        if (!shouldRetry) {
+          logger.info('Attachment with ID ${attachment.id} has been archived');
+          return attachment.copyWith(state: AttachmentState.archived);
+        }
+      }
+      logger.warning(
+        'Download attachment error for attachment $attachment',
+        e,
+        st,
+      );
+      return attachment;
+    }
+  }
+
+  /// Deletes an attachment from remote and local storage, and removes it from the queue.
+  ///
+  /// [attachment]: The attachment to delete.
+  /// Returns the updated attachment with its new state.
+  Future<Attachment> deleteAttachment(
+      Attachment attachment, AttachmentContext context) async {
+    try {
+      logger.info('Deleting attachment ${attachment.id} from remote storage');
+      await remoteStorage.deleteFile(attachment);
+
+      if (attachment.localUri != null &&
+          await localStorage.fileExists(attachment.localUri!)) {
+        await localStorage.deleteFile(attachment.localUri!);
+      }
+      // Remove the attachment record from the queue in a transaction.
+      await context.deleteAttachment(attachment.id);
+      return attachment.copyWith(state: AttachmentState.archived);
+    } catch (e, st) {
+      if (errorHandler != null) {
+        final shouldRetry =
+            await errorHandler!.onDeleteError(attachment, e, st);
+        if (!shouldRetry) {
+          logger.info('Attachment with ID ${attachment.id} has been archived');
+          return attachment.copyWith(state: AttachmentState.archived);
+        }
+      }
+      logger.warning('Error deleting attachment: $e', e, st);
+      return attachment;
+    }
+  }
+
+  /// Deletes archived attachments from local storage.
+  ///
+  /// [context]: The attachment context used to retrieve and manage archived attachments.
+  /// Returns `true` if all archived attachments were successfully deleted, `false` otherwise.
+  Future<bool> deleteArchivedAttachments(
+    AttachmentContext context,
+  ) async {
+    return context.deleteArchivedAttachments((pendingDelete) async {
+      for (final attachment in pendingDelete) {
+        if (attachment.localUri == null) continue;
+        if (!await localStorage.fileExists(attachment.localUri!)) continue;
+        await localStorage.deleteFile(attachment.localUri!);
+      }
+    });
+  }
+}
diff --git a/packages/powersync_core/lib/src/attachments/sync_error_handler.dart b/packages/powersync_core/lib/src/attachments/sync_error_handler.dart
new file mode 100644
index 00000000..30aafcf4
--- /dev/null
+++ b/packages/powersync_core/lib/src/attachments/sync_error_handler.dart
@@ -0,0 +1,102 @@
+import 'package:meta/meta.dart';
+
+import 'attachment.dart';
+
+/// The signature of a function handling an exception when uploading,
+/// downloading, or deleting an attachment.
+///
+/// It returns `true` if the operation should be retried.
+///
+/// {@category attachments}
+typedef AttachmentExceptionHandler = Future<bool> Function(
+  Attachment attachment,
+  Object exception,
+  StackTrace stackTrace,
+);
+
+/// Interface for handling errors during attachment operations.
+/// Implementations determine whether failed operations should be retried.
+/// Attachment records are archived if an operation fails and should not be retried. +/// +/// {@category attachments} +@experimental +abstract interface class AttachmentErrorHandler { + /// Creates an implementation of an error handler by delegating to the + /// individual functions for delete, download and upload errors. + const factory AttachmentErrorHandler({ + required AttachmentExceptionHandler onDeleteError, + required AttachmentExceptionHandler onDownloadError, + required AttachmentExceptionHandler onUploadError, + }) = _FunctionBasedErrorHandler; + + /// Determines whether the provided attachment download operation should be retried. + /// + /// [attachment] The attachment involved in the failed download operation. + /// [exception] The exception that caused the download failure. + /// [stackTrace] The [StackTrace] when the exception was caught. + /// + /// Returns `true` if the download operation should be retried, `false` otherwise. + Future onDownloadError( + Attachment attachment, + Object exception, + StackTrace stackTrace, + ); + + /// Determines whether the provided attachment upload operation should be retried. + /// + /// [attachment] The attachment involved in the failed upload operation. + /// [exception] The exception that caused the upload failure. + /// [stackTrace] The [StackTrace] when the exception was caught. + /// + /// Returns `true` if the upload operation should be retried, `false` otherwise. + Future onUploadError( + Attachment attachment, + Object exception, + StackTrace stackTrace, + ); + + /// Determines whether the provided attachment delete operation should be retried. + /// + /// [attachment] The attachment involved in the failed delete operation. + /// [exception] The exception that caused the delete failure. + /// [stackTrace] The [StackTrace] when the exception was caught. + /// + /// Returns `true` if the delete operation should be retried, `false` otherwise. 
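+  ///
+  /// As an illustrative sketch (not part of this API), a handler might treat a
+  /// missing remote object as a permanent failure so the record is archived
+  /// instead of retried:
+  ///
+  /// ```dart
+  /// final errorHandler = AttachmentErrorHandler(
+  ///   onDownloadError: (attachment, exception, stackTrace) async => true,
+  ///   onUploadError: (attachment, exception, stackTrace) async => true,
+  ///   onDeleteError: (attachment, exception, stackTrace) async {
+  ///     // Hypothetical check; adapt to the remote storage backend in use.
+  ///     final alreadyGone = exception.toString().contains('404');
+  ///     return !alreadyGone; // retry only errors that might be transient
+  ///   },
+  /// );
+  /// ```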
+ Future onDeleteError( + Attachment attachment, + Object exception, + StackTrace stackTrace, + ); +} + +final class _FunctionBasedErrorHandler implements AttachmentErrorHandler { + final AttachmentExceptionHandler _onDeleteError; + final AttachmentExceptionHandler _onDownloadError; + final AttachmentExceptionHandler _onUploadError; + + const _FunctionBasedErrorHandler( + {required AttachmentExceptionHandler onDeleteError, + required AttachmentExceptionHandler onDownloadError, + required AttachmentExceptionHandler onUploadError}) + : _onDeleteError = onDeleteError, + _onDownloadError = onDownloadError, + _onUploadError = onUploadError; + + @override + Future onDeleteError( + Attachment attachment, Object exception, StackTrace stackTrace) { + return _onDeleteError(attachment, exception, stackTrace); + } + + @override + Future onDownloadError( + Attachment attachment, Object exception, StackTrace stackTrace) { + return _onDownloadError(attachment, exception, stackTrace); + } + + @override + Future onUploadError( + Attachment attachment, Object exception, StackTrace stackTrace) { + return _onUploadError(attachment, exception, stackTrace); + } +} diff --git a/packages/powersync_core/pubspec.yaml b/packages/powersync_core/pubspec.yaml index 7a4c9f7a..fa3dd02a 100644 --- a/packages/powersync_core/pubspec.yaml +++ b/packages/powersync_core/pubspec.yaml @@ -39,6 +39,8 @@ dev_dependencies: stream_channel: ^2.1.2 fake_async: ^1.3.3 bson: ^5.0.7 + test_descriptor: ^2.0.2 + mockito: ^5.5.0 platforms: android: diff --git a/packages/powersync_core/test/attachments/attachment_test.dart b/packages/powersync_core/test/attachments/attachment_test.dart new file mode 100644 index 00000000..90cd1f37 --- /dev/null +++ b/packages/powersync_core/test/attachments/attachment_test.dart @@ -0,0 +1,372 @@ +import 'dart:typed_data'; + +import 'package:async/async.dart'; +import 'package:logging/logging.dart'; +import 'package:mockito/mockito.dart'; +import 'package:powersync_core/attachments/attachments.dart'; +import 'package:powersync_core/powersync_core.dart'; +import 'package:test/test.dart'; + +import '../utils/abstract_test_utils.dart'; +import '../utils/test_utils_impl.dart'; + +void main() { + late TestPowerSyncFactory factory; + late PowerSyncDatabase db; + late MockRemoteStorage remoteStorage; + late LocalStorage localStorage; + late AttachmentQueue queue; + late StreamQueue> attachments; + + Stream> watchAttachments() { + return db + .watch('SELECT photo_id FROM users WHERE photo_id IS NOT NULL') + .map( + (rs) => [ + for (final row in rs) + WatchedAttachmentItem( + id: row['photo_id'] as String, fileExtension: 'jpg') + ], + ); + } + + setUpAll(() async { + factory = await TestUtils().testFactory(); + }); + + setUp(() async { + remoteStorage = MockRemoteStorage(); + localStorage = LocalStorage.inMemory(); + + final (raw, database) = await factory.openInMemoryDatabase( + schema: _schema, + // Uncomment to see test logs + logger: Logger.detached('PowerSyncTest'), + ); + await database.initialize(); + db = database; + + queue = AttachmentQueue( + db: db, + remoteStorage: remoteStorage, + watchAttachments: watchAttachments, + localStorage: localStorage, + archivedCacheLimit: 0, + ); + + attachments = StreamQueue(db.attachments); + await expectLater(attachments, emits(isEmpty)); + }); + + tearDown(() async { + await attachments.cancel(); + await queue.stopSyncing(); + await queue.close(); + + await db.close(); + }); + + test('downloads attachments', () async { + await queue.startSync(); + + // Create a user with 
a photo_id specified. Since we didn't save an
+    // attachment before assigning a photo_id, this is equivalent to requiring
+    // an attachment download.
+    await db.execute(
+      'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, uuid())',
+      ['steven', 'steven@journeyapps.com'],
+    );
+
+    var [attachment] = await attachments.next;
+    if (attachment.state == AttachmentState.queuedDownload) {
+      // Depending on timing with the queue scanning for items asynchronously,
+      // we may see a queued download or a synced event initially.
+      [attachment] = await attachments.next;
+    }
+
+    expect(attachment.state, AttachmentState.synced);
+    final localUri = attachment.localUri!;
+
+    // A download should have been attempted for this file.
+    verify(remoteStorage.downloadFile(argThat(isAttachment(attachment.id))));
+
+    // A file should now exist.
+    expect(await localStorage.fileExists(localUri), isTrue);
+
+    // Now clear the user's photo_id, which should archive the attachment.
+    await db.execute('UPDATE users SET photo_id = NULL');
+
+    var nextAttachment = (await attachments.next).firstOrNull;
+    if (nextAttachment != null) {
+      expect(nextAttachment.state, AttachmentState.archived);
+      nextAttachment = (await attachments.next).firstOrNull;
+    }
+
+    expect(nextAttachment, isNull);
+
+    // File should have been deleted too
+    expect(await localStorage.fileExists(localUri), isFalse);
+  });
+
+  test('stores relative paths', () async {
+    // Regression test for an issue we had in the Kotlin/Swift implementations:
+    // https://github.com/powersync-ja/powersync-swift/pull/74
+    await queue.startSync();
+    await db.execute(
+      'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, ?)',
+      ['steven', 'steven@journeyapps.com', 'picture_id'],
+    );
+
+    // Wait for attachment to sync.
+    await expectLater(
+        attachments,
+        emitsThrough([
+          isA<Attachment>()
+              .having((e) => e.state, 'state', AttachmentState.synced)
+        ]));
+
+    expect(await localStorage.fileExists('picture_id.jpg'), isTrue);
+  });
+
+  test('recovers from deleted local files', () async {
+    // Create an attachments record which has an invalid local_uri.
+    await db.execute(
+      'INSERT OR REPLACE INTO attachments_queue '
+      '(id, timestamp, filename, local_uri, media_type, size, state, has_synced, meta_data) '
+      'VALUES (uuid(), current_timestamp, ?, ?, ?, ?, ?, ?, ?)',
+      [
+        'attachment.jpg',
+        'invalid/dir/attachment.jpg',
+        'application/jpeg',
+        1,
+        AttachmentState.synced.toInt(),
+        1,
+        ""
+      ],
+    );
+    await attachments.next;
+
+    queue = AttachmentQueue(
+      db: db,
+      remoteStorage: remoteStorage,
+      watchAttachments: watchAttachments,
+      localStorage: localStorage,
+      archivedCacheLimit: 1,
+    );
+
+    // The attachment should be marked as archived, and the local URI should be
+    // removed.
+ await queue.startSync(); + + final [attachment] = await attachments.next; + expect(attachment.filename, 'attachment.jpg'); + expect(attachment.localUri, isNull); + expect(attachment.state, AttachmentState.archived); + }); + + test('uploads attachments', () async { + await queue.startSync(); + + final record = await queue.saveFile( + data: Stream.value(Uint8List(123)), + mediaType: 'image/jpg', + updateHook: (tx, attachment) async { + await tx.execute( + 'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, ?);', + ['steven', 'steven@journeyapps.com', attachment.id], + ); + }, + ); + expect(record.size, 123); + + var [attachment] = await attachments.next; + if (attachment.state == AttachmentState.queuedUpload) { + // Wait for it to be synced + [attachment] = await attachments.next; + } + + expect(attachment.state, AttachmentState.synced); + + // An upload should have been attempted for this file. + verify(remoteStorage.uploadFile(any, argThat(isAttachment(record.id)))); + expect(await localStorage.fileExists(record.localUri!), isTrue); + + // Now clear the user's photo_id, which should archive the attachment. + await db.execute('UPDATE users SET photo_id = NULL'); + + // Should delete attachment from database + await expectLater(attachments, emitsThrough(isEmpty)); + + // File should have been deleted too + expect(await localStorage.fileExists(record.localUri!), isFalse); + }); + + test('delete attachments', () async { + await queue.startSync(); + + final id = await queue.generateAttachmentId(); + await db.execute( + 'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, ?)', + ['steven', 'steven@journeyapps.com', id], + ); + + // Wait for the attachment to be synced. + await expectLater( + attachments, + emitsThrough([ + isA() + .having((e) => e.state, 'state', AttachmentState.synced) + ]), + ); + + await queue.deleteFile( + attachmentId: id, + updateHook: (tx, attachment) async { + await tx.execute( + 'UPDATE users SET photo_id = NULL WHERE photo_id = ?', + [attachment.id], + ); + }, + ); + + // Record should be deleted. + await expectLater(attachments, emitsThrough(isEmpty)); + verify(remoteStorage.deleteFile(argThat(isAttachment(id)))); + }); + + test('cached download', () async { + queue = AttachmentQueue( + db: db, + remoteStorage: remoteStorage, + watchAttachments: watchAttachments, + localStorage: localStorage, + archivedCacheLimit: 10, + ); + + await queue.startSync(); + + // Create attachment and wait for download. + await db.execute( + 'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, uuid())', + ['steven', 'steven@journeyapps.com'], + ); + await expectLater( + attachments, + emitsThrough([ + isA() + .having((e) => e.state, 'state', AttachmentState.synced) + ]), + ); + final [id as String, localUri as String] = + (await db.get('SELECT id, local_uri FROM attachments_queue')).values; + verify(remoteStorage.downloadFile(argThat(isAttachment(id)))); + expect(await localStorage.fileExists(localUri), isTrue); + + // Archive attachment by not referencing it anymore. 
+ await db.execute('UPDATE users SET photo_id = NULL'); + await expectLater( + attachments, + emitsThrough([ + isA() + .having((e) => e.state, 'state', AttachmentState.archived) + ]), + ); + + // Restore from cache + await db.execute('UPDATE users SET photo_id = ?', [id]); + await expectLater( + attachments, + emitsThrough([ + isA() + .having((e) => e.state, 'state', AttachmentState.synced) + ]), + ); + expect(await localStorage.fileExists(localUri), isTrue); + + // Should not have downloaded attachment again because we have it locally. + verifyNoMoreInteractions(remoteStorage); + }); + + test('skip failed download', () async { + Future errorHandler( + Attachment attachment, Object exception, StackTrace trace) async { + return false; + } + + queue = AttachmentQueue( + db: db, + remoteStorage: remoteStorage, + watchAttachments: watchAttachments, + localStorage: localStorage, + errorHandler: AttachmentErrorHandler( + onDeleteError: expectAsync3(errorHandler, count: 0), + onDownloadError: expectAsync3(errorHandler, count: 1), + onUploadError: expectAsync3(errorHandler, count: 0), + ), + ); + + when(remoteStorage.downloadFile(any)).thenAnswer((_) async { + throw 'test error'; + }); + + await queue.startSync(); + await db.execute( + 'INSERT INTO users (id, name, email, photo_id) VALUES (uuid(), ?, ?, uuid())', + ['steven', 'steven@journeyapps.com'], + ); + + expect(await attachments.next, [ + isA() + .having((e) => e.state, 'state', AttachmentState.queuedDownload) + ]); + expect(await attachments.next, [ + isA() + .having((e) => e.state, 'state', AttachmentState.archived) + ]); + }); +} + +extension on PowerSyncDatabase { + Stream> get attachments { + return watch('SELECT * FROM attachments_queue') + .map((rs) => rs.map(Attachment.fromRow).toList()); + } +} + +final class MockRemoteStorage extends Mock implements RemoteStorage { + MockRemoteStorage() { + when(uploadFile(any, any)).thenAnswer((_) async {}); + when(downloadFile(any)).thenAnswer((_) async { + return Stream.empty(); + }); + when(deleteFile(any)).thenAnswer((_) async {}); + } + + @override + Future uploadFile( + Stream? fileData, Attachment? attachment) async { + await noSuchMethod(Invocation.method(#uploadFile, [fileData, attachment])); + } + + @override + Future>> downloadFile(Attachment? attachment) { + return (noSuchMethod(Invocation.method(#downloadFile, [attachment])) ?? + Future.value(const Stream>.empty())) + as Future>>; + } + + @override + Future deleteFile(Attachment? 
attachment) async { + await noSuchMethod(Invocation.method(#deleteFile, [attachment])); + } +} + +final _schema = Schema([ + Table('users', + [Column.text('name'), Column.text('email'), Column.text('photo_id')]), + AttachmentsQueueTable(), +]); + +TypeMatcher isAttachment(String id) { + return isA().having((e) => e.id, 'id', id); +} diff --git a/packages/powersync_core/test/attachments/local_storage_test.dart b/packages/powersync_core/test/attachments/local_storage_test.dart new file mode 100644 index 00000000..9ceabac1 --- /dev/null +++ b/packages/powersync_core/test/attachments/local_storage_test.dart @@ -0,0 +1,365 @@ +@TestOn('vm') +library; + +import 'dart:async'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:test/test.dart'; +import 'package:path/path.dart' as p; +import 'package:powersync_core/src/attachments/io_local_storage.dart'; +import 'package:test_descriptor/test_descriptor.dart' as d; + +void main() { + group('IOLocalStorage', () { + late IOLocalStorage storage; + + setUp(() async { + storage = IOLocalStorage(Directory(d.sandbox)); + }); + + tearDown(() async { + // Clean up is handled automatically by test_descriptor + // No manual cleanup needed + }); + + group('saveFile and readFile', () { + test('saves and reads binary data successfully', () async { + const filePath = 'test_file'; + final data = Uint8List.fromList([1, 2, 3, 4, 5]); + final size = await storage.saveFile(filePath, Stream.value(data)); + expect(size, equals(data.length)); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, equals([data])); + + // Assert filesystem state using test_descriptor + await d.file(filePath, data).validate(); + }); + + test('throws when reading non-existent file', () async { + const filePath = 'non_existent'; + expect( + () => storage.readFile(filePath).toList(), + throwsA(isA()), + ); + + // Assert file does not exist using Dart's File API + expect(await File(p.join(d.sandbox, filePath)).exists(), isFalse); + }); + + test('creates parent directories if they do not exist', () async { + const filePath = 'subdir/nested/test'; + final nonExistentDir = Directory(p.join(d.sandbox, 'subdir', 'nested')); + final data = Uint8List.fromList([1, 2, 3]); + + expect(await nonExistentDir.exists(), isFalse); + + final size = await storage.saveFile(filePath, Stream.value(data)); + expect(size, equals(data.length)); + expect(await nonExistentDir.exists(), isTrue); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, equals([data])); + + // Assert directory structure + await d.dir('subdir/nested', [d.file('test', data)]).validate(); + }); + + test('creates all parent directories for deeply nested file', () async { + const filePath = 'a/b/c/d/e/f/g/h/i/j/testfile'; + final nestedDir = Directory( + p.join(d.sandbox, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'), + ); + final data = Uint8List.fromList([42, 43, 44]); + + expect(await nestedDir.exists(), isFalse); + + final size = await storage.saveFile(filePath, Stream.value(data)); + expect(size, equals(data.length)); + expect(await nestedDir.exists(), isTrue); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, equals([data])); + + // Assert deep directory structure + await d.dir('a/b/c/d/e/f/g/h/i/j', [ + d.file('testfile', data), + ]).validate(); + }); + + test('overwrites existing file', () async { + const filePath = 'overwrite_test'; + 
final originalData = Uint8List.fromList([1, 2, 3]); + final newData = Uint8List.fromList([4, 5, 6, 7]); + + await storage.saveFile(filePath, Stream.value(originalData)); + final size = await storage.saveFile(filePath, Stream.value(newData)); + expect(size, equals(newData.length)); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, equals([newData])); + + // Assert file content + await d.file(filePath, newData).validate(); + }); + }); + + group('edge cases and robustness', () { + test('saveFile with empty data writes empty file and returns 0 size', + () async { + const filePath = 'empty_file'; + + final size = await storage.saveFile(filePath, Stream.empty()); + expect(size, 0); + + final resultStream = storage.readFile(filePath); + final chunks = await resultStream.toList(); + expect(chunks, isEmpty); + + final file = File(p.join(d.sandbox, filePath)); + expect(await file.exists(), isTrue); + expect(await file.length(), 0); + }); + + test('readFile preserves byte order (chunking may differ)', () async { + const filePath = 'ordered_chunks'; + final chunks = [ + Uint8List.fromList([0, 1, 2]), + Uint8List.fromList([3, 4]), + Uint8List.fromList([5, 6, 7, 8]), + ]; + final expectedBytes = + Uint8List.fromList(chunks.expand((c) => c).toList()); + await storage.saveFile(filePath, Stream.value(expectedBytes)); + + final outChunks = await storage.readFile(filePath).toList(); + final outBytes = Uint8List.fromList( + outChunks.expand((c) => c).toList(), + ); + expect(outBytes, equals(expectedBytes)); + }); + + test('fileExists becomes false after deleteFile', () async { + const filePath = 'exists_then_delete'; + await storage.saveFile(filePath, Stream.value(Uint8List.fromList([1]))); + expect(await storage.fileExists(filePath), isTrue); + await storage.deleteFile(filePath); + expect(await storage.fileExists(filePath), isFalse); + }); + + test('initialize is idempotent', () async { + await storage.initialize(); + await storage.initialize(); + + // Create a file, then re-initialize again + const filePath = 'idempotent_test'; + await storage.saveFile(filePath, Stream.value(Uint8List.fromList([9]))); + await storage.initialize(); + + // File should still exist (initialize should not clear data) + expect(await storage.fileExists(filePath), isTrue); + }); + + test('clear works even if base directory was removed externally', + () async { + await storage.initialize(); + + // Remove the base dir manually + final baseDir = Directory(d.sandbox); + if (await baseDir.exists()) { + await baseDir.delete(recursive: true); + } + + // Calling clear should recreate base dir + await storage.clear(); + expect(await baseDir.exists(), isTrue); + }); + + test('supports unicode and emoji filenames', () async { + const filePath = '測試_файл_📷.bin'; + final bytes = Uint8List.fromList([10, 20, 30, 40]); + await storage.saveFile(filePath, Stream.value(bytes)); + + final out = await storage.readFile(filePath).toList(); + expect(out, equals([bytes])); + + await d.file(filePath, bytes).validate(); + }); + + test('readFile accepts mediaType parameter (ignored by IO impl)', + () async { + const filePath = 'with_media_type'; + final data = Uint8List.fromList([1, 2, 3]); + await storage.saveFile(filePath, Stream.value(data)); + + final result = + await storage.readFile(filePath, mediaType: 'image/jpeg').toList(); + expect(result, equals([data])); + }); + }); + + group('deleteFile', () { + test('deletes existing file', () async { + const filePath = 'delete_test'; + final data 
= Uint8List.fromList([1, 2, 3]); + + await storage.saveFile(filePath, Stream.value(data)); + expect(await storage.fileExists(filePath), isTrue); + + await storage.deleteFile(filePath); + expect(await storage.fileExists(filePath), isFalse); + + // Assert file does not exist + expect(await File(p.join(d.sandbox, filePath)).exists(), isFalse); + }); + + test('does not throw when deleting non-existent file', () async { + const filePath = 'non_existent'; + await storage.deleteFile(filePath); + expect(await File(p.join(d.sandbox, filePath)).exists(), isFalse); + }); + }); + + group('initialize and clear', () { + test('initialize creates the base directory', () async { + final newStorage = + IOLocalStorage(Directory(p.join(d.sandbox, 'new_dir'))); + final baseDir = Directory(p.join(d.sandbox, 'new_dir')); + + expect(await baseDir.exists(), isFalse); + + await newStorage.initialize(); + + expect(await baseDir.exists(), isTrue); + }); + + test('clear removes and recreates the base directory', () async { + await storage.initialize(); + final testFile = p.join(d.sandbox, 'test_file'); + await File(testFile).writeAsString('test'); + + expect(await File(testFile).exists(), isTrue); + + await storage.clear(); + + expect(await Directory(d.sandbox).exists(), isTrue); + expect(await File(testFile).exists(), isFalse); + }); + }); + + group('fileExists', () { + test('returns true for existing file', () async { + const filePath = 'exists_test'; + final data = Uint8List.fromList([1, 2, 3]); + + await storage.saveFile(filePath, Stream.value(data)); + expect(await storage.fileExists(filePath), isTrue); + + await d.file(filePath, data).validate(); + }); + + test('returns false for non-existent file', () async { + const filePath = 'non_existent'; + expect(await storage.fileExists(filePath), isFalse); + expect(await File(p.join(d.sandbox, filePath)).exists(), isFalse); + }); + }); + + group('file system integration', () { + test('handles special characters in file path', () async { + const filePath = 'file with spaces & symbols!@#'; + final data = Uint8List.fromList([1, 2, 3]); + + final size = await storage.saveFile(filePath, Stream.value(data)); + expect(size, equals(data.length)); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, equals([data])); + + await d.file(filePath, data).validate(); + }); + + test('handles large binary data stream', () async { + const filePath = 'large_file'; + final data = Uint8List.fromList(List.generate(10000, (i) => i % 256)); + final chunkSize = 1000; + final chunks = []; + for (var i = 0; i < data.length; i += chunkSize) { + chunks.add( + Uint8List.fromList( + data.sublist( + i, + i + chunkSize < data.length ? 
i + chunkSize : data.length, + ), + ), + ); + } + final size = await storage.saveFile(filePath, Stream.value(data)); + expect(size, equals(data.length)); + + final resultStream = storage.readFile(filePath); + final result = Uint8List.fromList( + (await resultStream.toList()).expand((chunk) => chunk).toList(), + ); + expect(result, equals(data)); + + await d.file(filePath, data).validate(); + }); + }); + + group('concurrent operations', () { + test('handles concurrent saves to different files', () async { + final futures = >[]; + final fileCount = 10; + + for (int i = 0; i < fileCount; i++) { + final data = Uint8List.fromList([i, i + 1, i + 2]); + futures.add(storage.saveFile('file_$i', Stream.value(data))); + } + + await Future.wait(futures); + + for (int i = 0; i < fileCount; i++) { + final resultStream = storage.readFile('file_$i'); + final result = await resultStream.toList(); + expect( + result, + equals([ + Uint8List.fromList([i, i + 1, i + 2]), + ]), + ); + await d + .file('file_$i', Uint8List.fromList([i, i + 1, i + 2])) + .validate(); + } + }); + + test('handles concurrent saves to the same file', () async { + const filePath = 'concurrent_test'; + final data1 = Uint8List.fromList([1, 2, 3]); + final data2 = Uint8List.fromList([4, 5, 6]); + final futures = [ + storage.saveFile(filePath, Stream.value(data1)), + storage.saveFile(filePath, Stream.value(data2)), + ]; + + await Future.wait(futures); + + final resultStream = storage.readFile(filePath); + final result = await resultStream.toList(); + expect(result, anyOf(equals([data1]), equals([data2]))); + + // Assert one of the possible outcomes + final file = File(p.join(d.sandbox, filePath)); + final fileData = await file.readAsBytes(); + expect(fileData, anyOf(equals(data1), equals(data2))); + }); + }); + }); +} diff --git a/packages/powersync_core/test/utils/abstract_test_utils.dart b/packages/powersync_core/test/utils/abstract_test_utils.dart index 2b456429..a95d2604 100644 --- a/packages/powersync_core/test/utils/abstract_test_utils.dart +++ b/packages/powersync_core/test/utils/abstract_test_utils.dart @@ -63,17 +63,21 @@ Logger _makeTestLogger({Level level = Level.ALL, String? name}) { abstract mixin class TestPowerSyncFactory implements PowerSyncOpenFactory { Future openRawInMemoryDatabase(); - Future<(CommonDatabase, PowerSyncDatabase)> openInMemoryDatabase() async { + Future<(CommonDatabase, PowerSyncDatabase)> openInMemoryDatabase({ + Schema? schema, + Logger? logger, + }) async { final raw = await openRawInMemoryDatabase(); - return (raw, wrapRaw(raw)); + return (raw, wrapRaw(raw, customSchema: schema, logger: logger)); } PowerSyncDatabase wrapRaw( CommonDatabase raw, { Logger? logger, + Schema? customSchema, }) { return PowerSyncDatabase.withDatabase( - schema: schema, + schema: customSchema ?? schema, database: SqliteDatabase.singleConnection( SqliteConnection.synchronousWrapper(raw)), logger: logger,