@@ -259,21 +259,24 @@ final class ExportedJsonFile<T> extends ExportedObject {
     this._maxAge,
   ) : super._(_owner, _objectName);
 
-  late final _metadata = ObjectMetadata(
-    contentType: 'application/json; charset="utf-8"',
-    contentEncoding: 'gzip',
-    cacheControl: 'public, max-age=${_maxAge.inSeconds}',
-  );
-
   /// Write [data] as gzipped JSON in UTF-8 format.
   Future<void> write(T data) async {
     final gzipped = _jsonGzip.encode(data);
+    final metadata = ObjectMetadata(
+      contentType: 'application/json; charset="utf-8"',
+      contentEncoding: 'gzip',
+      cacheControl: 'public, max-age=${_maxAge.inSeconds}',
+      custom: {
+        'updated': clock.now().toIso8601String(),
+      },
+    );
+
     await Future.wait(_owner._prefixes.map((prefix) async {
       await _owner._pool.withResource(() async {
         await _owner._bucket.writeBytesIfDifferent(
           prefix + _objectName,
           gzipped,
-          metadata: _metadata,
+          metadata,
         );
       });
     }));
@@ -299,52 +302,88 @@ final class ExportedBlob extends ExportedObject {
     this._maxAge,
   ) : super._(_owner, _objectName);
 
-  late final _metadata = ObjectMetadata(
-    contentType: _contentType,
-    cacheControl: 'public, max-age=${_maxAge.inSeconds}',
-    contentDisposition: 'attachment; filename="$_filename"',
-  );
+  ObjectMetadata _metadata() {
+    return ObjectMetadata(
+      contentType: _contentType,
+      cacheControl: 'public, max-age=${_maxAge.inSeconds}',
+      contentDisposition: 'attachment; filename="$_filename"',
+      custom: {
+        'updated': clock.now().toIso8601String(),
+      },
+    );
+  }
 
   /// Write binary blob to this file.
   Future<void> write(List<int> data) async {
+    final metadata = _metadata();
     await Future.wait(_owner._prefixes.map((prefix) async {
       await _owner._pool.withResource(() async {
         await _owner._bucket.writeBytesIfDifferent(
           prefix + _objectName,
           data,
-          metadata: _metadata,
+          metadata,
        );
      });
    }));
  }
 
-  /// Copy binary blob from [absoluteObjectName] to this file.
-  ///
-  /// Notice that [absoluteObjectName] must be an absolute GCS URI including `gs://`.
-  /// This means that it must include the bucket name.
-  /// Such URIs can be created with [Bucket.absoluteObjectName].
-  Future<void> copyFrom(String absoluteObjectName) async {
+  /// Copy binary blob from [bucket] and [source] to this file.
+  Future<void> copyFrom(Bucket bucket, String source) async {
+    final metadata = _metadata();
+    Future<ObjectInfo?>? srcInfo;
+
     await Future.wait(_owner._prefixes.map((prefix) async {
       await _owner._pool.withResource(() async {
+        final dst = prefix + _objectName;
+
+        // Check if the dst already exists
+        if (await _owner._bucket.tryInfo(dst) case final dstInfo?) {
+          // Fetch info for source object (if we haven't already done this)
+          srcInfo ??= bucket.tryInfo(source);
+          if (await srcInfo case final srcInfo?) {
+            if (dstInfo.contentEquals(srcInfo)) {
+              // If both source and dst exist, and their content matches, then
+              // we only need to update the "updated" metadata. And we only
+              // need to update the "updated" timestamp if it's older than
+              // _retouchDeadline.
+              final retouchDeadline = clock.agoBy(_retouchDeadline);
+              if (dstInfo.metadata.updated.isBefore(retouchDeadline)) {
+                await _owner._bucket.updateMetadata(dst, metadata);
+              }
+              return;
+            }
+          }
+        }
+
+        // If dst or source doesn't exist, then we shall attempt to make a copy.
+        // (if source doesn't exist we'll consistently get an error from here!)
         await _owner._storage.copyObject(
-          absoluteObjectName,
-          _owner._bucket.absoluteObjectName(prefix + _objectName),
-          metadata: _metadata,
+          bucket.absoluteObjectName(source),
+          _owner._bucket.absoluteObjectName(dst),
+          metadata: metadata,
        );
      });
    }));
  }
 }
 
+const _retouchDeadline = Duration(days: 1);
+
 extension on Bucket {
   Future<void> writeBytesIfDifferent(
     String name,
-    List<int> bytes, {
-    ObjectMetadata? metadata,
-  }) async {
-    if (await _hasSameContent(name, bytes)) {
-      return;
+    List<int> bytes,
+    ObjectMetadata metadata,
+  ) async {
+    if (await tryInfo(name) case final info?) {
+      if (info.isSameContent(bytes)) {
+        if (info.metadata.updated.isBefore(clock.agoBy(_retouchDeadline))) {
+          await updateMetadata(name, metadata);
+        }
+        return;
+      }
     }
+
     await uploadWithRetry(
       this,
       name,
@@ -353,16 +392,27 @@ extension on Bucket {
       metadata: metadata,
     );
   }
+}
 
-  Future<bool> _hasSameContent(String name, List<int> bytes) async {
-    final info = await tryInfo(name);
-    if (info == null) {
+extension on ObjectInfo {
+  bool isSameContent(List<int> bytes) {
+    if (length != bytes.length) {
       return false;
     }
-    if (info.length != bytes.length) {
+    final bytesHash = md5.convert(bytes).bytes;
+    return fixedTimeIntListEquals(md5Hash, bytesHash);
+  }
+
+  bool contentEquals(ObjectInfo info) {
+    if (length != info.length) {
       return false;
     }
-    final md5Hash = md5.convert(bytes).bytes;
-    return fixedTimeIntListEquals(info.md5Hash, md5Hash);
+    return fixedTimeIntListEquals(md5Hash, info.md5Hash);
+  }
+}
+
+extension on ObjectMetadata {
+  DateTime get updated {
+    return DateTime.tryParse(custom?['updated'] ?? '') ?? DateTime(0);
   }
 }
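Taken together, the change stamps every exported object with a custom `updated` timestamp and avoids needless rewrites: when the stored bytes already match the new payload, only the timestamp is refreshed, and only if it is older than `_retouchDeadline` (one day). Below is a minimal, dependency-free sketch of that decision rule; `RetouchAction` and `decideAction` are illustrative names, not part of the commit.

/// Possible outcomes when the exporter is asked to write bytes that may
/// already be present in the bucket.
enum RetouchAction { rewrite, retouchMetadata, nothing }

/// Decide what to do for an existing object, given whether its content
/// matches the new bytes and when its 'updated' stamp was last written.
RetouchAction decideAction({
  required bool sameContent,
  required DateTime? lastUpdated,
  required DateTime now,
  Duration retouchDeadline = const Duration(days: 1),
}) {
  if (!sameContent) {
    // Content differs: upload the new bytes (fresh metadata goes with them).
    return RetouchAction.rewrite;
  }
  // A missing or unparsable stamp counts as very old, mirroring
  // `DateTime.tryParse(custom?['updated'] ?? '') ?? DateTime(0)`.
  final updated = lastUpdated ?? DateTime(0);
  if (updated.isBefore(now.subtract(retouchDeadline))) {
    return RetouchAction.retouchMetadata;
  }
  return RetouchAction.nothing;
}

void main() {
  final now = DateTime.utc(2024, 1, 10);
  // Unchanged content stamped two days ago: only the metadata is retouched.
  print(decideAction(
    sameContent: true,
    lastUpdated: now.subtract(const Duration(days: 2)),
    now: now,
  )); // RetouchAction.retouchMetadata
  // Changed content: the object is rewritten.
  print(decideAction(sameContent: false, lastUpdated: now, now: now));
}

Writing only when content differs keeps object generations stable, while the one-day retouch window bounds how often metadata-only updates are issued.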
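The new `isSameContent` and `contentEquals` helpers both short-circuit on length before comparing MD5 digests. The following is a standalone approximation using `package:crypto` (the same package behind `md5.convert` in the diff); `fixedTimeEquals` is a hand-rolled stand-in for pub-dev's `fixedTimeIntListEquals`, which is defined elsewhere and not shown here.

import 'package:crypto/crypto.dart' show md5;

/// Constant-time byte comparison; a stand-in for `fixedTimeIntListEquals`.
bool fixedTimeEquals(List<int> a, List<int> b) {
  if (a.length != b.length) return false;
  var diff = 0;
  for (var i = 0; i < a.length; i++) {
    diff |= a[i] ^ b[i];
  }
  return diff == 0;
}

/// Mirrors ObjectInfo.isSameContent: cheap length check first, then MD5.
bool sameContent({
  required int storedLength,
  required List<int> storedMd5,
  required List<int> candidateBytes,
}) {
  if (storedLength != candidateBytes.length) return false;
  return fixedTimeEquals(storedMd5, md5.convert(candidateBytes).bytes);
}

void main() {
  final stored = [1, 2, 3];
  final storedMd5 = md5.convert(stored).bytes;
  print(sameContent(
    storedLength: stored.length,
    storedMd5: storedMd5,
    candidateBytes: [1, 2, 3],
  )); // true
  print(sameContent(
    storedLength: stored.length,
    storedMd5: storedMd5,
    candidateBytes: [1, 2, 4],
  )); // false
}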