diff --git a/app/lib/admin/backend.dart b/app/lib/admin/backend.dart index dc2603851e..e857d8b9a7 100644 --- a/app/lib/admin/backend.dart +++ b/app/lib/admin/backend.dart @@ -400,6 +400,8 @@ class AdminBackend { await _db .deleteWithQuery(_db.query(ancestorKey: packageKey)); + await purgePackageCache(packageName); + _logger.info('Package "$packageName" got successfully removed.'); return ( deletedPackages: deletedPackages, diff --git a/app/lib/frontend/handlers/atom_feed.dart b/app/lib/frontend/handlers/atom_feed.dart index 07ef7da69d..ba6e2fbea3 100644 --- a/app/lib/frontend/handlers/atom_feed.dart +++ b/app/lib/frontend/handlers/atom_feed.dart @@ -19,11 +19,8 @@ import '../dom/dom.dart' as d; /// Handles requests for /feed.atom Future<shelf.Response> atomFeedHandler(shelf.Request request) async { - final feedContent = await cache.atomFeedXml().get(() async { - final versions = await packageBackend.latestPackageVersions(limit: 100); - final feed = _feedFromPackageVersions(request.requestedUri, versions); - return feed.toXmlDocument(); - }); + final feedContent = + await cache.atomFeedXml().get(buildAllPackagesAtomFeedContent); return shelf.Response.ok( feedContent, headers: { @@ -33,6 +30,13 @@ Future<shelf.Response> atomFeedHandler(shelf.Request request) async { ); } +/// Builds the content of the /feed.atom endpoint. 
+Future<String> buildAllPackagesAtomFeedContent() async { + final versions = await packageBackend.latestPackageVersions(limit: 100); + final feed = _feedFromPackageVersions(versions); + return feed.toXmlDocument(); +} + class FeedEntry { final String id; final String title; @@ -126,10 +130,7 @@ class Feed { } } -Feed _feedFromPackageVersions( - Uri requestedUri, - List<PackageVersion> versions, -) { +Feed _feedFromPackageVersions(List<PackageVersion> versions) { final entries = <FeedEntry>[]; for (var i = 0; i < versions.length; i++) { final version = versions[i]; @@ -157,7 +158,11 @@ Feed _feedFromPackageVersions( final alternateUrl = activeConfiguration.primarySiteUri.resolve('/').toString(); final author = 'Dart Team'; - final updated = clock.now().toUtc(); + // Set the updated timestamp to the latest version timestamp. This prevents + // unnecessary updates in the exported API bucket and makes tests consistent. + final updated = versions.isNotEmpty + ? versions.map((v) => v.created!).reduce((a, b) => a.isAfter(b) ? a : b) + : clock.now().toUtc(); return Feed(id, title, subTitle, updated, author, alternateUrl, selfUrl, 'Pub Feed Generator', '0.1.0', entries); diff --git a/app/lib/package/api_export/api_exporter.dart b/app/lib/package/api_export/api_exporter.dart index c854fbd432..2c29c0f47b 100644 --- a/app/lib/package/api_export/api_exporter.dart +++ b/app/lib/package/api_export/api_exporter.dart @@ -9,6 +9,7 @@ import 'package:clock/clock.dart'; import 'package:gcloud/service_scope.dart' as ss; import 'package:gcloud/storage.dart'; import 'package:logging/logging.dart'; +import 'package:pub_dev/frontend/handlers/atom_feed.dart'; import 'package:pub_dev/service/security_advisories/backend.dart'; import 'package:pub_dev/shared/exceptions.dart'; import 'package:pub_dev/shared/parallel_foreach.dart'; @@ -157,6 +158,7 @@ final class ApiExporter { }); await synchronizePackageNameCompletionData(forceWrite: forceWrite); + await synchronizeAllPackagesAtomFeed(forceWrite: forceWrite); await _api.notFound.write({ 
'error': { @@ -305,4 +307,14 @@ final class ApiExporter { await abort.future.timeout(Duration(minutes: 10), onTimeout: () => null); } } + + /// Synchronize the `/feed.atom` file into [ExportedApi]. + Future<void> synchronizeAllPackagesAtomFeed({ + bool forceWrite = false, + }) async { + await _api.allPackagesFeedAtomFile.write( + await buildAllPackagesAtomFeedContent(), + forceWrite: forceWrite, + ); + } } diff --git a/app/lib/package/api_export/exported_api.dart b/app/lib/package/api_export/exported_api.dart index eedf540452..2cd7f4c736 100644 --- a/app/lib/package/api_export/exported_api.dart +++ b/app/lib/package/api_export/exported_api.dart @@ -66,6 +66,10 @@ final class ExportedApi { Duration(hours: 8), ); + /// Interface for writing `/feed.atom` + ExportedAtomFeedFile get allPackagesFeedAtomFile => + ExportedAtomFeedFile._(this, '/feed.atom', Duration(hours: 12)); + /// Interface for writing `/api/not-found.json` which is what the bucket will /// use as 404 response when serving a website. ExportedJsonFile<Map<String, dynamic>> get notFound => @@ -502,7 +506,7 @@ final class ExportedJsonFile<T> extends ExportedObject { /// Write [data] as gzipped JSON in UTF-8 format. /// - /// This will only write of `Content-Length` and `md5Hash` doesn't match the + /// This will only write if `Content-Length` and `md5Hash` doesn't match the /// existing file, or if [forceWrite] is given. Future<void> write(T data, {bool forceWrite = false}) async { final gzipped = _jsonGzip.encode(data); @@ -521,6 +525,53 @@ } } +/// Interface for an exported atom feed file. +/// +/// This will write an atom feed as gzipped UTF-8, adding headers for +/// * `Content-Type`, +/// * `Content-Encoding`, and, +/// * `Cache-Control`. 
+final class ExportedAtomFeedFile extends ExportedObject { + final Duration _maxAge; + + ExportedAtomFeedFile._( + super._owner, + super._objectName, + this._maxAge, + ) : super._(); + + ObjectMetadata _metadata() { + return ObjectMetadata( + contentType: 'application/atom+xml; charset="utf-8"', + contentEncoding: 'gzip', + cacheControl: 'public, max-age=${_maxAge.inSeconds}', + custom: { + _validatedCustomHeader: clock.now().toIso8601String(), + }, + ); + } + + /// Write [content] as gzipped text in UTF-8 format. + /// + /// This will only write if `Content-Length` and `md5Hash` doesn't match the + /// existing file, or if [forceWrite] is given. + Future<void> write(String content, {bool forceWrite = false}) async { + final gzipped = gzip.encode(utf8.encode(content)); + final metadata = _metadata(); + + await Future.wait(_owner._prefixes.map((prefix) async { + await _owner._pool.withResource(() async { + await _owner._bucket.writeBytesIfDifferent( + prefix + _objectName, + gzipped, + metadata, + forceWrite: forceWrite, + ); + }); + })); + } +} + /// Interface for an exported binary file. 
/// /// This will write a binary blob as is, adding headers for diff --git a/app/lib/package/backend.dart b/app/lib/package/backend.dart index e1d81cf123..3101ba7153 100644 --- a/app/lib/package/backend.dart +++ b/app/lib/package/backend.dart @@ -1271,8 +1271,10 @@ class PackageBackend { if (activeConfiguration.isPublishedEmailNotificationEnabled) emailBackend.trySendOutgoingEmail(outgoingEmail), taskBackend.trackPackage(newVersion.package, updateDependents: true), - if (apiExporter != null) + if (apiExporter != null) ...[ apiExporter!.synchronizePackage(newVersion.package), + apiExporter!.synchronizeAllPackagesAtomFeed(), + ], ]); await tarballStorage.updateContentDispositionOnPublicBucket( newVersion.package, newVersion.version!); diff --git a/app/test/package/api_export/api_exporter_test.dart b/app/test/package/api_export/api_exporter_test.dart index 608159df36..27c264ef70 100644 --- a/app/test/package/api_export/api_exporter_test.dart +++ b/app/test/package/api_export/api_exporter_test.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+import 'dart:convert'; import 'dart:io'; import 'dart:typed_data'; @@ -12,7 +13,7 @@ import 'package:googleapis/storage/v1.dart' show DetailedApiRequestError; import 'package:logging/logging.dart'; import 'package:pub_dev/fake/backend/fake_auth_provider.dart'; import 'package:pub_dev/package/api_export/api_exporter.dart'; -import 'package:pub_dev/shared/datastore.dart'; +import 'package:pub_dev/shared/configuration.dart'; import 'package:pub_dev/shared/storage.dart'; import 'package:pub_dev/shared/utils.dart'; import 'package:pub_dev/shared/versions.dart'; @@ -48,15 +49,19 @@ void main() { 'SHOUT Deleting object from public bucket: "packages/bar-2.0.0.tar.gz".', 'SHOUT Deleting object from public bucket: "packages/bar-3.0.0.tar.gz".', ], (fakeTime) async { - await storageService.createBucket('bucket'); - final bucket = storageService.bucket('bucket'); - final apiExporter = - ApiExporter(dbService, storageService: storageService, bucket: bucket); + // Since we want to verify post-upload tasks triggering API exporter, + // we cannot use an isolated instance, we need to use the same setup. + // However, for better control and consistency, we can remove all the + // existing files from the bucket at the start of this test: + await apiExporter!.stop(); + final bucket = + storageService.bucket(activeConfiguration.exportedApiBucketName!); + await _deleteAll(bucket); await _testExportedApiSynchronization( fakeTime, bucket, - apiExporter.synchronizeExportedApi, + apiExporter!.synchronizeExportedApi, ); }); @@ -68,14 +73,18 @@ void main() { ], testProfile: _testProfile, (fakeTime) async { - await storageService.createBucket('bucket'); - final bucket = storageService.bucket('bucket'); - final apiExporter = ApiExporter(dbService, - storageService: storageService, bucket: bucket); + // Since we want to verify post-upload tasks triggering API exporter, + // we cannot use an isolated instance, we need to use the same setup. 
+ // However, for better control and consistency, we can remove all the + // existing files from the bucket at the start of this test: + await apiExporter!.stop(); + final bucket = + storageService.bucket(activeConfiguration.exportedApiBucketName!); + await _deleteAll(bucket); - await apiExporter.synchronizeExportedApi(); + await apiExporter!.synchronizeExportedApi(); - await apiExporter.start(); + await apiExporter!.start(); await _testExportedApiSynchronization( fakeTime, @@ -83,11 +92,19 @@ () async => await fakeTime.elapse(minutes: 15), ); - await apiExporter.stop(); + await apiExporter!.stop(); }, ); } +Future<void> _deleteAll(Bucket bucket) async { + await for (final entry in bucket.list(delimiter: '')) { + if (entry.isObject) { + await bucket.delete(entry.name); + } + } +} + Future<void> _testExportedApiSynchronization( FakeTime fakeTime, Bucket bucket, @@ -131,6 +148,10 @@ Future<void> _testExportedApiSynchronization( await bucket.readBytes('$runtimeVersion/api/archives/foo-1.0.0.tar.gz'), isNotNull, ); + expect( + await bucket.readString('$runtimeVersion/feed.atom'), + contains('v1.0.0 of foo'), + ); } _log.info('## New package'); @@ -160,6 +181,10 @@ await bucket.readBytes('latest/api/archives/foo-1.0.0.tar.gz'), isNotNull, ); + expect( + await bucket.readString('latest/feed.atom'), + contains('v1.0.0 of foo'), + ); // Note. that name completion data won't be updated until search caches // are purged, so we won't test that it is updated. 
@@ -176,6 +201,10 @@ await bucket.readBytes('latest/api/archives/bar-2.0.0.tar.gz'), isNotNull, ); + expect( + await bucket.readString('latest/feed.atom'), + contains('v2.0.0 of bar'), + ); } _log.info('## New package version'); @@ -214,6 +243,10 @@ await bucket.readBytes('latest/api/archives/bar-3.0.0.tar.gz'), isNotNull, ); + expect( + await bucket.readString('$runtimeVersion/feed.atom'), + contains('v3.0.0 of bar'), + ); } _log.info('## Discontinued flipped on'); @@ -439,7 +472,7 @@ } extension on Bucket { - /// Read bytes from bucket, retur null if missing + /// Read bytes from bucket, return null if missing Future<Uint8List?> readBytes(String path) async { try { return await readAsBytes(path); @@ -457,4 +490,10 @@ } return utf8JsonDecoder.convert(gzip.decode(bytes)); } + + /// Read bytes from bucket and decode as UTF-8 text. + Future<String> readString(String path) async { + final bytes = await readBytes(path); + return utf8.decode(gzip.decode(bytes!)); + } }