2 changes: 2 additions & 0 deletions app/lib/admin/backend.dart
@@ -400,6 +400,8 @@ class AdminBackend {
await _db
.deleteWithQuery(_db.query<PackageVersion>(ancestorKey: packageKey));

await purgePackageCache(packageName);

_logger.info('Package "$packageName" got successfully removed.');
return (
deletedPackages: deletedPackages,
25 changes: 15 additions & 10 deletions app/lib/frontend/handlers/atom_feed.dart
@@ -19,11 +19,8 @@ import '../dom/dom.dart' as d;

/// Handles requests for /feed.atom
Future<shelf.Response> atomFeedHandler(shelf.Request request) async {
final feedContent = await cache.atomFeedXml().get(() async {
final versions = await packageBackend.latestPackageVersions(limit: 100);
final feed = _feedFromPackageVersions(request.requestedUri, versions);
return feed.toXmlDocument();
});
final feedContent =
await cache.atomFeedXml().get(buildAllPackagesAtomFeedContent);
return shelf.Response.ok(
feedContent,
headers: {
@@ -33,6 +30,13 @@ Future<shelf.Response> atomFeedHandler(shelf.Request request) async {
);
}

/// Builds the content of the /feed.atom endpoint.
Future<String> buildAllPackagesAtomFeedContent() async {
final versions = await packageBackend.latestPackageVersions(limit: 100);
final feed = _feedFromPackageVersions(versions);
return feed.toXmlDocument();
}

class FeedEntry {
final String id;
final String title;
@@ -126,10 +130,7 @@ class Feed {
}
}

Feed _feedFromPackageVersions(
Uri requestedUri,
List<PackageVersion> versions,
) {
Feed _feedFromPackageVersions(List<PackageVersion> versions) {
final entries = <FeedEntry>[];
for (var i = 0; i < versions.length; i++) {
final version = versions[i];
@@ -157,7 +158,11 @@ Feed _feedFromPackageVersions(
final alternateUrl =
activeConfiguration.primarySiteUri.resolve('/').toString();
final author = 'Dart Team';
final updated = clock.now().toUtc();
// Set the updated timestamp to the latest version timestamp. This prevents
// unnecessary updates in the exported API bucket and makes tests consistent.
final updated = versions.isNotEmpty
? versions.map((v) => v.created!).reduce((a, b) => a.isAfter(b) ? a : b)
: clock.now().toUtc();

return Feed(id, title, subTitle, updated, author, alternateUrl, selfUrl,
'Pub Feed Generator', '0.1.0', entries);
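
The `updated` timestamp above is derived from the newest `created` value of the listed versions, so repeated builds over the same versions produce byte-identical XML and the content-based export comparison can skip rewrites. A minimal Dart sketch of that selection (helper and names are illustrative, not part of the change):

// Sketch: pick the newest `created` timestamp, falling back to a
// caller-supplied time when there are no versions.
DateTime newestCreated(List<DateTime> createdTimes, DateTime fallback) {
  if (createdTimes.isEmpty) return fallback;
  return createdTimes.reduce((a, b) => a.isAfter(b) ? a : b);
}
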
12 changes: 12 additions & 0 deletions app/lib/package/api_export/api_exporter.dart
@@ -9,6 +9,7 @@ import 'package:clock/clock.dart';
import 'package:gcloud/service_scope.dart' as ss;
import 'package:gcloud/storage.dart';
import 'package:logging/logging.dart';
import 'package:pub_dev/frontend/handlers/atom_feed.dart';
import 'package:pub_dev/service/security_advisories/backend.dart';
import 'package:pub_dev/shared/exceptions.dart';
import 'package:pub_dev/shared/parallel_foreach.dart';
@@ -157,6 +158,7 @@ final class ApiExporter {
});

await synchronizePackageNameCompletionData(forceWrite: forceWrite);
await synchronizeAllPackagesAtomFeed(forceWrite: forceWrite);

await _api.notFound.write({
'error': {
@@ -305,4 +307,14 @@ final class ApiExporter {
await abort.future.timeout(Duration(minutes: 10), onTimeout: () => null);
}
}

/// Synchronize the `/feed.atom` file into [ExportedApi].
Future<void> synchronizeAllPackagesAtomFeed({
bool forceWrite = false,
}) async {
await _api.allPackagesFeedAtomFile.write(
await buildAllPackagesAtomFeedContent(),
forceWrite: forceWrite,
);
}
}
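
Both the full synchronization pass (`synchronizeExportedApi`, which now calls `synchronizeAllPackagesAtomFeed` alongside the package-name completion data) and the post-publish path in `PackageBackend` drive this method. A hedged sketch of a caller, assuming only the `ApiExporter` API shown in this diff; the helper name is illustrative:

// Illustrative caller (not part of the change): force a rewrite during a
// full pass, otherwise rely on the content comparison to skip identical feeds.
import 'package:pub_dev/package/api_export/api_exporter.dart';

Future<void> refreshAtomFeed(ApiExporter exporter, {bool fullSync = false}) async {
  await exporter.synchronizeAllPackagesAtomFeed(forceWrite: fullSync);
}
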
53 changes: 52 additions & 1 deletion app/lib/package/api_export/exported_api.dart
@@ -66,6 +66,10 @@ final class ExportedApi {
Duration(hours: 8),
);

/// Interface for writing `/feed.atom`
ExportedAtomFeedFile get allPackagesFeedAtomFile =>
ExportedAtomFeedFile._(this, '/feed.atom', Duration(hours: 12));

/// Interface for writing `/api/not-found.json` which is what the bucket will
/// use as 404 response when serving a website.
ExportedJsonFile<Map<String, Object?>> get notFound =>
@@ -502,7 +506,7 @@ final class ExportedJsonFile<T> extends ExportedObject {

/// Write [data] as gzipped JSON in UTF-8 format.
///
/// This will only write of `Content-Length` and `md5Hash` doesn't match the
/// This will only write if `Content-Length` and `md5Hash` don't match the
/// existing file, or if [forceWrite] is given.
Future<void> write(T data, {bool forceWrite = false}) async {
final gzipped = _jsonGzip.encode(data);
@@ -521,6 +525,53 @@
}
}

/// Interface for an exported atom feed file.
///
/// This will write an atom feed as gzipped UTF-8, adding headers for
/// * `Content-Type`,
/// * `Content-Encoding`, and,
/// * `Cache-Control`.
final class ExportedAtomFeedFile<T> extends ExportedObject {
final Duration _maxAge;

ExportedAtomFeedFile._(
super._owner,
super._objectName,
this._maxAge,
) : super._();

ObjectMetadata _metadata() {
return ObjectMetadata(
contentType: 'application/atom+xml; charset="utf-8"',
contentEncoding: 'gzip',
cacheControl: 'public, max-age=${_maxAge.inSeconds}',
custom: {
_validatedCustomHeader: clock.now().toIso8601String(),
},
);
}

/// Write [content] as gzipped text in UTF-8 format.
///
/// This will only write if `Content-Length` and `md5Hash` don't match the
/// existing file, or if [forceWrite] is given.
Future<void> write(String content, {bool forceWrite = false}) async {
final gzipped = gzip.encode(utf8.encode(content));
final metadata = _metadata();

await Future.wait(_owner._prefixes.map((prefix) async {
await _owner._pool.withResource(() async {
await _owner._bucket.writeBytesIfDifferent(
prefix + _objectName,
gzipped,
metadata,
forceWrite: forceWrite,
);
});
}));
}
}

/// Interface for an exported binary file.
///
/// This will write a binary blob as is, adding headers for
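
A hedged usage sketch of the new `ExportedAtomFeedFile` interface: content from `buildAllPackagesAtomFeedContent` is gzipped, given the atom `Content-Type`, `Content-Encoding`, and `Cache-Control` headers, and written under every exported prefix (`latest/`, `<runtimeVersion>/`) unless an identical object already exists. Names outside this diff are illustrative:

// Illustrative wiring only; `exportedApi` stands for an ExportedApi instance
// bound to the exported API bucket.
import 'package:pub_dev/frontend/handlers/atom_feed.dart';
import 'package:pub_dev/package/api_export/exported_api.dart';

Future<void> exportFeed(ExportedApi exportedApi) async {
  final xml = await buildAllPackagesAtomFeedContent();
  await exportedApi.allPackagesFeedAtomFile.write(xml);
}
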
4 changes: 3 additions & 1 deletion app/lib/package/backend.dart
@@ -1271,8 +1271,10 @@ class PackageBackend {
if (activeConfiguration.isPublishedEmailNotificationEnabled)
emailBackend.trySendOutgoingEmail(outgoingEmail),
taskBackend.trackPackage(newVersion.package, updateDependents: true),
if (apiExporter != null)
if (apiExporter != null) ...[
apiExporter!.synchronizePackage(newVersion.package),
apiExporter!.synchronizeAllPackagesAtomFeed(),
],
]);
await tarballStorage.updateContentDispositionOnPublicBucket(
newVersion.package, newVersion.version!);
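
The publish path adds both exporter tasks to the `Future.wait` list behind a single null check, using Dart's collection-`if` with a spread. A minimal sketch of that shape (the helper name is illustrative, not part of the change):

// Illustrative helper: both futures join the list only when the exporter is
// available, and Future.wait runs them concurrently.
import 'package:pub_dev/package/api_export/api_exporter.dart';

Future<void> runPostPublishExportTasks(ApiExporter? exporter, String package) async {
  await Future.wait([
    if (exporter != null) ...[
      exporter.synchronizePackage(package),
      exporter.synchronizeAllPackagesAtomFeed(),
    ],
  ]);
}
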
67 changes: 53 additions & 14 deletions app/test/package/api_export/api_exporter_test.dart
@@ -2,6 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';

@@ -12,7 +13,7 @@ import 'package:googleapis/storage/v1.dart' show DetailedApiRequestError;
import 'package:logging/logging.dart';
import 'package:pub_dev/fake/backend/fake_auth_provider.dart';
import 'package:pub_dev/package/api_export/api_exporter.dart';
import 'package:pub_dev/shared/datastore.dart';
import 'package:pub_dev/shared/configuration.dart';
import 'package:pub_dev/shared/storage.dart';
import 'package:pub_dev/shared/utils.dart';
import 'package:pub_dev/shared/versions.dart';
@@ -48,15 +49,19 @@ void main() {
'SHOUT Deleting object from public bucket: "packages/bar-2.0.0.tar.gz".',
'SHOUT Deleting object from public bucket: "packages/bar-3.0.0.tar.gz".',
], (fakeTime) async {
await storageService.createBucket('bucket');
final bucket = storageService.bucket('bucket');
final apiExporter =
ApiExporter(dbService, storageService: storageService, bucket: bucket);
// Since we want to verify that post-upload tasks trigger the API exporter,
// we cannot use an isolated instance; we need to use the same setup.
// However, for better control and consistency, we can remove all the
// existing files from the bucket at the start of this test:
await apiExporter!.stop();
final bucket =
storageService.bucket(activeConfiguration.exportedApiBucketName!);
await _deleteAll(bucket);

await _testExportedApiSynchronization(
fakeTime,
bucket,
apiExporter.synchronizeExportedApi,
apiExporter!.synchronizeExportedApi,
);
});

@@ -68,26 +73,38 @@
],
testProfile: _testProfile,
(fakeTime) async {
await storageService.createBucket('bucket');
final bucket = storageService.bucket('bucket');
final apiExporter = ApiExporter(dbService,
storageService: storageService, bucket: bucket);
// Since we want to verify that post-upload tasks trigger the API exporter,
// we cannot use an isolated instance; we need to use the same setup.
// However, for better control and consistency, we can remove all the
// existing files from the bucket at the start of this test:
await apiExporter!.stop();
final bucket =
storageService.bucket(activeConfiguration.exportedApiBucketName!);
await _deleteAll(bucket);

await apiExporter.synchronizeExportedApi();
await apiExporter!.synchronizeExportedApi();

await apiExporter.start();
await apiExporter!.start();

await _testExportedApiSynchronization(
fakeTime,
bucket,
() async => await fakeTime.elapse(minutes: 15),
);

await apiExporter.stop();
await apiExporter!.stop();
},
);
}

Future<void> _deleteAll(Bucket bucket) async {
await for (final entry in bucket.list(delimiter: '')) {
if (entry.isObject) {
await bucket.delete(entry.name);
}
}
}

Future<void> _testExportedApiSynchronization(
FakeTime fakeTime,
Bucket bucket,
@@ -131,6 +148,10 @@ Future<void> _testExportedApiSynchronization(
await bucket.readBytes('$runtimeVersion/api/archives/foo-1.0.0.tar.gz'),
isNotNull,
);
expect(
await bucket.readString('$runtimeVersion/feed.atom'),
contains('v1.0.0 of foo'),
);
}

_log.info('## New package');
@@ -160,6 +181,10 @@ Future<void> _testExportedApiSynchronization(
await bucket.readBytes('latest/api/archives/foo-1.0.0.tar.gz'),
isNotNull,
);
expect(
await bucket.readString('latest/feed.atom'),
contains('v1.0.0 of foo'),
);
// Note that name completion data won't be updated until search caches
// are purged, so we won't test that it is updated.

@@ -176,6 +201,10 @@
await bucket.readBytes('latest/api/archives/bar-2.0.0.tar.gz'),
isNotNull,
);
expect(
await bucket.readString('latest/feed.atom'),
contains('v2.0.0 of bar'),
);
}

_log.info('## New package version');
@@ -214,6 +243,10 @@ Future<void> _testExportedApiSynchronization(
await bucket.readBytes('latest/api/archives/bar-3.0.0.tar.gz'),
isNotNull,
);
expect(
await bucket.readString('$runtimeVersion/feed.atom'),
contains('v3.0.0 of bar'),
);
}

_log.info('## Discontinued flipped on');
Expand Down Expand Up @@ -439,7 +472,7 @@ Future<void> _testExportedApiSynchronization(
}

extension on Bucket {
/// Read bytes from bucket, retur null if missing
/// Read bytes from bucket, return null if missing
Future<Uint8List?> readBytes(String path) async {
try {
return await readAsBytes(path);
@@ -457,4 +490,10 @@ extension on Bucket {
}
return utf8JsonDecoder.convert(gzip.decode(bytes));
}

/// Read bytes from bucket and decode as UTF-8 text.
Future<String> readString(String path) async {
final bytes = await readBytes(path);
return utf8.decode(gzip.decode(bytes!));
}
}