@@ -404,6 +404,82 @@ TEST(MemProf, RecordSerializationRoundTripVersion2HotColdSchema) {
   EXPECT_EQ(Record, GotRecord);
 }
 
+TEST(MemProf, RecordSerializationRoundTripVersion4HotColdSchema) {
+  const auto Schema = getHotColdSchema();
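+  // The hot/cold schema should contain exactly the four fields asserted
+  // below; only fields present in the schema are serialized.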
+
+  MemInfoBlock Info;
+  Info.AllocCount = 11;
+  Info.TotalSize = 22;
+  Info.TotalLifetime = 33;
+  Info.TotalLifetimeAccessDensity = 44;
+
+  llvm::SmallVector<CallStackId> CallStackIds = {0x123, 0x456};
+
+  llvm::SmallVector<CallStackId> CallSiteIds = {0x333, 0x444};
+
+  IndexedMemProfRecord Record;
+  for (const auto &CSId : CallStackIds) {
+    // Use the same info block for both allocation sites.
+    Record.AllocSites.emplace_back(CSId, Info, Schema);
+  }
+  for (auto CSId : CallSiteIds)
+    Record.CallSites.push_back(IndexedCallSiteInfo(CSId));
+
+  std::bitset<llvm::to_underlying(Meta::Size)> SchemaBitSet;
+  for (auto Id : Schema)
+    SchemaBitSet.set(llvm::to_underlying(Id));
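+  // SchemaBitSet has one bit per possible field (Meta::Size is the sentinel
+  // enumerator counting them); only the four hot/cold fields should be set.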
+
+  // Verify that SchemaBitSet has the fields we expect and nothing else, which
+  // we check with count().
+  EXPECT_EQ(SchemaBitSet.count(), 4U);
+  EXPECT_TRUE(SchemaBitSet[llvm::to_underlying(Meta::AllocCount)]);
+  EXPECT_TRUE(SchemaBitSet[llvm::to_underlying(Meta::TotalSize)]);
+  EXPECT_TRUE(SchemaBitSet[llvm::to_underlying(Meta::TotalLifetime)]);
+  EXPECT_TRUE(
+      SchemaBitSet[llvm::to_underlying(Meta::TotalLifetimeAccessDensity)]);
+
+  // Verify that Schema has propagated all the way to the Info field in each
+  // IndexedAllocationInfo.
+  ASSERT_THAT(Record.AllocSites, SizeIs(2));
+  EXPECT_EQ(Record.AllocSites[0].Info.getSchema(), SchemaBitSet);
+  EXPECT_EQ(Record.AllocSites[1].Info.getSchema(), SchemaBitSet);
+
+  std::string Buffer;
+  llvm::raw_string_ostream OS(Buffer);
+  // V4 serialization needs a map from CallStackId to LinearCallStackId, so
+  // supply a dummy one.
+  llvm::DenseMap<CallStackId, LinearCallStackId> DummyMap = {
+      {0x123, 1}, {0x456, 2}, {0x333, 3}, {0x444, 4}};
+  Record.serialize(Schema, OS, Version4, &DummyMap);
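+  // Under V4, the serializer writes DummyMap[CSId] in place of each CSId, so
+  // the round-tripped record should hold the linear IDs 1 through 4 rather
+  // than the original hash-based IDs (see ExpectedRecord below).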
+
+  const IndexedMemProfRecord GotRecord = IndexedMemProfRecord::deserialize(
+      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()), Version4);
+
+  // Verify that Schema comes back correctly after deserialization.
+  // Technically, the comparison between ExpectedRecord and GotRecord below
+  // includes the comparison of their Schemas, but we'll verify the Schemas
+  // on our own.
+  ASSERT_THAT(GotRecord.AllocSites, SizeIs(2));
+  EXPECT_EQ(GotRecord.AllocSites[0].Info.getSchema(), SchemaBitSet);
+  EXPECT_EQ(GotRecord.AllocSites[1].Info.getSchema(), SchemaBitSet);
+
+  // Create the expected record using the linear IDs from the dummy map.
+  IndexedMemProfRecord ExpectedRecord;
+  for (const auto &CSId : CallStackIds)
+    ExpectedRecord.AllocSites.emplace_back(DummyMap[CSId], Info, Schema);
+  for (const auto &CSId : CallSiteIds)
+    ExpectedRecord.CallSites.emplace_back(DummyMap[CSId]);
+
+  EXPECT_EQ(ExpectedRecord, GotRecord);
+}
+
 TEST(MemProf, SymbolizationFilter) {
   auto Symbolizer = std::make_unique<MockSymbolizer>();
 