@@ -23,18 +23,6 @@ MemProfSchema getHotColdSchema() {
           Meta::TotalLifetimeAccessDensity};
 }
 
-static size_t serializedSizeV0(const IndexedAllocationInfo &IAI,
-                               const MemProfSchema &Schema) {
-  size_t Size = 0;
-  // The number of frames to serialize.
-  Size += sizeof(uint64_t);
-  // The callstack frame ids.
-  Size += sizeof(FrameId) * IAI.CallStack.size();
-  // The size of the payload.
-  Size += PortableMemInfoBlock::serializedSize(Schema);
-  return Size;
-}
-
 static size_t serializedSizeV2(const IndexedAllocationInfo &IAI,
                                const MemProfSchema &Schema) {
   size_t Size = 0;
@@ -58,8 +46,6 @@ static size_t serializedSizeV3(const IndexedAllocationInfo &IAI,
 size_t IndexedAllocationInfo::serializedSize(const MemProfSchema &Schema,
                                              IndexedVersion Version) const {
   switch (Version) {
-  case Version1:
-    return serializedSizeV0(*this, Schema);
   case Version2:
     return serializedSizeV2(*this, Schema);
   case Version3:
@@ -68,23 +54,6 @@ size_t IndexedAllocationInfo::serializedSize(const MemProfSchema &Schema,
   llvm_unreachable("unsupported MemProf version");
 }
 
-static size_t serializedSizeV1(const IndexedMemProfRecord &Record,
-                               const MemProfSchema &Schema) {
-  // The number of alloc sites to serialize.
-  size_t Result = sizeof(uint64_t);
-  for (const IndexedAllocationInfo &N : Record.AllocSites)
-    Result += N.serializedSize(Schema, Version1);
-
-  // The number of callsites we have information for.
-  Result += sizeof(uint64_t);
-  for (const auto &Frames : Record.CallSites) {
-    // The number of frame ids to serialize.
-    Result += sizeof(uint64_t);
-    Result += Frames.size() * sizeof(FrameId);
-  }
-  return Result;
-}
-
 static size_t serializedSizeV2(const IndexedMemProfRecord &Record,
                                const MemProfSchema &Schema) {
   // The number of alloc sites to serialize.
@@ -116,8 +85,6 @@ static size_t serializedSizeV3(const IndexedMemProfRecord &Record,
 size_t IndexedMemProfRecord::serializedSize(const MemProfSchema &Schema,
                                             IndexedVersion Version) const {
   switch (Version) {
-  case Version1:
-    return serializedSizeV1(*this, Schema);
   case Version2:
     return serializedSizeV2(*this, Schema);
   case Version3:
@@ -126,29 +93,6 @@ size_t IndexedMemProfRecord::serializedSize(const MemProfSchema &Schema,
   llvm_unreachable("unsupported MemProf version");
 }
 
-static void serializeV1(const IndexedMemProfRecord &Record,
-                        const MemProfSchema &Schema, raw_ostream &OS) {
-  using namespace support;
-
-  endian::Writer LE(OS, llvm::endianness::little);
-
-  LE.write<uint64_t>(Record.AllocSites.size());
-  for (const IndexedAllocationInfo &N : Record.AllocSites) {
-    LE.write<uint64_t>(N.CallStack.size());
-    for (const FrameId &Id : N.CallStack)
-      LE.write<FrameId>(Id);
-    N.Info.serialize(Schema, OS);
-  }
-
-  // Related contexts.
-  LE.write<uint64_t>(Record.CallSites.size());
-  for (const auto &Frames : Record.CallSites) {
-    LE.write<uint64_t>(Frames.size());
-    for (const FrameId &Id : Frames)
-      LE.write<FrameId>(Id);
-  }
-}
-
 static void serializeV2(const IndexedMemProfRecord &Record,
                         const MemProfSchema &Schema, raw_ostream &OS) {
   using namespace support;
@@ -195,9 +139,6 @@ void IndexedMemProfRecord::serialize(
     llvm::DenseMap<CallStackId, LinearCallStackId> *MemProfCallStackIndexes)
     const {
   switch (Version) {
-  case Version1:
-    serializeV1(*this, Schema, OS);
-    return;
   case Version2:
     serializeV2(*this, Schema, OS);
     return;
@@ -208,50 +149,6 @@ void IndexedMemProfRecord::serialize(
   llvm_unreachable("unsupported MemProf version");
 }
 
-static IndexedMemProfRecord deserializeV1(const MemProfSchema &Schema,
-                                          const unsigned char *Ptr) {
-  using namespace support;
-
-  IndexedMemProfRecord Record;
-
-  // Read the meminfo nodes.
-  const uint64_t NumNodes =
-      endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
-  for (uint64_t I = 0; I < NumNodes; I++) {
-    IndexedAllocationInfo Node;
-    const uint64_t NumFrames =
-        endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
-    for (uint64_t J = 0; J < NumFrames; J++) {
-      const FrameId Id =
-          endian::readNext<FrameId, llvm::endianness::little>(Ptr);
-      Node.CallStack.push_back(Id);
-    }
-    Node.CSId = hashCallStack(Node.CallStack);
-    Node.Info.deserialize(Schema, Ptr);
-    Ptr += PortableMemInfoBlock::serializedSize(Schema);
-    Record.AllocSites.push_back(Node);
-  }
-
-  // Read the callsite information.
-  const uint64_t NumCtxs =
-      endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
-  for (uint64_t J = 0; J < NumCtxs; J++) {
-    const uint64_t NumFrames =
-        endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
-    llvm::SmallVector<FrameId> Frames;
-    Frames.reserve(NumFrames);
-    for (uint64_t K = 0; K < NumFrames; K++) {
-      const FrameId Id =
-          endian::readNext<FrameId, llvm::endianness::little>(Ptr);
-      Frames.push_back(Id);
-    }
-    Record.CallSites.push_back(Frames);
-    Record.CallSiteIds.push_back(hashCallStack(Frames));
-  }
-
-  return Record;
-}
-
 static IndexedMemProfRecord deserializeV2(const MemProfSchema &Schema,
                                           const unsigned char *Ptr) {
   using namespace support;
@@ -324,8 +221,6 @@ IndexedMemProfRecord::deserialize(const MemProfSchema &Schema,
                                   const unsigned char *Ptr,
                                   IndexedVersion Version) {
   switch (Version) {
-  case Version1:
-    return deserializeV1(Schema, Ptr);
   case Version2:
     return deserializeV2(Schema, Ptr);
   case Version3: