@@ -951,7 +951,7 @@ bool Memory::mayalias(const Pointer &p, bool local, unsigned bid0,
951951 if (state->isUndef (offset0))
952952 return false ;
953953
954- if (p.blockSizeAligned ().ult (bytes).isTrue ())
954+ if (p.blockSize ().ult (bytes).isTrue ())
955955 return false ;
956956
957957 // expr offset = offset0.sextOrTrunc(bits_size_t);
@@ -1172,10 +1172,9 @@ vector<Byte> Memory::load(const Pointer &ptr, unsigned bytes, set<expr> &undef,
11721172 }
11731173 } else {
11741174 uint64_t blk_size = UINT64_MAX;
1175- bool single_load
1176- = ptr.blockSizeAligned ().isUInt (blk_size) && blk_size == bytes;
1177- auto offset = ptr.getShortOffset ();
1178- expr blk_offset = single_load ? expr::mkUInt (0 , offset) : offset;
1175+ bool single_load = ptr.blockSize ().isUInt (blk_size) && blk_size == bytes;
1176+ auto offset = ptr.getShortOffset ();
1177+ expr blk_offset = single_load ? expr::mkUInt (0 , offset) : offset;
11791178
11801179 for (unsigned i = 0 ; i < loaded_bytes; ++i) {
11811180 unsigned idx = left2right ? i : (loaded_bytes - i - 1 );
@@ -1246,8 +1245,7 @@ void Memory::store(const Pointer &ptr,
12461245 auto mem = blk.val ;
12471246
12481247 uint64_t blk_size;
1249- bool full_write
1250- = ptr.blockSizeAligned ().isUInt (blk_size) && blk_size == bytes;
1248+ bool full_write = ptr.blockSize ().isUInt (blk_size) && blk_size == bytes;
12511249
12521250 // optimization: if fully rewriting the block, don't bother with the old
12531251 // contents. Pick a value as the default one.
@@ -1313,7 +1311,7 @@ void Memory::storeLambda(const Pointer &ptr, const expr &offset,
13131311 auto orig_val = ::raw_load (blk.val , offset);
13141312
13151313 // optimization: full rewrite
1316- if (full_write || bytes.eq (ptr.blockSizeAligned ())) {
1314+ if (full_write || bytes.eq (ptr.blockSize ())) {
13171315 blk.val = val_no_offset
13181316 ? mk_block_if (cond, val, std::move (blk.val ))
13191317 : expr::mkLambda (offset, " #offset" ,
@@ -1611,12 +1609,6 @@ void Memory::mkAxioms(const Memory &tgt) const {
16111609 if (!p_align.isConst () || !q_align.isConst ())
16121610 state->addAxiom (p_align == q_align);
16131611 }
1614- for (unsigned bid = num_nonlocals_src; bid < num_nonlocals; ++bid) {
1615- if (skip_bid (bid))
1616- continue ;
1617- Pointer q (tgt, bid, false );
1618- state->addAxiom (q.isHeapAllocated ().implies (q.blockAlignment () == align));
1619- }
16201612
16211613 for (unsigned bid = skip_null (); bid < num_nonlocals_src; ++bid) {
16221614 Pointer p (*this , bid, false );
@@ -1666,17 +1658,13 @@ void Memory::mkAxioms(const Memory &tgt) const {
16661658 expr::mkUInt (0 , 1 ).concat (last.extract (bits_ptr_address-2 , 0 )))
16671659 : addr != last);
16681660 } else {
1669- sz = p1.blockMaxSizeAligned ().zextOrTrunc (bits_ptr_address);
16701661 state->addAxiom (
16711662 Pointer::hasLocalBit ()
16721663 // don't spill to local addr section
16731664 ? (addr + sz).sign () == 0
16741665 : addr.add_no_uoverflow (sz));
16751666 }
16761667
1677- state->addAxiom (p1.blockMaxSize ()
1678- .round_up_bits_no_overflow (p1.blockAlignment ()));
1679-
16801668 if (num_nonlocals > max_quadratic_disjoint)
16811669 continue ;
16821670
@@ -1686,8 +1674,7 @@ void Memory::mkAxioms(const Memory &tgt) const {
16861674 continue ;
16871675 Pointer p2 (tgt, bid2, false );
16881676 state->addAxiom (disjoint (addr, sz, align, p2.getAddress (),
1689- p2.blockMaxSizeAligned ()
1690- .zextOrTrunc (bits_ptr_address),
1677+ p2.blockMaxSize ().zextOrTrunc (bits_ptr_address),
16911678 p2.blockAlignment ()));
16921679 }
16931680 }
@@ -1706,10 +1693,10 @@ void Memory::mkAxioms(const Memory &tgt) const {
17061693 expr::mkForAll (2 , vars, names,
17071694 bid1 == bid2 ||
17081695 disjoint (p1.getAddress (),
1709- p1.blockMaxSizeAligned ().zextOrTrunc (bits_ptr_address),
1696+ p1.blockMaxSize ().zextOrTrunc (bits_ptr_address),
17101697 p1.blockAlignment (),
17111698 p2.getAddress (),
1712- p2.blockMaxSizeAligned ().zextOrTrunc (bits_ptr_address),
1699+ p2.blockMaxSize ().zextOrTrunc (bits_ptr_address),
17131700 p2.blockAlignment ())));
17141701 }
17151702}
@@ -2150,8 +2137,7 @@ static expr disjoint_local_blocks(const Memory &m, const expr &addr,
21502137 Pointer p2 (m, Pointer::mkLongBid (sbid, true ), zero);
21512138 disj &= p2.isBlockAlive ()
21522139 .implies (disjoint (addr, sz, align, p2.getAddress (),
2153- p2.blockSizeAligned ()
2154- .zextOrTrunc (bits_ptr_address),
2140+ p2.blockSize ().zextOrTrunc (bits_ptr_address),
21552141 p2.blockAlignment ()));
21562142 }
21572143 return disj;
@@ -2305,7 +2291,7 @@ void Memory::startLifetime(const StateValue &ptr) {
23052291 !ptr.non_poison ||
23062292 p.isBlockAlive () ||
23072293 disjoint_local_blocks (*this , p.getAddress (),
2308- p.blockSizeAligned ().zextOrTrunc (bits_ptr_address),
2294+ p.blockSize ().zextOrTrunc (bits_ptr_address),
23092295 p.blockAlignment (), local_blk_addr));
23102296
23112297 store_bv (p, true , local_block_liveness, non_local_block_liveness, true ,
@@ -2481,7 +2467,7 @@ void Memory::memset(const expr &p, const StateValue &val, const expr &bytesize,
24812467 unsigned bytesz = bits_byte / 8 ;
24822468 Pointer ptr (*this , p);
24832469 if (deref_check)
2484- state->addUB (ptr.isDereferenceable (bytesize, align, true , false , false ));
2470+ state->addUB (ptr.isDereferenceable (bytesize, align, true , false ));
24852471
24862472 auto wval = val;
24872473 for (unsigned i = 1 ; i < bytesz; ++i) {
@@ -2545,8 +2531,8 @@ void Memory::memcpy(const expr &d, const expr &s, const expr &bytesize,
25452531 unsigned bytesz = bits_byte / 8 ;
25462532
25472533 Pointer dst (*this , d), src (*this , s);
2548- state->addUB (dst.isDereferenceable (bytesize, align_dst, true , false , false ));
2549- state->addUB (src.isDereferenceable (bytesize, align_src, false , false , false ));
2534+ state->addUB (dst.isDereferenceable (bytesize, align_dst, true , false ));
2535+ state->addUB (src.isDereferenceable (bytesize, align_src, false , false ));
25502536 if (!is_move)
25512537 src.isDisjointOrEqual (bytesize, dst, bytesize);
25522538
@@ -2623,7 +2609,7 @@ void Memory::copy(const Pointer &src, const Pointer &dst) {
26232609
26242610void Memory::fillPoison (const expr &bid) {
26252611 Pointer p (*this , bid, expr::mkUInt (0 , bits_for_offset));
2626- expr blksz = p.blockSizeAligned ();
2612+ expr blksz = p.blockSize ();
26272613 memset (std::move (p).release (), IntType (" i8" , 8 ).getDummyValue (false ),
26282614 std::move (blksz), bits_byte / 8 , {}, false );
26292615}
@@ -2708,7 +2694,7 @@ expr Memory::blockValRefined(const Pointer &src, const Memory &tgt,
27082694 expr ptr_offset = src.getShortOffset ();
27092695 uint64_t bytes;
27102696 if (full_check &&
2711- src.blockSizeAligned ().isUInt (bytes) && (bytes / bytes_per_byte) <= 8 ) {
2697+ src.blockSize ().isUInt (bytes) && (bytes / bytes_per_byte) <= 8 ) {
27122698 expr val_refines = true ;
27132699 for (unsigned off = 0 ; off < (bytes / bytes_per_byte); ++off) {
27142700 expr off_expr = expr::mkUInt (off, ptr_offset);
@@ -2719,8 +2705,7 @@ expr Memory::blockValRefined(const Pointer &src, const Memory &tgt,
27192705 expr cnstr = refined (ptr_offset);
27202706 if (!full_check)
27212707 return cnstr;
2722- return
2723- src.getOffsetSizet ().ult (src.blockSizeAlignedOffsetT ()).implies (cnstr);
2708+ return src.getOffsetSizet ().ult (src.blockSizeOffsetT ()).implies (cnstr);
27242709 }
27252710}
27262711
@@ -2797,8 +2782,7 @@ Memory::refined(const Memory &other, bool fncall,
27972782 !stored_tgt.second && stored_tgt.first .empty ()) {
27982783 // block not stored; no need to verify
27992784 }
2800- else if (p.blockSizeAligned ().isUInt (bytes) &&
2801- (bytes / (bits_byte / 8 )) <= 8 ) {
2785+ else if (p.blockSize ().isUInt (bytes) && (bytes / (bits_byte / 8 )) <= 8 ) {
28022786 // this is a small block; just check it thoroughly
28032787 val_refined = blockValRefined (p, other, bid, undef_vars, true );
28042788 }