4 changes: 2 additions & 2 deletions mlir/lib/Analysis/Presburger/IntegerRelation.cpp
@@ -2265,11 +2265,11 @@ IntegerRelation::unionBoundingBox(const IntegerRelation &otherCst) {
newLb[d] = lbFloorDivisor;
newUb[d] = -lbFloorDivisor;
// Copy over the symbolic part + constant term.
-    std::copy(minLb.begin(), minLb.end(), newLb.begin() + getNumDimVars());
+    llvm::copy(minLb, newLb.begin() + getNumDimVars());
std::transform(newLb.begin() + getNumDimVars(), newLb.end(),
newLb.begin() + getNumDimVars(),
std::negate<DynamicAPInt>());
-    std::copy(maxUb.begin(), maxUb.end(), newUb.begin() + getNumDimVars());
+    llvm::copy(maxUb, newUb.begin() + getNumDimVars());

boundingLbs.emplace_back(newLb);
boundingUbs.emplace_back(newUb);
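Note: for context, a minimal standalone sketch of the range-based idiom this patch adopts throughout (not part of the patch itself). llvm::copy from llvm/ADT/STLExtras.h forwards to std::copy over the whole range, so each call site drops the explicit begin()/end() pair. The prependTwo helper below is hypothetical, loosely modeled on the doubleBuffer hunk further down.

#include <cstdint>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"

// Hypothetical helper (illustration only): build a new shape consisting of a
// leading factor of 2 followed by the original dimensions.
static llvm::SmallVector<int64_t, 4> prependTwo(llvm::ArrayRef<int64_t> oldShape) {
  llvm::SmallVector<int64_t, 4> newShape(1 + oldShape.size());
  newShape[0] = 2;
  // Before: std::copy(oldShape.begin(), oldShape.end(), newShape.begin() + 1);
  // After: llvm::copy takes the whole range and forwards to std::copy.
  llvm::copy(oldShape, newShape.begin() + 1);
  return newShape;
}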
2 changes: 1 addition & 1 deletion mlir/lib/CAPI/IR/BuiltinTypes.cpp
@@ -536,7 +536,7 @@ MlirLogicalResult mlirMemRefTypeGetStridesAndOffset(MlirType type,
if (failed(memrefType.getStridesAndOffset(strides_, *offset)))
return mlirLogicalResultFailure();

-  (void)std::copy(strides_.begin(), strides_.end(), strides);
+  (void)llvm::copy(strides_, strides);
return mlirLogicalResultSuccess();
}

@@ -82,7 +82,7 @@ static bool doubleBuffer(Value oldMemRef, AffineForOp forOp) {
ArrayRef<int64_t> oldShape = oldMemRefType.getShape();
SmallVector<int64_t, 4> newShape(1 + oldMemRefType.getRank());
newShape[0] = 2;
-    std::copy(oldShape.begin(), oldShape.end(), newShape.begin() + 1);
+    llvm::copy(oldShape, newShape.begin() + 1);
return MemRefType::Builder(oldMemRefType).setShape(newShape).setLayout({});
};

3 changes: 1 addition & 2 deletions mlir/lib/Dialect/Linalg/Transforms/HoistPadding.cpp
@@ -913,8 +913,7 @@ static Value replaceByPackingResult(RewriterBase &rewriter,
llvm_unreachable("loop independence prerequisite not met");

// offsets = [maybe_leading_ivs = originalLoopIvs, 0 .. 0].
-    std::copy(loopIterationCounts.begin(), loopIterationCounts.end(),
-              offsets.begin());
+    llvm::copy(loopIterationCounts, offsets.begin());
hoistedPackedTensor =
scf::getForInductionVarOwner(packingResult.clonedLoopIvs.front())
->getResult(0);
@@ -197,7 +197,7 @@ class SparseIterator {
// Sets the iterate to the specified position.
void seek(ValueRange vals) {
assert(vals.size() == cursorValsCnt);
-    std::copy(vals.begin(), vals.end(), cursorValsStorageRef.begin());
+    llvm::copy(vals, cursorValsStorageRef.begin());
// Now that the iterator is re-positioned, the coordinate becomes invalid.
crd = nullptr;
}
2 changes: 1 addition & 1 deletion mlir/lib/Dialect/XeGPU/Utils/XeGPUUtils.cpp
@@ -308,7 +308,7 @@ xegpu::extractVectorsWithShapeFromValue(OpBuilder &builder, Location loc,
int64_t rankDiff = srcShapeRank - targetShapeRank;
std::fill(adjustedTargetShape.begin(), adjustedTargetShape.begin() + rankDiff,
1);
-  std::copy(shape.begin(), shape.end(), adjustedTargetShape.begin() + rankDiff);
+  llvm::copy(shape, adjustedTargetShape.begin() + rankDiff);

SmallVector<Value> result;
for (SmallVector<int64_t> offsets :
2 changes: 1 addition & 1 deletion mlir/lib/Tools/PDLL/AST/Nodes.cpp
@@ -21,7 +21,7 @@ static StringRef copyStringWithNull(Context &ctx, StringRef str) {
return str;

char *data = ctx.getAllocator().Allocate<char>(str.size() + 1);
-  std::copy(str.begin(), str.end(), data);
+  llvm::copy(str, data);
data[str.size()] = 0;
return StringRef(data, str.size());
}