Skip to content

[WIP][API-Compat] Add paddle.compat.min/max and new PHI kernel (min/max_with_index) #74512

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 40 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
255f8f1
[API-Compat] paddle.compat.split is added and tested
Enigmatisms Aug 5, 2025
95aedbe
[API-Compat] paddle.compat.split is rigorously tested
Enigmatisms Aug 6, 2025
6166b8d
[API-Compat] Fixed erroneous func help doc
Enigmatisms Aug 6, 2025
41a0775
[API-Compat] Make the forbid_keywords decorator transparent
Enigmatisms Aug 6, 2025
c8930f8
[API-Compat] Fixed decorator str input
Enigmatisms Aug 6, 2025
d66c5f1
[API-Compat] Fixed type annotation and removed legacy graph branch
Enigmatisms Aug 6, 2025
b40feed
[API-Compat] More unittest & static graph check & updated decorator
Enigmatisms Aug 9, 2025
4eb4925
[API-Compat] Force update (local and not reproduce the bug)
Enigmatisms Aug 9, 2025
48f5bb0
[API-Compat] Removed unittest that paddle.split will also fail
Enigmatisms Aug 9, 2025
933c7c0
[API-Compat] Add paddle.compat.min/max and new PHI kernel (min/max_wi…
Enigmatisms Aug 8, 2025
93d7c86
[API-Compat] Add compat.min/max EN doc
Enigmatisms Aug 9, 2025
8ebe825
[WIP][API-Compat] Add dyna-graph unittests for min/max
Enigmatisms Aug 10, 2025
3d42943
[WIP][API-Compat] Fixed CPU failure
Enigmatisms Aug 10, 2025
ff38ddb
[API-Compat] Correct min/max_with index gradient behavior
Enigmatisms Aug 10, 2025
09aeb0d
[API-Compat] XPU fix (attempt)
Enigmatisms Aug 11, 2025
2f77d94
[API-Compat] Updated ForbidKeywordsDecorator
Enigmatisms Aug 11, 2025
e05a82d
Rename ctx to dev_ctx in paddle/phi/kernels/ [fluid_ops] (#74479)
co63oc Aug 11, 2025
5ed7519
refine some error message to avoid linking words together part7 (#74519)
ooooo-create Aug 11, 2025
73b235f
refine some error message to avoid linking words together part6 (#74520)
ooooo-create Aug 11, 2025
580d4a9
[AutoParallel] fix the grad_clip logic of auto_hybrid_pp (#74409)
zty-king Aug 11, 2025
9076968
test/cpp rename use_mkldnn (#74501)
co63oc Aug 11, 2025
fc98858
test/ directory modify use_mkldnn [fluid_ops] - part (#74487)
co63oc Aug 11, 2025
e237e01
test/deprecated/cpp modify use_mkldnn [fluid_ops] (#74502)
co63oc Aug 11, 2025
4a9c975
rename ctx to dev_ctx,xpu_ctx (#74513)
co63oc Aug 11, 2025
6b71a4b
is_test_pass_tester.cc modify use_mkldnn [fluid_ops] (#74518)
co63oc Aug 11, 2025
f962b15
create_inference_config modify use_mkldnn [fluid_ops] (#74516)
co63oc Aug 11, 2025
157bbb2
fix typos (#74497)
ooooo-create Aug 11, 2025
c55f40b
[xpu] fix compile (#74492)
zhupengyang Aug 11, 2025
026dd5f
CINN Add more Simplify scenario: Select2MinMax, BoundSimplify, PowerL…
YuhanXu Aug 11, 2025
c8f5638
dygraph support input a out Tensor (#74484)
wanghuancoder Aug 11, 2025
07368c2
[Fix Issue] paddle.unique Exhibits Inconsistent Behavior with NaN Val…
enkilee Aug 11, 2025
cd23a02
correct copysign backward (#74322)
wangyuwen1999 Aug 11, 2025
c91fe43
Enhancing fused_transpose_split_quant with fp8 capability. (#74471)
A-nnonymous Aug 11, 2025
fbd9f59
test_mkldnn_matmul_v2_elementwise_add_fuse_pass.py modify use_mkldnn …
co63oc Aug 11, 2025
17b20dd
some create api support more usage (#74494)
zhwesky2010 Aug 11, 2025
761cd99
[API-Compat] More unittest & static graph check & updated decorator
Enigmatisms Aug 9, 2025
bf0b293
[API-Compat] Updated ForbidKeywordsDecorator
Enigmatisms Aug 11, 2025
1514511
[API-Compat] Static Graph and CPU end debug
Enigmatisms Aug 11, 2025
c55dc29
[API-Compat] Revert erroneous rebase
Enigmatisms Aug 11, 2025
9f7d036
[API-Compat] Removed one split unittest, since the former PR is not m…
Enigmatisms Aug 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions cmake/external/xpu.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -253,9 +253,9 @@ if(WITH_XPU_XRE5)
DOWNLOAD_COMMAND
bash ${CMAKE_SOURCE_DIR}/tools/xpu/pack_paddle_dependence.sh
${XPU_XRE_URL} ${XPU_XRE_DIR_NAME} ${XPU_XHPC_URL} ${XPU_XHPC_DIR_NAME}
${XPU_XCCL_URL} ${XPU_XCCL_DIR_NAME} 1 ${WITH_MKL}
"${CMAKE_SOURCE_DIR}/build" && wget ${XPU_XFT_GET_DEPENCE_URL} && bash
${XFT_COMMAND} ${XPU_XFT_URL} ${XPU_XFT_DIR_NAME} && bash
${XPU_XCCL_URL} ${XPU_XCCL_DIR_NAME} 1 ${WITH_MKL} "${CMAKE_BINARY_DIR}"
&& wget ${XPU_XFT_GET_DEPENCE_URL} && bash ${XFT_COMMAND} ${XPU_XFT_URL}
${XPU_XFT_DIR_NAME} && bash
${CMAKE_SOURCE_DIR}/tools/xpu/get_xpti_dependence.sh ${XPU_XPTI_URL}
${XPU_XPTI_DIR_NAME} && bash
${CMAKE_SOURCE_DIR}/tools/xpu/get_xpufft_dependence.sh ${XPU_FFT_URL}
Expand Down
13 changes: 11 additions & 2 deletions paddle/cinn/common/integer_set.cc
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,8 @@ cas_intervals_t CollectVarIntervalsOfExprs(const std::vector<ir::Expr>& exprs,
lower_bound = ir::Expr(1);
}
var_intervals.insert(
{var->name, CasInterval(lower_bound, upper_bound)});
{var->name,
CasInterval(lower_bound, NormalizeUpperBound(upper_bound))});
}
return false;
});
Expand Down Expand Up @@ -572,14 +573,21 @@ class BoundReplacer : public ir::IRMutator<> {
ir::Expr SymbolicExprAnalyzer::LowerBound(const ir::Expr& expr) const {
BoundReplacer bound_replacer(var_intervals_, true);
ir::Expr bound = ir::ir_utils::IRCopy(expr);
if (bound.is_index()) {
bound = bound.as_index().Normalize(ir::IndexExpr::OptLevel::kLevel3);
}
bound_replacer(&bound);
return optim::ArithSimplify(bound);
}

ir::Expr SymbolicExprAnalyzer::UpperBound(const ir::Expr& expr) const {
BoundReplacer bound_replacer(var_intervals_, false);
ir::Expr bound = ir::ir_utils::IRCopy(expr);
if (bound.is_index()) {
bound = bound.as_index().Normalize(ir::IndexExpr::OptLevel::kLevel3);
}
bound_replacer(&bound);

return optim::ArithSimplify(bound);
}

Expand Down Expand Up @@ -709,7 +717,8 @@ SingleIntervalIntSet::SingleIntervalIntSet(const ir::Expr& min,
? x->as_var()->upper_bound
: SymbolicExprLimit::positive_inf;
var_intervals_.insert(
{x->as_var()->name, CasInterval(lower_bound, upper_bound)});
{x->as_var()->name,
CasInterval(lower_bound, NormalizeUpperBound(upper_bound))});
}
return false;
};
Expand Down
10 changes: 10 additions & 0 deletions paddle/cinn/common/ir_util.cc
Original file line number Diff line number Diff line change
Expand Up @@ -270,6 +270,16 @@ bool is_zero(Expr v) {
return false;
}

// Convert an upper bound between interval conventions.
// With minus_one == true (the default), an exclusive upper bound is turned
// into an inclusive one, i.e. [lower, upper) becomes [lower, upper - 1].
// With minus_one == false, an inclusive upper bound is turned into an
// exclusive one, i.e. [lower, upper] becomes [lower, upper + 1).
// positive_inf is returned unchanged: it has no finite neighbor to shift to.
Expr NormalizeUpperBound(Expr upper_bound, bool minus_one /* = true */) {
  if (upper_bound == SymbolicExprLimit::positive_inf) {
    return upper_bound;
  }
  if (minus_one) {
    return upper_bound - ir::Expr(1);  // [lower, upper) to [lower, upper]
  }
  return upper_bound + ir::Expr(1);  // [lower, upper] to [lower, upper)
}

Expr CastIfNeeded(Expr body, Type type) {
if (body.type() == type) return body;
return ir::Cast::Make(type, body);
Expand Down
2 changes: 2 additions & 0 deletions paddle/cinn/common/ir_util.h
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,8 @@ std::vector<std::string> GatherItersToTensorProducer(

bool is_zero(Expr v);

Expr NormalizeUpperBound(Expr upper_bound, bool minus_one = true);

bool MathEqual(const Expr &a, const Expr &b);

//! helper function to get a ir::Select node.
Expand Down
2 changes: 1 addition & 1 deletion paddle/cinn/ir/group_schedule/dy_shape_group_scheduler.cc
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ std::vector<std::pair<SymbolicPredicate, ir::Expr>>
DynamicShapeGroupScheduler::GetCX86IRs() {
std::vector<std::pair<SymbolicPredicate, ir::Expr>> irs(1);
irs[0].first = ir::EQ::Make(ir::Expr(1), ir::Expr(1));
irs[1].second = ir_sch_->GetModule().GetExprs()[0];
irs[0].second = ir_sch_->GetModule().GetExprs()[0];
return irs;
}

Expand Down
10 changes: 7 additions & 3 deletions paddle/cinn/ir/group_schedule/tactic/arrange_storage_tactic.cc
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,8 @@ IntSet Evaluate(Expr expr,
const std::unordered_map<ir::Var, IntSet>& var_domain) {
Expr copy_for_upper_bound = ir::ir_utils::IRCopy(expr);
Expr copy_for_lower_bound = ir::ir_utils::IRCopy(expr);
common::cas_intervals_t var_intervals;
common::cas_intervals_t
var_intervals; // variable name -> CasIntervals[lower_bound, upper_bound]
std::vector<ir::Expr> var_vec = ir::ir_utils::CollectIRNodesWithoutTensor(
expr, [](const ir::Expr* x) { return x->as_var(); });
for (Expr var_expr : var_vec) {
Expand All @@ -150,7 +151,9 @@ IntSet Evaluate(Expr expr,
const ir::Var& fixed_var = fixed.at(var);
var_intervals.emplace(
fixed_var->name,
common::CasInterval(fixed_var->lower_bound, fixed_var->upper_bound));
common::CasInterval(
fixed_var->lower_bound,
cinn::common::NormalizeUpperBound(fixed_var->upper_bound)));
optim::ReplaceVarWithExpr(&copy_for_lower_bound, var, Expr(fixed_var));
optim::ReplaceVarWithExpr(&copy_for_upper_bound, var, Expr(fixed_var));
} else if (var_domain.count(var) != 0) {
Expand All @@ -172,7 +175,8 @@ IntSet Evaluate(Expr expr,
::common::errors::InvalidArgument(
"The 'upper_bound' of the variable must be defined."));
optim::ReplaceVarWithExpr(&copy_for_lower_bound, var, var->lower_bound);
optim::ReplaceVarWithExpr(&copy_for_upper_bound, var, var->upper_bound);
optim::ReplaceVarWithExpr(
&copy_for_upper_bound, var, NormalizeUpperBound(var->upper_bound));
}
}
ir::Expr lower_bound = optim::ArithSimplify(copy_for_lower_bound);
Expand Down
2 changes: 2 additions & 0 deletions paddle/cinn/ir/ir.h
Original file line number Diff line number Diff line change
Expand Up @@ -421,6 +421,7 @@ struct _Var_ : public ExprNode<_Var_> {
};

//! A named variable.
// i ∈ [lower_bound, upper_bound)
struct Var : public IrNodeRef {
Var() = default;
explicit Var(IrNode* n) : IrNodeRef(n) {}
Expand Down Expand Up @@ -846,6 +847,7 @@ struct For : public ExprNode<For>, public ForBase {
//! The minimum value of the iteration.
Expr min;
//! The extent of the iteration.
// loop_var ∈ [min, min + extent)
Expr extent;

Expr body;
Expand Down
12 changes: 8 additions & 4 deletions paddle/cinn/ir/ir_analyzer/ir_analyzer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,8 @@ std::vector<ir::Var> IndicesToVars(const std::vector<ir::Expr>& indices) {
if (e.is_constant()) {
std::string var_name =
cinn::UniqName("constant" + static_cast<int>(e.get_constant()));
result.emplace_back(e, e, var_name, /* is_reduce = */ false);
result.emplace_back(
e, NormalizeUpperBound(e, false), var_name, /* is_reduce = */ false);
} else if (e.As<ir::_Var_>() != nullptr) {
ir::Expr copy_e = ir::ir_utils::IRCopy(e);
ir::_Var_* var_ref = copy_e.As<ir::_Var_>();
Expand All @@ -635,14 +636,17 @@ std::vector<ir::Var> IndicesToVars(const std::vector<ir::Expr>& indices) {
ir::Var var = x->as_var_ref();
var_intervals.insert(
{var->name,
common::CasInterval{var->lower_bound, var->upper_bound}});
common::CasInterval{var->lower_bound,
NormalizeUpperBound(var->upper_bound)}});
if (var->is_reduce_axis) is_reduce = true;
}
return false;
});
common::SymbolicExprAnalyzer analyzer(var_intervals);
result.emplace_back(
analyzer.LowerBound(e), analyzer.UpperBound(e), var_name, is_reduce);
result.emplace_back(analyzer.LowerBound(e),
NormalizeUpperBound(analyzer.UpperBound(e), false),
var_name,
is_reduce);
}
}
return result;
Expand Down
Loading