Skip to content

Commit ef908d6

Browse files
committed
Try build v3.2.1
1 parent 36d07ef commit ef908d6

File tree

2 files changed

+16
-110
lines changed

2 files changed

+16
-110
lines changed

.github/workflows/paddle-build.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,16 @@
22
on:
33
push:
44
branches:
5-
- feature/3.1
5+
- feature/3.2
66
- master
77
paths:
88
- .github/workflows/paddle-build.yml
99
- build/capi.patch
1010
workflow_dispatch:
1111

1212
env:
13-
PADDLE_VERSION: "3.1.0"
14-
PADDLE_BRANCH: "v3.1.0"
13+
PADDLE_VERSION: "3.2.1"
14+
PADDLE_BRANCH: "v3.2.1"
1515

1616
jobs:
1717
build-all:
@@ -113,7 +113,7 @@ jobs:
113113
shell: pwsh
114114
run: |
115115
cd ./paddle-src
116-
Invoke-WebRequest -Uri https://raw.githubusercontent.com/sdcb/PaddleSharp/refs/heads/feature/3.1/build/capi.patch -OutFile capi.patch
116+
Invoke-WebRequest -Uri https://raw.githubusercontent.com/sdcb/PaddleSharp/refs/heads/feature/3.2/build/capi.patch -OutFile capi.patch
117117
git apply --ignore-whitespace capi.patch
118118
rm capi.patch
119119

build/capi.patch

Lines changed: 12 additions & 106 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,5 @@
1-
diff --git a/cmake/flags.cmake b/cmake/flags.cmake
2-
index e49922c355..74857fa8f4 100644
3-
--- a/cmake/flags.cmake
4-
+++ b/cmake/flags.cmake
5-
@@ -244,7 +244,6 @@ if(APPLE)
6-
set(COMMON_FLAGS
7-
-Wno-deprecated-register
8-
-Werror=format
9-
- -Werror=inconsistent-missing-override
10-
-Werror=braced-scalar-init
11-
-Werror=uninitialized
12-
-Werror=tautological-constant-out-of-range-compare
131
diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
14-
index 51ecf263ca..456f90c202 100644
2+
index 28f8233117..df8220b119 100644
153
--- a/paddle/fluid/inference/api/analysis_predictor.cc
164
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
175
@@ -464,7 +464,9 @@ bool AnalysisPredictor::Init(
@@ -25,16 +13,7 @@ index 51ecf263ca..456f90c202 100644
2513
load_pir_model_ =
2614
model_path.substr(model_path.find_last_of(".") + 1) == "json";
2715
} else if (!config_.model_dir().empty()) {
28-
@@ -540,7 +542,7 @@ bool AnalysisPredictor::Init(
29-
if (!PrepareExecutor()) {
30-
return true;
31-
}
32-
-
33-
+
34-
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
35-
// TODO(inference): Now only gpu with external stream support private
36-
// device_context.
37-
@@ -1376,7 +1378,7 @@ bool AnalysisPredictor::SaveOrLoadPirParameters(bool for_save) {
16+
@@ -1379,7 +1381,7 @@ bool AnalysisPredictor::SaveOrLoadPirParameters(bool for_save) {
3817
}
3918

4019
} else {
@@ -43,87 +22,6 @@ index 51ecf263ca..456f90c202 100644
4322
pir::LoadCombineFunction(config_.params_file(),
4423
filter_param_names,
4524
&tensor_out,
46-
diff --git a/paddle/fluid/inference/capi_exp/pd_config.cc b/paddle/fluid/inference/capi_exp/pd_config.cc
47-
index 9238599111..8e38198227 100644
48-
--- a/paddle/fluid/inference/capi_exp/pd_config.cc
49-
+++ b/paddle/fluid/inference/capi_exp/pd_config.cc
50-
@@ -508,4 +508,32 @@ __pd_give PD_Cstr* PD_ConfigSummary(__pd_keep PD_Config* pd_config) {
51-
return paddle_infer::CvtStrToCstr(sum_str);
52-
}
53-
54-
+void PD_ConfigEnableNewExecutor(__pd_keep PD_Config* pd_config,
55-
+ PD_Bool x) {
56-
+ CHECK_AND_CONVERT_PD_CONFIG;
57-
+ config->EnableNewExecutor(x);
58-
+}
59-
+
60-
+PD_Bool PD_ConfigNewExecutorEnabled(__pd_keep PD_Config* pd_config) {
61-
+ CHECK_AND_CONVERT_PD_CONFIG;
62-
+ return config->new_executor_enabled(); // NOLINT
63-
+}
64-
+
65-
+void PD_ConfigEnableNewIR(__pd_keep PD_Config* pd_config,
66-
+ PD_Bool x) {
67-
+ CHECK_AND_CONVERT_PD_CONFIG;
68-
+ config->EnableNewIR(x);
69-
+}
70-
+
71-
+PD_Bool PD_ConfigNewIREnabled(__pd_keep PD_Config* pd_config) {
72-
+ CHECK_AND_CONVERT_PD_CONFIG;
73-
+ return config->new_ir_enabled(); // NOLINT
74-
+}
75-
+
76-
+void PD_ConfigUseOptimizedModel(__pd_keep PD_Config* pd_config,
77-
+ PD_Bool x) {
78-
+ CHECK_AND_CONVERT_PD_CONFIG;
79-
+ config->UseOptimizedModel(x);
80-
+}
81-
+
82-
} // extern "C"
83-
diff --git a/paddle/fluid/inference/capi_exp/pd_config.h b/paddle/fluid/inference/capi_exp/pd_config.h
84-
index b611328030..3dfe02b258 100644
85-
--- a/paddle/fluid/inference/capi_exp/pd_config.h
86-
+++ b/paddle/fluid/inference/capi_exp/pd_config.h
87-
@@ -743,6 +743,39 @@ PADDLE_CAPI_EXPORT extern __pd_give PD_OneDimArrayCstr* PD_ConfigAllPasses(
88-
PADDLE_CAPI_EXPORT extern __pd_give PD_Cstr* PD_ConfigSummary(
89-
__pd_keep PD_Config* pd_config);
90-
91-
+/// \brief A boolean state telling whether to use new executor.
92-
+/// \param[in] pd_config config
93-
+/// \param[in] x enable new executor or not
94-
+PADDLE_CAPI_EXPORT extern void PD_ConfigEnableNewExecutor(
95-
+ __pd_keep PD_Config* pd_config,
96-
+ PD_Bool x);
97-
+
98-
+/// \brief A boolean state telling whether the new executor is enabled.
99-
+/// \param[in] pd_config config
100-
+/// \return Whether new executor is enabled
101-
+PADDLE_CAPI_EXPORT extern PD_Bool PD_ConfigNewExecutorEnabled(
102-
+ __pd_keep PD_Config* pd_config);
103-
+
104-
+/// \brief A boolean state telling whether to use new IR.
105-
+/// \param[in] pd_config config
106-
+/// \param[in] x enable new IR or not
107-
+PADDLE_CAPI_EXPORT extern void PD_ConfigEnableNewIR(
108-
+ __pd_keep PD_Config* pd_config,
109-
+ PD_Bool x);
110-
+
111-
+/// \brief A boolean state telling whether the new IR is enabled.
112-
+/// \param[in] pd_config config
113-
+/// \return Whether new IR is enabled
114-
+PADDLE_CAPI_EXPORT extern PD_Bool PD_ConfigNewIREnabled(
115-
+ __pd_keep PD_Config* pd_config);
116-
+
117-
+/// \brief Control whether to use optimized model to inference.
118-
+/// \param[in] pd_config config
119-
+/// \param[in] x whether to use optimized model
120-
+PADDLE_CAPI_EXPORT extern void PD_ConfigUseOptimizedModel(
121-
+ __pd_keep PD_Config* pd_config,
122-
+ PD_Bool x);
123-
+
124-
#ifdef __cplusplus
125-
} // extern "C"
126-
#endif
12725
diff --git a/paddle/fluid/pir/serialize_deserialize/src/interface.cc b/paddle/fluid/pir/serialize_deserialize/src/interface.cc
12826
index bc597db6d1..6c45afcd46 100644
12927
--- a/paddle/fluid/pir/serialize_deserialize/src/interface.cc
@@ -146,10 +44,18 @@ index bc597db6d1..6c45afcd46 100644
14644
pir_version = DEVELOP_VERSION;
14745
VLOG(6) << "pir_version is null, get pir_version: " << pir_version;
14846
diff --git a/paddle/fluid/pir/serialize_deserialize/src/save_load_parameters.cc b/paddle/fluid/pir/serialize_deserialize/src/save_load_parameters.cc
149-
index 1d563c326a..9c3318099d 100644
47+
index 1d563c326a..0871d34170 100644
15048
--- a/paddle/fluid/pir/serialize_deserialize/src/save_load_parameters.cc
15149
+++ b/paddle/fluid/pir/serialize_deserialize/src/save_load_parameters.cc
152-
@@ -188,13 +188,20 @@ void LoadCombineFunction(const std::string& file_path,
50+
@@ -12,6 +12,7 @@ limitations under the License. */
51+
#include <cstdint>
52+
#include <fstream>
53+
#include <numeric>
54+
+#include <sstream>
55+
56+
#include "glog/logging.h"
57+
#include "paddle/fluid/framework/lod_tensor.h"
58+
@@ -188,13 +189,20 @@ void LoadCombineFunction(const std::string& file_path,
15359
std::vector<phi::DenseTensor*>* out,
15460
bool load_as_fp16,
15561
phi::Place place) {

0 commit comments

Comments (0)