@@ -12,7 +12,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */
 #include <fstream>
-#include <iostream>
 
 #include "paddle/fluid/framework/data_type_transform.h"
 #include "paddle/fluid/framework/op_registry.h"
@@ -53,30 +52,22 @@ class LoadOp : public framework::OperatorBase {
     auto in_dtype = framework::ToDataType(tensor->type());
     auto out_dtype = load_as_fp16 ? framework::proto::VarType::FP16 : in_dtype;
 
-    std::cout << "In load op: " << std::endl;
-    std::cout << "before conversion block" << std::endl;
-
     if (in_dtype != out_dtype) {
       // convert to float16 tensor
       auto in_kernel_type = framework::OpKernelType(in_dtype, place);
       auto out_kernel_type = framework::OpKernelType(out_dtype, place);
       framework::LoDTensor fp16_tensor;
       // copy LoD info to the new tensor
       fp16_tensor.set_lod(tensor->lod());
-      std::cout << "before TransDataType" << std::endl;
       framework::TransDataType(in_kernel_type, out_kernel_type, *tensor,
                               &fp16_tensor);
-      std::cout << "after TransDataType " << std::endl;
+
       // reset output tensor
       out_var->Clear();
       tensor = out_var->GetMutable<framework::LoDTensor>();
       tensor->set_lod(fp16_tensor.lod());
-      std::cout << "before TransDataType" << std::endl;
       tensor->ShareDataWith(fp16_tensor);
-      std::cout << "after TransDataType" << std::endl;
     }
-
-    std::cout << "Out of load op: " << std::endl;
   }
 };
 
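For reference, below is a minimal standalone sketch of the fp16 load path that the second hunk keeps once the debug prints are gone. It assumes a Paddle Fluid build environment; the helper name CastToFP16IfNeeded, the header list, and the in-place simplification (no out_var->Clear()/GetMutable reset step) are illustrative assumptions rather than code from this PR, while ToDataType, OpKernelType, TransDataType, set_lod, and ShareDataWith are the calls already present in the diff.

// Illustrative sketch only -- not part of this PR.
#include "paddle/fluid/framework/data_type_transform.h"
#include "paddle/fluid/framework/lod_tensor.h"

namespace paddle {
namespace framework {

// Casts *tensor to float16 in place when load_as_fp16 is set and the
// current dtype differs, mirroring the branch kept by this diff.
void CastToFP16IfNeeded(bool load_as_fp16, const platform::Place &place,
                        LoDTensor *tensor) {
  auto in_dtype = ToDataType(tensor->type());
  auto out_dtype = load_as_fp16 ? proto::VarType::FP16 : in_dtype;
  if (in_dtype == out_dtype) return;  // nothing to convert

  auto in_kernel_type = OpKernelType(in_dtype, place);
  auto out_kernel_type = OpKernelType(out_dtype, place);

  LoDTensor fp16_tensor;
  fp16_tensor.set_lod(tensor->lod());  // copy LoD info to the new tensor
  TransDataType(in_kernel_type, out_kernel_type, *tensor, &fp16_tensor);

  // Point the original tensor at the converted fp16 buffer; the buffer is
  // shared, so it outlives the local fp16_tensor.
  tensor->set_lod(fp16_tensor.lod());
  tensor->ShareDataWith(fp16_tensor);
}

}  // namespace framework
}  // namespace paddle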