File: home/liujun/Analysis/pyrefcon_ws/Paddle/build/paddle/fluid/eager/pylayer/../../../../../paddle/fluid/eager/pylayer/py_layer_node.cc
Warning: line 125, column 21: PyObject ownership leak with reference count of 1
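The warning points at the PyTuple_New(1) call on source line 125: PyTuple_New returns a new reference that the caller owns, and in the listing below that reference is released only by the conditional Py_XDECREF(outputs_tuple) on line 190, so any PADDLE_THROW / PADDLE_ENFORCE_EQ path that fires between the two unwinds past the release and leaks the tuple with a reference count of 1. A minimal, self-contained sketch of the flagged pattern (hypothetical helper, not Paddle code):

    #include <Python.h>
    #include <new>
    #include <stdexcept>

    // Sketch only: mirrors the shape of the code below. The size check
    // stands in for the PADDLE_* checks that can throw.
    void normalize_to_tuple(PyObject* outputs) {
      PyObject* outputs_tuple = nullptr;
      bool owned = false;
      if (PyTuple_Check(outputs)) {
        outputs_tuple = outputs;  // borrowed reference, nothing to release
      } else {
        outputs_tuple = PyTuple_New(1);  // NEW reference, refcount == 1
        if (outputs_tuple == nullptr) throw std::bad_alloc();
        owned = true;
        Py_INCREF(outputs);
        PyTuple_SET_ITEM(outputs_tuple, 0, outputs);  // steals the INCREF'd ref
      }
      if (PyTuple_GET_SIZE(outputs_tuple) > 8) {
        throw std::runtime_error("too many outputs");  // leaks the owned tuple
      }
      if (owned) Py_DECREF(outputs_tuple);  // reached only on the happy path
    }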
1 | // Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #include "paddle/fluid/eager/pylayer/py_layer_node.h"
16 |
17 | #include "glog/logging.h"
18 | #include "paddle/fluid/eager/eager_tensor.h"
19 | #include "paddle/fluid/platform/device_context.h"
20 | #include "paddle/fluid/platform/enforce.h"
21 | #include "paddle/fluid/platform/errors.h"
22 | #include "paddle/fluid/pybind/eager.h"
23 | #include "paddle/fluid/pybind/eager_utils.h"
24 | #include "paddle/phi/api/all.h"
25 | #include "paddle/phi/core/dense_tensor.h"
26 | #pragma GCC diagnostic ignored "-Wattributes"
27 | #include "pybind11/pytypes.h"
28 |
29 | namespace egr {
30 | GradNodePyLayer::~GradNodePyLayer() {  // NOLINT
31 |   pybind11::gil_scoped_acquire gil;
32 |   Py_XDECREF(ctx_);
33 | }
34 |
35 | paddle::small_vector<std::vector<paddle::Tensor>, kSlotSmallVectorSize>
36 | GradNodePyLayer::operator()(
37 |     paddle::small_vector<std::vector<paddle::Tensor>,
38 |                          kSlotSmallVectorSize>& grads,  // NOLINT
39 |     bool create_graph,
40 |     bool is_new_grad) {
41 |   pybind11::gil_scoped_acquire gil;
42 |   VLOG(3) << "Running Eager Backward Node: " << name();
43 |
44 |   paddle::small_vector<std::vector<paddle::Tensor>, kSlotSmallVectorSize>
45 |       hooked_grads = GradNodePyLayer::ApplyGradientHooks(grads);
46 |
47 |   paddle::pybind::PyLayerObject* ctx =
48 |       reinterpret_cast<paddle::pybind::PyLayerObject*>(ctx_);
49 |
50 |   PADDLE_ENFORCE_EQ(ctx->forward_output_tensor_is_duplicable.size(),
51 |                     grads.size(),
52 |                     paddle::platform::errors::InvalidArgument(
53 |                         "%s's grad input size(%s) must be equal to its "
54 |                         "forward's output size(%s).",
55 |                         name(),
56 |                         grads.size(),
57 |                         ctx->forward_output_tensor_is_duplicable.size()));
58 |
59 |   auto backward_args = PyTuple_New(grads.size());
60 |   for (size_t i = 0; i < grads.size(); i++) {
61 |     if (ctx->forward_output_tensor_is_duplicable[i]) {
62 |       PyObject* pylist = PyList_New((Py_ssize_t)grads[i].size());
63 |       for (size_t j = 0; j < grads[i].size(); j++) {
64 |         if (ctx->materialize_grads && !grads[i][j].initialized()) {
65 |           paddle::Tensor tensor_tmp;
66 |           auto dense_tensor = std::make_shared<phi::DenseTensor>();
67 |           dense_tensor->set_meta(forward_outputs_meta_[i][j]);
68 |           tensor_tmp.set_impl(dense_tensor);
69 |           PyList_SET_ITEM(
70 |               pylist,
71 |               static_cast<Py_ssize_t>(i),
72 |               paddle::pybind::ToPyObject(paddle::experimental::zeros_like(
73 |                   tensor_tmp,
74 |                   tensor_tmp.dtype(),
75 |                   forward_outputs_place_[i][j])));
76 |         } else {
77 |           PyList_SET_ITEM(pylist,
78 |                           static_cast<Py_ssize_t>(i),
79 |                           paddle::pybind::ToPyObject(grads[i][0], true));
80 |         }
81 |       }
82 |       PyTuple_SET_ITEM(backward_args, i, pylist);
83 |     } else {
84 |       if (ctx->materialize_grads && !grads[i][0].initialized()) {
85 |         paddle::Tensor tensor_tmp;
86 |         auto dense_tensor = std::make_shared<phi::DenseTensor>();
87 |         dense_tensor->set_meta(forward_outputs_meta_[i][0]);
88 |         tensor_tmp.set_impl(dense_tensor);
89 |         PyTuple_SET_ITEM(
90 |             backward_args,
91 |             i,
92 |             paddle::pybind::ToPyObject(paddle::experimental::zeros_like(
93 |                 tensor_tmp, tensor_tmp.dtype(), forward_outputs_place_[i][0])));
94 |       } else {
95 |         PyTuple_SET_ITEM(
96 |             backward_args, i, paddle::pybind::ToPyObject(grads[i][0], true));
97 |       }
98 |     }
99 |   }
100 |
101 |   VLOG(6) << "PyLayer backward args is ready, begin call user's backward "
102 |              "function...";
103 |
104 |   auto backward_fn =
105 |       PyObject_GetAttrString(reinterpret_cast<PyObject*>(ctx), "backward");
106 |   if (!backward_fn) {
107 |     PADDLE_THROW(paddle::platform::errors::InvalidArgument(
108 |         "Get backward function failed."));
109 |   }
110 |   bool need_grad_tmp = egr::Controller::Instance().HasGrad();
111 |   egr::Controller::Instance().SetHasGrad(create_graph && need_grad_tmp);
112 |   auto outputs = PyObject_CallObject(backward_fn, backward_args);
113 |   egr::Controller::Instance().SetHasGrad(need_grad_tmp);
114 |   if (!outputs) {
115 |     PADDLE_THROW(paddle::platform::errors::External(
116 |         pybind11::detail::error_string().c_str()));
117 |   }
118 |
119 |   VLOG(6) << "PyLayer backward function finish...";
120 |
121 |   PyObject* outputs_tuple = nullptr;
122 |   if (PyTuple_Check(outputs)) {
123 |     outputs_tuple = outputs;
124 |   } else {
125 |     outputs_tuple = PyTuple_New(1);
126 |     Py_INCREF(outputs);
127 |     PyTuple_SET_ITEM(outputs_tuple, 0, outputs);
128 |   }
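    |   // (reviewer annotation, not in the source) the tuple created on
    |   // line 125 is an owned reference; it is released only on line 190,
    |   // so any PADDLE_THROW / PADDLE_ENFORCE_EQ that fires in between
    |   // unwinds past the release -- this is the reported leak.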
129 |
130 |   size_t outputs_size = PyTuple_GET_SIZE(outputs_tuple);
131 |
132 |   if (outputs_size > ctx->forward_input_tensor_is_duplicable.size()) {
133 |     PADDLE_THROW(paddle::platform::errors::InvalidArgument(
134 |         "The number of outputs of `PyLayer.backward` should be %d, but "
135 |         "received %d.",
136 |         ctx->forward_input_tensor_is_duplicable.size(),
137 |         outputs_size));
138 |   }
139 |
140 |   paddle::small_vector<std::vector<paddle::Tensor>, kSlotSmallVectorSize>
141 |       grad_out;
142 |   grad_out.reserve(ctx->forward_input_tensor_is_duplicable.size());
143 |   for (size_t i = 0; i < ctx->forward_input_tensor_is_duplicable.size(); i++) {
144 |     if (i < outputs_size) {
145 |       PyObject* obj = PyTuple_GET_ITEM(outputs_tuple, i);
146 |       if (this->OutputMeta()[i][0].IsStopGradient()) {
147 |         PADDLE_ENFORCE_EQ(
148 |             obj,
149 |             Py_None,
150 |             paddle::platform::errors::InvalidArgument(
151 |                 "%s's backward function should return None at %d position, "
152 |                 "because its forward Tensor's stop_gradient is true.",
153 |                 name(),
154 |                 i));
155 |         grad_out.push_back({});
156 |       } else {
157 |         if (ctx->forward_input_tensor_is_duplicable[i]) {
158 |           grad_out.push_back(
159 |               paddle::pybind::GetTensorListFromPyObject(obj, true));
160 |         } else {
161 |           if (paddle::pybind::PyCheckTensor(obj)) {
162 |             grad_out.push_back(
163 |                 {paddle::pybind::UnSafeGetTensorFromPyObject(obj)});
164 |           } else if (obj == Py_None) {
165 |             VLOG(4) << "Got None for Tensor with pos: " << i;
166 |             grad_out.push_back({paddle::Tensor()});
167 |           } else {
168 |             PADDLE_THROW(phi::errors::InvalidArgument(
169 |                 "We can only support Tensor or None for backward output, "
170 |                 "but got %s, please check your PyLayer code and make it fit",
171 |                 reinterpret_cast<PyTypeObject*>(obj->ob_type)->tp_name));
172 |           }
173 |         }
174 |       }
175 |     } else {
176 |       PADDLE_ENFORCE_EQ(
177 |           this->OutputMeta()[i][0].IsStopGradient(),
178 |           true,
179 |           paddle::platform::errors::InvalidArgument(
180 |               "%s's backward function should not return empty at %d position.",
181 |               name(),
182 |               i));
183 |       grad_out.push_back({});
184 |     }
185 |   }
186 |
187 |   Py_XDECREF(backward_fn);
188 |   Py_XDECREF(backward_args);
189 |   if (!PyTuple_Check(outputs)) {
190 |     Py_XDECREF(outputs_tuple);
191 |   }
192 |   Py_XDECREF(outputs);
193 |   Py_XDECREF(ctx_);
194 |   ctx_ = nullptr;
195 |
196 |   return grad_out;
197 | }
198 | }  // namespace egr
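One conventional remedy, sketched below under the assumption that pybind11's object wrapper is available here (pybind11/pytypes.h is already included on line 27), is to hold the temporary tuple in a scope-owned handle so that every throwing path releases it. Names and placement are illustrative, not the Paddle authors' fix:

    // Hypothetical rework of source lines 121-128. 'tuple_guard' drops its
    // reference automatically when the scope unwinds, including on throw.
    PyObject* outputs_tuple = nullptr;
    pybind11::object tuple_guard;
    if (PyTuple_Check(outputs)) {
      outputs_tuple = outputs;
    } else {
      tuple_guard =
          pybind11::reinterpret_steal<pybind11::object>(PyTuple_New(1));
      Py_INCREF(outputs);
      PyTuple_SET_ITEM(tuple_guard.ptr(), 0, outputs);
      outputs_tuple = tuple_guard.ptr();
    }
    // ... rest of the body unchanged; the conditional
    // Py_XDECREF(outputs_tuple) on lines 189-191 is then no longer needed.

The report ends with pyrefcon's model stub for PyTuple_New, reproduced below; the checker analyzes the code against this model rather than against CPython's real implementation.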
1 | #ifndef PyTuple_New |
2 | struct _object; |
3 | typedef struct _object PyObject; |
4 | PyObject* clang_analyzer_PyObject_New_Reference(); |
5 | PyObject* PyTuple_New(Py_ssize_t len) { |
6 |   return clang_analyzer_PyObject_New_Reference();
7 | } |
8 | #else |
9 | #warning "API PyTuple_New is defined as a macro." |
10 | #endif |
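The #ifndef guard covers CPython configurations where PyTuple_New is itself a macro, in which case the stub cannot override it and the #warning fires instead. Inside the guard, the stub returns clang_analyzer_PyObject_New_Reference(), which appears to be pyrefcon's marker for "this call produces a fresh owned reference"; modeling PyTuple_New this way is what lets the analyzer track the reference created on line 125 and report it as leaked on paths where no release dominates the exit.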