File: build/../torch/csrc/utils/python_strings.h
Warning: line 64, column 10: PyObject ownership leak with reference count of 2
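For context: this class of analyzer warning fires when a function acquires ownership of a PyObject (a "new reference"), raises the count it owns to 2, and then returns on some path without releasing what it owns, so the object can never be freed. The sketch below is illustrative only and is not the code at python_strings.h:64; the function name and the attribute it fetches are invented. In the listing that follows, PyTorch usually avoids this pattern by holding new references in the RAII wrapper THPObjectPtr.

    #include <Python.h>

    // Hypothetical example of the reported defect pattern, not PyTorch code.
    static PyObject* leaky_example(PyObject* obj) {
      // PyObject_GetAttrString returns a *new* reference: we now own 1 count.
      PyObject* name = PyObject_GetAttrString(obj, "__name__");
      if (!name) return nullptr;
      Py_INCREF(name);      // redundant second increment: we now own 2 counts
      if (!PyUnicode_Check(name)) {
        return nullptr;     // leak: returns while still owning both counts
                            // ("ownership leak with reference count of 2")
      }
      Py_DECREF(name);      // correct cleanup drops every count we own
      Py_DECREF(name);
      Py_RETURN_NONE;
    }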
1 | #include <torch/csrc/python_headers.h>
2 | #include <sys/types.h>
3 |
4 | #ifndef _MSC_VER
5 | #include <sys/socket.h>
6 | #endif
7 |
8 | #include <ATen/ATen.h>
9 | #include <ATen/DLConvertor.h>
10 | #include <ATen/ExpandUtils.h>
11 | #include <ATen/Parallel.h>
12 | #include <ATen/Utils.h>
13 | #include <ATen/VmapMode.h>
14 | #include <ATen/dlpack.h>
15 | #include <ATen/core/Vitals.h>
16 | #include <TH/TH.h>
17 | #include <c10/util/Logging.h>
18 | #include <c10/util/irange.h>
19 | #include <cstdlib>
20 | #include <libshm.h>
21 | #include <pybind11/pybind11.h>
22 | #include <pybind11/stl.h>
23 | #include <unordered_map>
24 |
25 | #include <torch/csrc/THP.h>
26 | #include <torch/csrc/DynamicTypes.h>
27 | #include <torch/csrc/Device.h>
28 | #include <torch/csrc/Stream.h>
29 | #include <torch/csrc/Dtype.h>
30 | #include <torch/csrc/DataLoader.h>
31 | #include <torch/csrc/Generator.h>
32 | #include <torch/csrc/Layout.h>
33 | #include <torch/csrc/MemoryFormat.h>
34 | #include <torch/csrc/QScheme.h>
35 | #include <torch/csrc/TypeInfo.h>
36 | #include <torch/csrc/autograd/python_nn_functions.h>
37 | #include <torch/csrc/autograd/python_fft_functions.h>
38 | #include <torch/csrc/autograd/python_linalg_functions.h>
39 | #include <torch/csrc/autograd/python_special_functions.h>
40 | #include <torch/csrc/autograd/python_legacy_variable.h>
41 | #include <torch/csrc/autograd/python_variable.h>
42 | #include <torch/csrc/multiprocessing/init.h>
43 | #include <torch/csrc/tensor/python_tensor.h>
44 | #include <torch/csrc/utils/disable_torch_function.h>
45 | #include <torch/csrc/utils/tensor_dtypes.h>
46 | #include <torch/csrc/utils/python_compat.h>
47 | #include <torch/csrc/utils/python_strings.h>
48 | #include <torch/csrc/utils/tensor_layouts.h>
49 | #include <torch/csrc/utils/tensor_memoryformats.h>
50 | #include <torch/csrc/utils/tensor_qschemes.h>
51 | #include <torch/csrc/utils/tensor_numpy.h>
52 | #include <torch/csrc/utils/python_dispatch.h>
53 | #include <torch/csrc/utils/crash_handler.h>
54 | #include <torch/csrc/jit/python/python_tracer.h>
55 | #include <torch/csrc/jit/python/init.h>
56 | #include <torch/csrc/jit/python/python_ir.h>
57 | #include <torch/csrc/fx/fx_init.h>
58 | #include <torch/csrc/onnx/init.h>
59 | #include <torch/csrc/utils/init.h>
60 | #include <torch/csrc/utils/crash_handler.h>
61 | #include <torch/csrc/api/include/torch/python/init.h>
62 |
63 | #ifdef USE_DISTRIBUTED
64 | #ifdef USE_C10D
65 | #include <torch/csrc/distributed/autograd/python_autograd.h>
66 | #include <torch/csrc/distributed/c10d/c10d.h>
67 | #include <torch/csrc/distributed/rpc/rpc.h>
68 | #include <torch/csrc/distributed/rpc/testing/testing.h>
69 | #endif
70 | #endif
71 |
72 | #if defined(USE_MLCOMPUTE)
73 | #include <mlc/torch_mlc/csrc/MLCInit.h>
74 | #endif
75 |
76 | #if defined(USE_VALGRIND)
77 | #include <callgrind.h>
78 | #endif
79 |
80 | namespace py = pybind11;
81 |
82 | // NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
83 | PyObject* module;
84 |
85 | // NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
86 | THPGenerator *THPDefaultCPUGenerator = nullptr;
87 |
88 | ////////////////////////////////////////////////////////////////////////////////
89 | ////////////////////////////////////////////////////////////////////////////////
90 |
91 | static PyObject * THPModule_initNames(PyObject *self, PyObject *arg)
92 | {
93 |   static std::vector<std::string> names;
94 |
95 |   THPObjectPtr types(PySequence_Fast(arg, "expected a sequence"));
96 |   if (!types) return nullptr;
97 |
98 |   // NOLINTNEXTLINE(bugprone-branch-clone)
99 |   auto num_classes = PySequence_Fast_GET_SIZE(types.get());
100 |   names.reserve(names.size() + num_classes);
101 |   for (Py_ssize_t i = 0; i < num_classes; i++) {
102 |     PyObject* obj = PySequence_Fast_GET_ITEM(types.get(), i);
103 |     THPUtils_assert(PyType_Check(obj), "expected a PyTypeObject");
104 |     PyTypeObject* type = (PyTypeObject*)obj;
105 |
106 |     THPObjectPtr module_name(PyObject_GetAttrString(obj, "__module__"));
107 |     if (!module_name) return nullptr;
108 |     THPUtils_assert(THPUtils_checkString(module_name.get()),
109 |         "expected __module__ to be a string");
110 |     std::string name = THPUtils_unpackString(module_name.get());
111 |     names.push_back(name + "." + type->tp_name);
112 |     type->tp_name = names.back().c_str();
113 |   }
114 |   Py_RETURN_NONE;
115 | }
116 | //
117 | // Callback for python part. Used for additional initialization of python classes
118 | static PyObject * THPModule_initExtension(PyObject *_unused, PyObject *shm_manager_path)
119 | {
120 |   HANDLE_TH_ERRORS
121 |   if (!THPUtils_checkString(shm_manager_path)) {
122 |     THPUtils_setError("initialization error - expected bytes/string object as shm_manager_path!");
123 |     return nullptr;
124 |   }
125 |   torch::utils::initializeLayouts();
126 |   torch::utils::initializeMemoryFormats();
127 |   torch::utils::initializeQSchemes();
128 |   torch::utils::initializeDtypes();
129 |   torch::tensors::initialize_python_bindings();
130 |   std::string path = THPUtils_unpackString(shm_manager_path);
131 |   libshm_init(path.c_str());
132 |
133 |   auto module = THPObjectPtr(PyImport_ImportModule("torch"));
134 |   if (!module) throw python_error();
135 |
136 |   THPDoubleStorage_postInit(module);
137 |   THPFloatStorage_postInit(module);
138 |   THPHalfStorage_postInit(module);
139 |   THPLongStorage_postInit(module);
140 |   THPIntStorage_postInit(module);
141 |   THPShortStorage_postInit(module);
142 |   THPCharStorage_postInit(module);
143 |   THPByteStorage_postInit(module);
144 |   THPBoolStorage_postInit(module);
145 |   THPQUInt8Storage_postInit(module);
146 |   THPQUInt4x2Storage_postInit(module);
147 |   THPQInt8Storage_postInit(module);
148 |   THPQInt32Storage_postInit(module);
149 |   THPBFloat16Storage_postInit(module);
150 |   THPComplexDoubleStorage_postInit(module);
151 |   THPComplexFloatStorage_postInit(module);
152 |   THPAutograd_initFunctions();
153 |   Py_RETURN_NONE;
154 |   END_HANDLE_TH_ERRORS
155 | }
156 |
157 | // The idea behind these two functions is to make it easy to test if we are
158 | // built with ASAN: they're designed not to crash if ASAN is not enabled, but
159 | // to trigger ASAN if it is enabled. This lets us run a "canary" test which
160 | // checks if our build environment is misconfigured.
161 |
162 | static PyObject * THPModule_crashIfCsrcASAN(PyObject *module, PyObject *arg) {
163 |   THPUtils_assert(THPUtils_checkLong(arg), "crash_if_csrc_asan expects an int, "
164 |       "but got %s", THPUtils_typename(arg));
165 |   //NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays, modernize-avoid-c-arrays)
166 |   volatile char x[3];
167 |   x[THPUtils_unpackInt(arg)] = 0;
168 |   //NOLINTNEXTLINE(clang-analyzer-core.CallAndMessage)
169 |   return THPUtils_packInt32(x[0]);
170 | }
171 |
172 | static PyObject * THPModule_crashIfCsrcUBSAN(PyObject *module, PyObject *arg) {
173 |   THPUtils_assert(THPUtils_checkLong(arg), "crash_if_csrc_ubsan expects an int, "
174 |       "but got %s", THPUtils_typename(arg));
175 |   int32_t x = THPUtils_unpackInt(arg);
176 |   double y = 1.0 / x;
177 |   return THPUtils_packInt32((int)y);
178 | }
179 |
180 | static PyObject * THPModule_crashIfATenASAN(PyObject *module, PyObject *arg) {
181 |   THPUtils_assert(THPUtils_checkLong(arg), "crash_if_aten_asan expects an int, "
182 |       "but got %s", THPUtils_typename(arg));
183 |   return THPUtils_packInt32(at::_crash_if_asan(THPUtils_unpackInt(arg)));
184 | }
185 |
186 | static PyObject * THPModule_getNumThreads(PyObject *module, PyObject *noargs)
187 | {
188 |   return THPUtils_packInt32(at::get_num_threads());
189 | }
190 |
191 | static PyObject * THPModule_setNumThreads(PyObject *module, PyObject *arg)
192 | {
193 |   THPUtils_assert(THPUtils_checkLong(arg), "set_num_threads expects an int, "
194 |       "but got %s", THPUtils_typename(arg));
195 |   int nthreads = (int)THPUtils_unpackLong(arg);
196 |   THPUtils_assert(nthreads > 0, "set_num_threads expects a positive integer");
197 |   at::set_num_threads(nthreads);
198 |   Py_RETURN_NONE;
199 | }
200 |
201 | static PyObject * THPModule_getNumInteropThreads(PyObject *module, PyObject *noargs)
202 | {
203 |   return THPUtils_packInt32(at::get_num_interop_threads());
204 | }
205 |
206 | static PyObject * THPModule_setNumInteropThreads(PyObject *module, PyObject *arg)
207 | {
208 |   THPUtils_assert(THPUtils_checkLong(arg), "set_num_interop_threads expects an int, "
209 |       "but got %s", THPUtils_typename(arg));
210 |   int nthreads = (int)THPUtils_unpackLong(arg);
211 |   THPUtils_assert(nthreads > 0, "set_num_interop_threads expects a positive integer");
212 |   at::set_num_interop_threads(nthreads);
213 |   Py_RETURN_NONE;
214 | }
215 |
216 | PyObject * THPModule_setDefaultTensorType(PyObject *_unused, PyObject *type)
217 | {
218 |   HANDLE_TH_ERRORS
219 |   torch::tensors::py_set_default_tensor_type(type);
220 |   Py_RETURN_NONE;
221 |   END_HANDLE_TH_ERRORS
222 | }
223 |
224 | PyObject * THPModule_setDefaultDtype(PyObject *_unused, PyObject *dtype)
225 | {
226 |   HANDLE_TH_ERRORS
227 |   torch::tensors::py_set_default_dtype(dtype);
228 |   Py_RETURN_NONE;
229 |   END_HANDLE_TH_ERRORS
230 | }
231 |
232 | PyObject *THPModule_addDocStr(PyObject *_unused, PyObject *args)
233 | {
234 |   // adds a __doc__ string to a function, similar to numpy's arr_add_docstring
235 |   static std::vector<std::string> all_docs;
236 |   PyObject *obj = nullptr;
237 |   PyObject *doc_obj = nullptr;
238 |   if (!PyArg_ParseTuple(args, "OO", &obj, &doc_obj)) {
239 |     return nullptr;
240 |   }
241 |
242 |   const char* doc_str = "<invalid string>";
243 |   if (THPUtils_checkString(doc_obj)) {
244 |     all_docs.push_back(THPUtils_unpackString(doc_obj));
245 |     doc_str = all_docs.back().c_str();
246 |   }
247 |
248 |   if (Py_TYPE(obj) == &PyCFunction_Type) {
249 |     PyCFunctionObject* f = (PyCFunctionObject *)obj;
250 |     if (f->m_ml->ml_doc) {
251 |       return PyErr_Format(PyExc_RuntimeError,
252 |           "function '%s' already has a docstring", f->m_ml->ml_name);
253 |     }
254 |     f->m_ml->ml_doc = doc_str;
255 |   } else if (strcmp(Py_TYPE(obj)->tp_name, "method_descriptor") == 0) {
256 |     PyMethodDescrObject* m = (PyMethodDescrObject *)obj;
257 |     if (m->d_method->ml_doc) {
258 |       return PyErr_Format(PyExc_RuntimeError,
259 |           "method '%s' already has a docstring", m->d_method->ml_name);
260 |     }
261 |     m->d_method->ml_doc = doc_str;
262 |   } else if (strcmp(Py_TYPE(obj)->tp_name, "getset_descriptor") == 0) {
263 |     //NOLINTNEXTLINE(cppcoreguidelines-pro-type-cstyle-cast)
264 |     PyGetSetDescrObject* m = (PyGetSetDescrObject *)obj;
265 |     if (m->d_getset->doc) {
266 |       //NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg)
267 |       return PyErr_Format(PyExc_RuntimeError,
268 |           "attribute '%s' already has a docstring", m->d_getset->name);
269 |     }
270 |     // This field is not const for python < 3.7 yet the content is
271 |     // never modified.
272 |     //NOLINTNEXTLINE(cppcoreguidelines-pro-type-const-cast)
273 |     m->d_getset->doc = const_cast<char *>(doc_str);
274 |   } else if (Py_TYPE(obj) == &PyType_Type) {
275 |     PyTypeObject* t = (PyTypeObject *)obj;
276 |     if (t->tp_doc) {
277 |       return PyErr_Format(PyExc_RuntimeError,
278 |           "Type '%s' already has a docstring", t->tp_name);
279 |     }
280 |     t->tp_doc = doc_str;
281 |   } else {
282 |     return PyErr_Format(PyExc_TypeError,
283 |         "don't know how to add docstring to type '%s'", Py_TYPE(obj)->tp_name);
284 |   }
285 |
286 |   Py_INCREF(obj);
287 |   return obj;
288 | }
289 |
290 |
291 | PyObject *THPModule_inferSize(PyObject *_unused, PyObject *args)
292 | {
293 |   HANDLE_TH_ERRORS
294 |   Py_ssize_t num_args = args ? (Py_ssize_t) PyTuple_Size(args) : 0;
295 |   THPUtils_assert(num_args == 2, "expected exactly 2 arguments");
296 |   PyObject *arg1 = PyTuple_GET_ITEM(args, 0);
297 |   THPUtils_assert(THPSize_Check(arg1), "expected a torch.Size as argument 1");
298 |   PyObject *arg2 = PyTuple_GET_ITEM(args, 1);
299 |   THPUtils_assert(THPSize_Check(arg2), "expected a torch.Size as argument 2");
300 |
301 |   auto size1 = THPUtils_unpackLongs(arg1);
302 |   auto size2 = THPUtils_unpackLongs(arg2);
303 |   auto sizes = at::infer_size(size1, size2);
304 |   return THPSize_NewFromSizes(sizes.size(), sizes.data());
305 |   END_HANDLE_TH_ERRORS
306 | }
307 |
308 | static PyObject *THPModule_setBackcompatBroadcastWarn(PyObject *module, PyObject *arg) {
309 |   THPUtils_assert(PyBool_Check(arg), "set_backcompat_broadcast_warn expects a bool, "
310 |       "but got %s", THPUtils_typename(arg));
311 |   setBackCompatBroadcastWarn(arg == Py_True);
312 |   Py_RETURN_NONE;
313 | }
314 |
315 | static PyObject *THPModule_getBackcompatBroadcastWarn(PyObject *module, PyObject *noargs)
316 | {
317 |   if (getBackCompatBroadcastWarn()) Py_RETURN_TRUE;
318 |   else Py_RETURN_FALSE;
319 | }
320 |
321 | static PyObject *THPModule_setBackcompatKeepdimWarn(PyObject *module, PyObject *arg) {
322 |   THPUtils_assert(PyBool_Check(arg), "set_backcompat_keepdim_warn expects a bool, "
323 |       "but got %s", THPUtils_typename(arg));
324 |   setBackCompatKeepdimWarn(arg == Py_True);
325 |   Py_RETURN_NONE;
326 | }
327 |
328 | static PyObject *THPModule_getBackcompatKeepdimWarn(PyObject *module, PyObject *noargs)
329 | {
330 |   if (getBackCompatKeepdimWarn()) Py_RETURN_TRUE;
331 |   else Py_RETURN_FALSE;
332 | }
333 |
334 | PyObject *THPModule_hasDistributed(PyObject *_unused, PyObject *noargs)
335 | {
336 | #ifdef USE_DISTRIBUTED
337 |   Py_RETURN_TRUE;
338 | #else
339 |   Py_RETURN_FALSE;
340 | #endif
341 | }
342 |
343 | static PyObject *THPModule_showConfig(PyObject *module, PyObject *noargs)
344 | {
345 |   HANDLE_TH_ERRORS
346 |   return THPUtils_packString(at::show_config());
347 |   END_HANDLE_TH_ERRORS
348 | }
349 |
350 | static PyObject *THPModule_cxxFlags(PyObject *module, PyObject *noargs)
351 | {
352 |   HANDLE_TH_ERRORS
353 |   return THPUtils_packString(at::get_cxx_flags());
354 |   END_HANDLE_TH_ERRORS
355 | }
356 |
357 | static PyObject *THPModule_parallelInfo(PyObject *module, PyObject *noargs)
358 | {
359 |   HANDLE_TH_ERRORS
360 |   return THPUtils_packString(at::get_parallel_info());
361 |   END_HANDLE_TH_ERRORS
362 | }
363 |
364 | void DLPack_Capsule_Destructor(PyObject* data) {
365 |   HANDLE_TH_ERRORS
366 |   DLManagedTensor * dlMTensor = (DLManagedTensor *)PyCapsule_GetPointer(data, "dltensor");
367 |   if (dlMTensor) {
368 |     // the dlMTensor has not been consumed, call deleter ourselves
369 |     // NOLINTNEXTLINE(cppcoreguidelines-pro-type-const-cast)
370 |     dlMTensor->deleter(const_cast<DLManagedTensor*>(dlMTensor));
371 |   } else {
372 |     // the dlMTensor has been consumed
373 |     // PyCapsule_GetPointer has set an error indicator
374 |     PyErr_Clear();
375 |   }
376 |   END_HANDLE_TH_ERRORS_RET()
377 | }
378 |
379 | PyObject *THPModule_toDLPack(PyObject *_unused, PyObject *data)
380 | {
381 |   HANDLE_TH_ERRORS
382 |   THPUtils_assert(THPVariable_Check(data), "data must be a Tensor");
383 |   DLManagedTensor* dlMTensor = at::toDLPack(THPVariable_Unpack(data));
384 |   return PyCapsule_New(dlMTensor, "dltensor", DLPack_Capsule_Destructor);
385 |   END_HANDLE_TH_ERRORS
386 | }
387 |
388 | PyObject *THPModule_fromDLPack(PyObject *_unused, PyObject *data)
389 | {
390 |   using namespace torch::autograd;
391 |   HANDLE_TH_ERRORS
392 |   DLManagedTensor * dlMTensor = (DLManagedTensor *)PyCapsule_GetPointer(data, "dltensor");
393 |   THPUtils_assert(dlMTensor, "from_dlpack received an invalid capsule. "
394 |       "Note that DLTensor capsules can be consumed only once, "
395 |       "so you might have already constructed a tensor from it once.")
396 |   // atensor steals the ownership of the underlying storage. It also passes a
397 |   // destructor function that will be called when the underlying storage goes
398 |   // out of scope. When the destructor is called, the dlMTensor is destructed too.
399 |   auto atensor = at::fromDLPack(dlMTensor);
400 |
401 |   // Make sure this capsule will never be used again.
402 |   PyCapsule_SetName(data, "used_dltensor");
403 |
404 |   // It is possible that the call to at::fromDLPack is the very first
405 |   // call to create a Tensor in PyTorch. If so, then _lazy_init has
406 |   // not been called, and the attempt to call createPyObject will fail
407 |   // because cuda ATen types have not been registered in Python yet.
408 |   // so if we have a cuda tensor, then we need to make sure
409 |   // we have called _lazy_init here
410 |   if(atensor.is_cuda()) {
411 |     py::module::import("torch.cuda").attr("init")();
412 |   }
413 |   return THPVariable_Wrap(std::move(atensor));
414 |   END_HANDLE_TH_ERRORS
415 | }
416 |
417 | PyObject *THPModule_setAllowTF32CuDNN(PyObject *_unused, PyObject *arg)
418 | {
419 |   THPUtils_assert(PyBool_Check(arg), "set_allow_tf32_cublas expects a bool, "
420 |       "but got %s", THPUtils_typename(arg));
421 |   at::globalContext().setAllowTF32CuDNN(arg == Py_True);
422 |   Py_RETURN_NONE;
423 | }
424 |
425 | PyObject *THPModule_allowTF32CuDNN(PyObject *_unused, PyObject *noargs)
426 | {
427 |   if (at::globalContext().allowTF32CuDNN()) Py_RETURN_TRUE;
428 |   else Py_RETURN_FALSE;
429 | }
430 |
431 | PyObject *THPModule_setUserEnabledCuDNN(PyObject *_unused, PyObject *arg)
432 | {
433 |   THPUtils_assert(PyBool_Check(arg), "set_enabled_cudnn expects a bool, "
434 |       "but got %s", THPUtils_typename(arg));
435 |   at::globalContext().setUserEnabledCuDNN(arg == Py_True);
436 |   Py_RETURN_NONE;
437 | }
438 |
439 | PyObject *THPModule_userEnabledCuDNN(PyObject *_unused, PyObject *noargs)
440 | {
441 |   if (at::globalContext().userEnabledCuDNN()) Py_RETURN_TRUE;
442 |   else Py_RETURN_FALSE;
443 | }
444 |
445 | PyObject *THPModule_setUserEnabledMkldnn(PyObject *_unused, PyObject *arg)
446 | {
447 |   THPUtils_assert(PyBool_Check(arg), "set_enabled_mkldnn expects a bool, "
448 |       "but got %s", THPUtils_typename(arg));
449 |   at::globalContext().setUserEnabledMkldnn(arg == Py_True);
450 |   Py_RETURN_NONE;
451 | }
452 |
453 | PyObject *THPModule_userEnabledMkldnn(PyObject *_unused, PyObject *noargs)
454 | {
455 |   if (at::globalContext().userEnabledMkldnn()) Py_RETURN_TRUE;
456 |   else Py_RETURN_FALSE;
457 | }
458 |
459 | PyObject *THPModule_setDeterministicCuDNN(PyObject *_unused, PyObject *arg)
460 | {
461 |   HANDLE_TH_ERRORS
462 |   THPUtils_assert(PyBool_Check(arg), "set_deterministic_cudnn expects a bool, "
463 |       "but got %s", THPUtils_typename(arg));
464 |   at::globalContext().setDeterministicCuDNN(arg == Py_True);
465 |   Py_RETURN_NONE;
466 |   END_HANDLE_TH_ERRORS
467 | }
468 |
469 | PyObject *THPModule_deterministicCuDNN(PyObject *_unused, PyObject *noargs)
470 | {
471 |   if (at::globalContext().deterministicCuDNN()) Py_RETURN_TRUE;
472 |   else Py_RETURN_FALSE;
473 | }
474 |
475 | PyObject *THPModule_setDeterministicAlgorithms(PyObject *_unused, PyObject *arg)
476 | {
477 |   HANDLE_TH_ERRORS
478 |   THPUtils_assert(PyBool_Check(arg), "use_deterministic_algorithms expects a "
479 |       "bool, but got %s", THPUtils_typename(arg));
480 |   at::globalContext().setDeterministicAlgorithms(arg == Py_True);
481 |   Py_RETURN_NONE;
482 |   END_HANDLE_TH_ERRORS
483 | }
484 |
485 | PyObject *THPModule_deterministicAlgorithms(PyObject *_unused, PyObject *noargs)
486 | {
487 |   if (at::globalContext().deterministicAlgorithms()) {
488 |     Py_RETURN_TRUE;
489 |   }
490 |   Py_RETURN_FALSE;
491 | }
492 |
493 | PyObject *THPModule_setWarnAlways(PyObject *_unused, PyObject *arg)
494 | {
495 |   THPUtils_assert(PyBool_Check(arg), "setWarnOnlyOnce expects a bool, "
496 |       "but got %s", THPUtils_typename(arg));
497 |   c10::Warning::set_warnAlways(arg == Py_True);
498 |   Py_RETURN_NONE;
499 | }
500 |
501 | PyObject *THPModule_warnAlways(PyObject *_unused, PyObject *noargs)
502 | {
503 |   if (c10::Warning::get_warnAlways()) {
504 |     Py_RETURN_TRUE;
505 |   }
506 |   Py_RETURN_FALSE;
507 | }
| 508 | |||||||||
| 509 | PyObject *THPModule_setBenchmarkCuDNN(PyObject *_unused, PyObject *arg) | ||||||||
| 510 | { | ||||||||
| 511 | THPUtils_assert(PyBool_Check(arg), "set_benchmark_cudnn expects a bool, "if ((__builtin_expect((!(((((PyObject*)(arg))->ob_type) == &PyBool_Type))), (0)))) { THPUtils_setError("set_benchmark_cudnn expects a bool, " "but got %s", ((((PyObject*)(arg))->ob_type)->tp_name) ); return nullptr; } | ||||||||
| 512 | "but got %s", THPUtils_typename(arg))if ((__builtin_expect((!(((((PyObject*)(arg))->ob_type) == &PyBool_Type))), (0)))) { THPUtils_setError("set_benchmark_cudnn expects a bool, " "but got %s", ((((PyObject*)(arg))->ob_type)->tp_name) ); return nullptr; }; | ||||||||
| 513 | #ifdef __HIP_PLATFORM_HCC__ | ||||||||
| 514 | if (arg == Py_False) { | ||||||||
| 515 | TORCH_WARN_ONCE("Disabling benchmark mode for MIOpen is NOT supported. Overriding value to True"); | ||||||||
| 516 | arg = Py_True; | ||||||||
| 517 | } | ||||||||
| 518 | #endif | ||||||||
| 519 | at::globalContext().setBenchmarkCuDNN(arg == Py_True); | ||||||||
| 520 | Py_RETURN_NONE; | ||||||||
| 521 | } | ||||||||
| 522 | |||||||||
| 523 | PyObject *THPModule_benchmarkCuDNN(PyObject *_unused, PyObject *noargs) | ||||||||
| 524 | { | ||||||||
| 525 | if (at::globalContext().benchmarkCuDNN()) { | ||||||||
| 526 | Py_RETURN_TRUE; | ||||||||
| 527 | } | ||||||||
| 528 | Py_RETURN_FALSE; | ||||||||
| 529 | } | ||||||||
| 530 | |||||||||
| 531 | PyObject *THPModule_setAllowTF32CuBLAS(PyObject *_unused, PyObject *arg) | ||||||||
| 532 | { | ||||||||
| 533 | THPUtils_assert(PyBool_Check(arg), "set_allow_tf32_cublas expects a bool, " | ||||||||
| 534 | "but got %s", THPUtils_typename(arg)); | ||||||||
| 535 | at::globalContext().setAllowTF32CuBLAS(arg == Py_True); | ||||||||
| 536 | Py_RETURN_NONE; | ||||||||
| 537 | } | ||||||||
| 538 | |||||||||
| 539 | PyObject *THPModule_allowTF32CuBLAS(PyObject *_unused, PyObject *noargs) | ||||||||
| 540 | { | ||||||||
| 541 | if (at::globalContext().allowTF32CuBLAS()) { | ||||||||
| 542 | Py_RETURN_TRUE; | ||||||||
| 543 | } | ||||||||
| 544 | Py_RETURN_FALSE; | ||||||||
| 545 | } | ||||||||
| 546 | |||||||||
| 547 | PyObject *THPModule_setFlushDenormal(PyObject *_unused, PyObject *arg) { | ||||||||
| 548 | THPUtils_assert(PyBool_Check(arg), "flush_denormal expects a bool, " | ||||||||
| 549 | "but got %s", THPUtils_typename(arg)); | ||||||||
| 550 | if (!at::globalContext().setFlushDenormal(arg == Py_True)) { | ||||||||
| 551 | Py_RETURN_FALSE; | ||||||||
| 552 | }; | ||||||||
| 553 | Py_RETURN_TRUE; | ||||||||
| 554 | } | ||||||||
| 555 | |||||||||
| 556 | PyObject *THPModule_getDefaultDtype(PyObject *_unused, PyObject *arg) { | ||||||||
| 557 | HANDLE_TH_ERRORS | ||||||||
| 558 | auto scalar_type = torch::tensors::get_default_scalar_type(); | ||||||||
| 559 | auto dtype = (PyObject*)torch::getTHPDtype(scalar_type); | ||||||||
| 560 | Py_INCREF(dtype); | ||||||||
| 561 | return dtype; | ||||||||
| 562 | END_HANDLE_TH_ERRORS | ||||||||
| 563 | } | ||||||||
| 564 | |||||||||
| 565 | PyObject *THPModule_getDefaultDevice(PyObject *_unused, PyObject *arg) { | ||||||||
| 566 | HANDLE_TH_ERRORS | ||||||||
| 567 | return THPUtils_packString( | ||||||||
| 568 | c10::DeviceTypeName(dispatchKeyToDeviceType(torch::tensors::get_default_dispatch_key()), | ||||||||
| 569 | /*lower_case=*/true)); | ||||||||
| 570 | END_HANDLE_TH_ERRORS | ||||||||
| 571 | } | ||||||||
| 572 | |||||||||
| 573 | PyObject *THPModule_setQEngine(PyObject */* unused */, PyObject *arg) | ||||||||
| 574 | { | ||||||||
| 575 | THPUtils_assert(THPUtils_checkLong(arg), "set_qengine expects an int, " | ||||||||
| 576 | "but got %s", THPUtils_typename(arg)); | ||||||||
| 577 | HANDLE_TH_ERRORS | ||||||||
| 578 | auto qengine = static_cast<int>(THPUtils_unpackLong(arg)); | ||||||||
| 579 | at::globalContext().setQEngine(static_cast<at::QEngine>(qengine)); | ||||||||
| 580 | Py_RETURN_NONE; | ||||||||
| 581 | END_HANDLE_TH_ERRORS | ||||||||
| 582 | } | ||||||||
| 583 | |||||||||
| 584 | PyObject *THPModule_qEngine(PyObject *_unused, PyObject *noargs) | ||||||||
| 585 | { | ||||||||
| 586 | return THPUtils_packInt64(static_cast<int>(at::globalContext().qEngine())); | ||||||||
| 587 | } | ||||||||
| 588 | |||||||||
| 589 | PyObject *THPModule_supportedQEngines(PyObject *_unused, PyObject *noargs) | ||||||||
| 590 | { | ||||||||
| 591 | auto qengines = at::globalContext().supportedQEngines(); | ||||||||
| 592 | auto list = THPObjectPtr(PyList_New(qengines.size())); | ||||||||
| 593 | for (const auto i : c10::irange(qengines.size())) { | ||||||||
| 594 | PyObject *i64 = THPUtils_packInt64(static_cast<int>(qengines[i])); | ||||||||
| 595 | if (!i64) { | ||||||||
| 596 | throw python_error(); | ||||||||
| 597 | } | ||||||||
| 598 | PyList_SET_ITEM(list.get(), i, i64); | ||||||||
| 599 | } | ||||||||
| 600 | return list.release(); | ||||||||
| 601 | } | ||||||||
| 602 | |||||||||
| 603 | PyObject *THPModule_isEnabledXNNPACK(PyObject *_unused, PyObject *noargs) | ||||||||
| 604 | { | ||||||||
| 605 | if (at::globalContext().isXNNPACKAvailable()) Py_RETURN_TRUE; | ||||||||
| 606 | else Py_RETURN_FALSE; | ||||||||
| 607 | } | ||||||||
| 608 | |||||||||
| 609 | PyObject *THPModule_setDefaultMobileCPUAllocator(PyObject *_unused, PyObject *noargs) | ||||||||
| 610 | { | ||||||||
| 611 | try { | ||||||||
| 612 | at::globalContext().setDefaultMobileCPUAllocator(); | ||||||||
| 613 | } catch (c10::Error& e) { | ||||||||
| 614 | THPUtils_setError(e.what()); | ||||||||
| 615 | } | ||||||||
| 616 | Py_RETURN_NONE; | ||||||||
| 617 | } | ||||||||
| 618 | |||||||||
| 619 | PyObject *THPModule_unsetDefaultMobileCPUAllocator(PyObject *_unused, PyObject *noargs) | ||||||||
| 620 | { | ||||||||
| 621 | try { | ||||||||
| 622 | at::globalContext().unsetDefaultMobileCPUAllocator(); | ||||||||
| 623 | } catch (c10::Error& e) { | ||||||||
| 624 | THPUtils_setError(e.what()); | ||||||||
| 625 | } | ||||||||
| 626 | Py_RETURN_NONE; | ||||||||
| 627 | } | ||||||||
| 628 | |||||||||
| 629 | static PyObject * THPModule_vmapmode_increment_nesting(PyObject* _unused, PyObject *arg) { | ||||||||
| 630 | HANDLE_TH_ERRORS | ||||||||
| 631 | return THPUtils_packInt64(at::impl::VmapMode::increment_nesting()); | ||||||||
| 632 | END_HANDLE_TH_ERRORS | ||||||||
| 633 | } | ||||||||
| 634 | |||||||||
| 635 | static PyObject * THPModule_vmapmode_decrement_nesting(PyObject* _unused, PyObject *arg) { | ||||||||
| 636 | HANDLE_TH_ERRORS | ||||||||
| 637 | return THPUtils_packInt64(at::impl::VmapMode::decrement_nesting()); | ||||||||
| 638 | END_HANDLE_TH_ERRORS | ||||||||
| 639 | } | ||||||||
| 640 | |||||||||
| 641 | static PyObject * THPModule_set_display_vmap_fallback_warnings_mode(PyObject* _unused, PyObject *arg) { | ||||||||
| 642 | HANDLE_TH_ERRORS | ||||||||
| 643 | THPUtils_assert(PyBool_Check(arg), "enabled must be a bool, " | ||||||||
| 644 | "but got %s", THPUtils_typename(arg)); | ||||||||
| 645 | at::globalContext().setDisplayVmapFallbackWarnings(arg == Py_True); | ||||||||
| 646 | Py_RETURN_NONE; | ||||||||
| 647 | END_HANDLE_TH_ERRORS | ||||||||
| 648 | } | ||||||||
| 649 | |||||||||
| 650 | static PyObject * THPModule_are_vmap_fallback_warnings_enabled(PyObject* _unused, PyObject *arg) { | ||||||||
| 651 | HANDLE_TH_ERRORS | ||||||||
| 652 | if (at::globalContext().areVmapFallbackWarningsEnabled()) { | ||||||||
| 653 | Py_RETURN_TRUE; | ||||||||
| 654 | } else { | ||||||||
| 655 | Py_RETURN_FALSE; | ||||||||
| 656 | } | ||||||||
| 657 | END_HANDLE_TH_ERRORS | ||||||||
| 658 | } | ||||||||
| 659 | |||||||||
| 660 | //NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays, cppcoreguidelines-avoid-non-const-global-variables, modernize-avoid-c-arrays) | ||||||||
| 661 | static PyMethodDef TorchMethods[] = { | ||||||||
| 662 | {"_initExtension", THPModule_initExtension, METH_O0x0008, nullptr}, | ||||||||
| 663 | {"_autograd_init", THPAutograd_initExtension, METH_NOARGS0x0004, nullptr}, | ||||||||
| 664 | {"_add_docstr", THPModule_addDocStr, METH_VARARGS0x0001, nullptr}, | ||||||||
| 665 | {"_init_names", THPModule_initNames, METH_O0x0008, nullptr}, | ||||||||
| 666 | {"_has_distributed",THPModule_hasDistributed, METH_NOARGS0x0004, nullptr}, | ||||||||
| 667 | {"_set_default_tensor_type", THPModule_setDefaultTensorType, METH_O0x0008, nullptr}, | ||||||||
| 668 | {"_set_default_dtype", THPModule_setDefaultDtype, METH_O0x0008, nullptr}, | ||||||||
| 669 | {"_infer_size", THPModule_inferSize, METH_VARARGS0x0001, nullptr}, | ||||||||
| 670 | {"_crash_if_csrc_asan", THPModule_crashIfCsrcASAN, METH_O0x0008, nullptr}, | ||||||||
| 671 | {"_crash_if_csrc_ubsan", THPModule_crashIfCsrcUBSAN, METH_O0x0008, nullptr}, | ||||||||
| 672 | {"_crash_if_aten_asan", THPModule_crashIfATenASAN, METH_O0x0008, nullptr}, | ||||||||
| 673 | {"_show_config", THPModule_showConfig, METH_NOARGS0x0004, nullptr}, | ||||||||
| 674 | {"_cxx_flags", THPModule_cxxFlags, METH_NOARGS0x0004, nullptr}, | ||||||||
| 675 | {"_parallel_info", THPModule_parallelInfo, METH_NOARGS0x0004, nullptr}, | ||||||||
| 676 | {"_set_backcompat_broadcast_warn", THPModule_setBackcompatBroadcastWarn, METH_O0x0008, nullptr}, | ||||||||
| 677 | {"_get_backcompat_broadcast_warn", THPModule_getBackcompatBroadcastWarn, METH_NOARGS0x0004, nullptr}, | ||||||||
| 678 | {"_set_backcompat_keepdim_warn", THPModule_setBackcompatKeepdimWarn, METH_O0x0008, nullptr}, | ||||||||
| 679 | {"_get_backcompat_keepdim_warn", THPModule_getBackcompatKeepdimWarn, METH_NOARGS0x0004, nullptr}, | ||||||||
| 680 | {"get_num_threads", THPModule_getNumThreads, METH_NOARGS0x0004, nullptr}, | ||||||||
| 681 | {"set_num_threads", THPModule_setNumThreads, METH_O0x0008, nullptr}, | ||||||||
| 682 | {"get_num_interop_threads", THPModule_getNumInteropThreads, METH_NOARGS0x0004, nullptr}, | ||||||||
| 683 | {"set_num_interop_threads", THPModule_setNumInteropThreads, METH_O0x0008, nullptr}, | ||||||||
| 684 | {"_get_cudnn_enabled", THPModule_userEnabledCuDNN, METH_NOARGS0x0004, nullptr}, | ||||||||
| 685 | {"_set_cudnn_enabled", THPModule_setUserEnabledCuDNN, METH_O0x0008, nullptr}, | ||||||||
| 686 | {"_get_mkldnn_enabled", THPModule_userEnabledMkldnn, METH_NOARGS0x0004, nullptr}, | ||||||||
| 687 | {"_set_mkldnn_enabled", THPModule_setUserEnabledMkldnn, METH_O0x0008, nullptr}, | ||||||||
| 688 | {"_get_cudnn_allow_tf32", THPModule_allowTF32CuDNN, METH_NOARGS0x0004, nullptr}, | ||||||||
| 689 | {"_set_cudnn_allow_tf32", THPModule_setAllowTF32CuDNN, METH_O0x0008, nullptr}, | ||||||||
| 690 | {"_get_cudnn_benchmark", THPModule_benchmarkCuDNN, METH_NOARGS0x0004, nullptr}, | ||||||||
| 691 | {"_set_cudnn_benchmark", THPModule_setBenchmarkCuDNN, METH_O0x0008, nullptr}, | ||||||||
| 692 | {"_get_cudnn_deterministic", THPModule_deterministicCuDNN, METH_NOARGS0x0004, nullptr}, | ||||||||
| 693 | {"_set_cudnn_deterministic", THPModule_setDeterministicCuDNN, METH_O0x0008, nullptr}, | ||||||||
| 694 | {"_get_deterministic_algorithms", THPModule_deterministicAlgorithms, METH_NOARGS0x0004, nullptr}, | ||||||||
| 695 | {"_set_deterministic_algorithms", THPModule_setDeterministicAlgorithms, METH_O0x0008, nullptr}, | ||||||||
| 696 | {"_get_warnAlways", THPModule_warnAlways, METH_NOARGS0x0004, nullptr}, | ||||||||
| 697 | {"_set_warnAlways", THPModule_setWarnAlways, METH_O0x0008, nullptr}, | ||||||||
| 698 | {"_get_cublas_allow_tf32", THPModule_allowTF32CuBLAS, METH_NOARGS0x0004, nullptr}, | ||||||||
| 699 | {"_set_cublas_allow_tf32", THPModule_setAllowTF32CuBLAS, METH_O0x0008, nullptr}, | ||||||||
| 700 | {"_vmapmode_increment_nesting", THPModule_vmapmode_increment_nesting, METH_NOARGS0x0004, nullptr}, | ||||||||
| 701 | {"_vmapmode_decrement_nesting", THPModule_vmapmode_decrement_nesting, METH_NOARGS0x0004, nullptr}, | ||||||||
| 702 | {"_debug_only_display_vmap_fallback_warnings", THPModule_set_display_vmap_fallback_warnings_mode, METH_O0x0008, nullptr}, | ||||||||
| 703 | {"_debug_only_are_vmap_fallback_warnings_enabled", THPModule_are_vmap_fallback_warnings_enabled, METH_NOARGS0x0004, nullptr}, | ||||||||
| 704 | {"_to_dlpack", THPModule_toDLPack, METH_O0x0008, nullptr}, | ||||||||
| 705 | {"_from_dlpack", THPModule_fromDLPack, METH_O0x0008, nullptr}, | ||||||||
| 706 | {"set_flush_denormal", THPModule_setFlushDenormal, METH_O0x0008, nullptr}, | ||||||||
| 707 | {"get_default_dtype", THPModule_getDefaultDtype, METH_NOARGS0x0004, nullptr}, | ||||||||
| 708 | {"_get_default_device", THPModule_getDefaultDevice, METH_NOARGS0x0004, nullptr}, | ||||||||
| 709 | {"_get_qengine", THPModule_qEngine, METH_NOARGS0x0004, nullptr}, | ||||||||
| 710 | {"_set_qengine", THPModule_setQEngine, METH_O0x0008, nullptr}, | ||||||||
| 711 | {"_supported_qengines", THPModule_supportedQEngines, METH_NOARGS0x0004, nullptr}, | ||||||||
| 712 | {"_is_xnnpack_enabled", THPModule_isEnabledXNNPACK, METH_NOARGS0x0004, nullptr}, | ||||||||
| 713 | {"_set_default_mobile_cpu_allocator", THPModule_setDefaultMobileCPUAllocator, METH_NOARGS0x0004, nullptr}, | ||||||||
| 714 | {"_unset_default_mobile_cpu_allocator", THPModule_unsetDefaultMobileCPUAllocator, METH_NOARGS0x0004, nullptr}, | ||||||||
| 715 | {"_is_torch_function_enabled", THPModule_isEnabledTorchFunction, METH_NOARGS0x0004, nullptr}, | ||||||||
| 716 | {"_disabled_torch_function_impl", THPModule_disable_torch_function, METH_VARARGS0x0001, nullptr}, | ||||||||
| 717 | {"_has_torch_function", THPModule_has_torch_function, METH_O0x0008, nullptr}, | ||||||||
| 718 | {"_has_torch_function_unary", THPModule_has_torch_function_unary, METH_O0x0008, nullptr}, | ||||||||
| 719 | {"_has_torch_function_variadic", MAYBE_WRAP_FASTCALL(THPModule_has_torch_function_variadic)(PyCFunction)(void(*)(void))THPModule_has_torch_function_variadic, MAYBE_METH_FASTCALL0x0080, nullptr}, | ||||||||
| 720 | {nullptr, nullptr, 0, nullptr} | ||||||||
| 721 | }; | ||||||||
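The third field of each PyMethodDef entry is the CPython calling convention: METH_NOARGS methods (e.g. the getters above) are called with no Python-level arguments, METH_O methods (e.g. the setters) receive exactly one positional argument, and METH_VARARGS methods receive their arguments packed in a tuple; all three share the plain PyCFunction signature sketched below. The single MAYBE_METH_FASTCALL entry opts into the fastcall convention where available, which is why its function pointer goes through MAYBE_WRAP_FASTCALL.

    // shared C signature for METH_NOARGS / METH_O / METH_VARARGS entries
    PyObject* handler(PyObject* self, PyObject* arg);  // arg is NULL, the single object, or the args tuple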
| 722 | |||||||||
| 723 | bool THCPDoubleStorage_init(PyObject *module); | ||||||||
| 724 | bool THCPFloatStorage_init(PyObject *module); | ||||||||
| 725 | bool THCPHalfStorage_init(PyObject *module); | ||||||||
| 726 | bool THCPLongStorage_init(PyObject *module); | ||||||||
| 727 | bool THCPIntStorage_init(PyObject *module); | ||||||||
| 728 | bool THCPShortStorage_init(PyObject *module); | ||||||||
| 729 | bool THCPCharStorage_init(PyObject *module); | ||||||||
| 730 | bool THCPByteStorage_init(PyObject *module); | ||||||||
| 731 | bool THCPBoolStorage_init(PyObject *module); | ||||||||
| 732 | bool THCPBFloat16Storage_init(PyObject *module); | ||||||||
| 733 | bool THCPComplexDoubleStorage_init(PyObject *module); | ||||||||
| 734 | bool THCPComplexFloatStorage_init(PyObject *module); | ||||||||
| 735 | |||||||||
| 736 | void THCPStream_init(PyObject *module); | ||||||||
| 737 | void THCPEvent_init(PyObject *module); | ||||||||
| 738 | void THCPGraph_init(PyObject *module); | ||||||||
| 739 | |||||||||
| 740 | #ifdef USE_CUDA | ||||||||
| 741 | PyMethodDef* THCPModule_methods(); | ||||||||
| 742 | namespace torch { namespace cuda { | ||||||||
| 743 | |||||||||
| 744 | void initModule(PyObject *module); | ||||||||
| 745 | |||||||||
| 746 | }} // namespace torch::cuda | ||||||||
| 747 | #endif | ||||||||
| 748 | |||||||||
| 749 | #ifdef USE_MLCOMPUTE | ||||||||
| 750 | PyMethodDef* ModuleMLC_methods(); | ||||||||
| 751 | namespace torch { namespace mlc { | ||||||||
| 752 | |||||||||
| 753 | void initBindings(PyObject *module); | ||||||||
| 754 | |||||||||
| 755 | }} // namespace torch::mlc | ||||||||
| 756 | #endif | ||||||||
| 757 | |||||||||
| 758 | bool THDPDoubleStorage_init(PyObject *module); | ||||||||
| 759 | bool THDPFloatStorage_init(PyObject *module); | ||||||||
| 760 | // TODO: fix | ||||||||
| 761 | //bool THDPHalfStorage_init(PyObject *module); | ||||||||
| 762 | bool THDPLongStorage_init(PyObject *module); | ||||||||
| 763 | bool THDPIntStorage_init(PyObject *module); | ||||||||
| 764 | bool THDPShortStorage_init(PyObject *module); | ||||||||
| 765 | bool THDPCharStorage_init(PyObject *module); | ||||||||
| 766 | bool THDPByteStorage_init(PyObject *module); | ||||||||
| 767 | bool THDPBoolStorage_init(PyObject *module); | ||||||||
| 768 | bool THDPBFloat16Storage_init(PyObject *module); | ||||||||
| 769 | bool THDPComplexDoubleStorage_init(PyObject *module); | ||||||||
| 770 | bool THDPComplexFloatStorage_init(PyObject *module); | ||||||||
| 771 | |||||||||
| 772 | // NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables) | ||||||||
| 773 | static std::vector<PyMethodDef> methods; | ||||||||
| 774 | |||||||||
| 775 | // In Python we can't use the trick of C10_LOG_API_USAGE_ONCE | ||||||||
| 776 | // Guaranteed to be invoked from Python under GIL, no locking on map needed | ||||||||
| 777 | static void LogAPIUsageOnceFromPython(const std::string& event) { | ||||||||
| 778 | static std::unordered_set<std::string> seen; | ||||||||
| 779 | if (!seen.count(event)) { | ||||||||
| 780 | seen.insert(event); | ||||||||
| 781 | c10::LogAPIUsage(event); | ||||||||
| 782 | } | ||||||||
| 783 | } | ||||||||
| 784 | |||||||||
| 785 | // Weak reference to tensor, used to test a tensor isn't leaked | ||||||||
| 786 | class WeakTensorRef { | ||||||||
| 787 | c10::weak_intrusive_ptr<c10::TensorImpl> weakref_; | ||||||||
| 788 | |||||||||
| 789 | public: | ||||||||
| 790 | WeakTensorRef(const at::Tensor& t): | ||||||||
| 791 | weakref_(t.getIntrusivePtr()) { | ||||||||
| 792 | } | ||||||||
| 793 | |||||||||
| 794 | bool expired() { | ||||||||
| 795 | return weakref_.expired(); | ||||||||
| 796 | } | ||||||||
| 797 | }; | ||||||||
| 798 | |||||||||
| 799 | extern "C" | ||||||||
| 800 | #ifdef _WIN32 | ||||||||
| 801 | __declspec(dllexport) | ||||||||
| 802 | #endif | ||||||||
| 803 | TORCH_API PyObject* initModule(); | ||||||||
| 804 | // separate decl and defn for msvc error C2491 | ||||||||
| 805 | PyObject* initModule() { | ||||||||
| 806 | HANDLE_TH_ERRORS | ||||||||
| 807 | at::internal::lazy_init_num_threads(); | ||||||||
| 808 | |||||||||
| 809 | C10_LOG_API_USAGE_ONCE("torch.python.import"); | ||||||||
| 810 | |||||||||
| 811 | // NOLINTNEXTLINE(cppcoreguidelines-macro-usage) | ||||||||
| 812 | #define ASSERT_TRUE(cmd) if (!(cmd)) return nullptr | ||||||||
| 813 | |||||||||
| 814 | THPUtils_addPyMethodDefs(methods, TorchMethods); | ||||||||
| 815 | THPUtils_addPyMethodDefs(methods, DataLoaderMethods); | ||||||||
| 816 | THPUtils_addPyMethodDefs(methods, torch::autograd::python_functions()); | ||||||||
| 817 | THPUtils_addPyMethodDefs(methods, torch::multiprocessing::python_functions()); | ||||||||
| 818 | #ifdef USE_CUDA | ||||||||
| 819 | THPUtils_addPyMethodDefs(methods, THCPModule_methods()); | ||||||||
| 820 | #endif | ||||||||
| 821 | #ifdef USE_MLCOMPUTE | ||||||||
| 822 | THPUtils_addPyMethodDefs(methods, ModuleMLC_methods()); | ||||||||
| 823 | #endif | ||||||||
| 824 | #if defined(USE_DISTRIBUTED) && defined(USE_C10D) | ||||||||
| 825 | THPUtils_addPyMethodDefs(methods, torch::distributed::c10d::python_functions()); | ||||||||
| 826 | #ifndef _WIN32 | ||||||||
| 827 | THPUtils_addPyMethodDefs(methods, torch::distributed::rpc::python_functions()); | ||||||||
| 828 | THPUtils_addPyMethodDefs( | ||||||||
| 829 | methods, torch::distributed::autograd::python_functions()); | ||||||||
| 830 | THPUtils_addPyMethodDefs(methods, torch::distributed::rpc::testing::python_functions()); | ||||||||
| 831 | #endif | ||||||||
| 832 | #endif | ||||||||
| 833 | |||||||||
| 834 | static struct PyModuleDef torchmodule = { | ||||||||
| 835 | PyModuleDef_HEAD_INIT, | ||||||||
| 836 | "torch._C", | ||||||||
| 837 | nullptr, | ||||||||
| 838 | -1, | ||||||||
| 839 | methods.data() | ||||||||
| 840 | }; | ||||||||
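The -1 in the fourth slot is the module's m_size: torch._C keeps its state in process-wide globals (such as the static `methods` vector above) rather than in per-module storage, so it does not support being instantiated again in sub-interpreters.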
| 841 | ASSERT_TRUE(module = PyModule_Create(&torchmodule)); | ||||||||
| 842 | ASSERT_TRUE(THPGenerator_init(module)); | ||||||||
| 843 | ASSERT_TRUE(THPException_init(module)); | ||||||||
| 844 | THPSize_init(module); | ||||||||
| 845 | THPDtype_init(module); | ||||||||
| 846 | THPDTypeInfo_init(module); | ||||||||
| 847 | THPLayout_init(module); | ||||||||
| 848 | THPMemoryFormat_init(module); | ||||||||
| 849 | THPQScheme_init(module); | ||||||||
| 850 | THPDevice_init(module); | ||||||||
| 851 | THPStream_init(module); | ||||||||
| 852 | ASSERT_TRUE(THPVariable_initModule(module)); | ||||||||
| 853 | ASSERT_TRUE(THPFunction_initModule(module)); | ||||||||
| 854 | ASSERT_TRUE(THPEngine_initModule(module)); | ||||||||
| 855 | // NOTE: We need to be able to access OperatorExportTypes from ONNX for use in | ||||||||
| 856 | // the export side of JIT, so this ONNX init needs to appear before the JIT | ||||||||
| 857 | // init. | ||||||||
| 858 | torch::onnx::initONNXBindings(module); | ||||||||
| 859 | torch::jit::initJITBindings(module); | ||||||||
| 860 | torch::fx::initFx(module); | ||||||||
| 861 | torch::impl::dispatch::initDispatchBindings(module); | ||||||||
| 862 | torch::throughput_benchmark::initThroughputBenchmarkBindings(module); | ||||||||
| 863 | torch::crash_handler::initCrashHandlerBindings(module); | ||||||||
| 864 | torch::autograd::initNNFunctions(module); | ||||||||
| 865 | torch::autograd::initFFTFunctions(module); | ||||||||
| 866 | torch::autograd::initLinalgFunctions(module); | ||||||||
| 867 | torch::autograd::initSpecialFunctions(module); | ||||||||
| 868 | torch::autograd::init_legacy_variable(module); | ||||||||
| 869 | torch::python::init_bindings(module); | ||||||||
| 870 | #ifdef USE_CUDA | ||||||||
| 871 | torch::cuda::initModule(module); | ||||||||
| 872 | #endif | ||||||||
| 873 | #ifdef USE_MLCOMPUTE | ||||||||
| 874 | torch::mlc::init_bindings(module); | ||||||||
| 875 | #endif | ||||||||
| 876 | ASSERT_TRUE(THPDoubleStorage_init(module)); | ||||||||
| 877 | ASSERT_TRUE(THPFloatStorage_init(module)); | ||||||||
| 878 | ASSERT_TRUE(THPHalfStorage_init(module)); | ||||||||
| 879 | ASSERT_TRUE(THPLongStorage_init(module)); | ||||||||
| 880 | ASSERT_TRUE(THPIntStorage_init(module)); | ||||||||
| 881 | ASSERT_TRUE(THPShortStorage_init(module)); | ||||||||
| 882 | ASSERT_TRUE(THPCharStorage_init(module)); | ||||||||
| 883 | ASSERT_TRUE(THPByteStorage_init(module)); | ||||||||
| 884 | ASSERT_TRUE(THPBoolStorage_init(module)); | ||||||||
| 885 | ASSERT_TRUE(THPQUInt8Storage_init(module)); | ||||||||
| 886 | ASSERT_TRUE(THPQInt8Storage_init(module)); | ||||||||
| 887 | ASSERT_TRUE(THPQInt32Storage_init(module)); | ||||||||
| 888 | ASSERT_TRUE(THPQUInt4x2Storage_init(module)); | ||||||||
| 889 | ASSERT_TRUE(THPBFloat16Storage_init(module)); | ||||||||
| 890 | ASSERT_TRUE(THPComplexDoubleStorage_init(module)); | ||||||||
| 891 | ASSERT_TRUE(THPComplexFloatStorage_init(module)); | ||||||||
| 892 | |||||||||
| 893 | #ifdef USE_CUDA | ||||||||
| 894 | // This will only initialise base classes and attach them to the library namespace. | ||||||||
| 895 | // They won't be ready for real usage until the cuda module is imported, which | ||||||||
| 896 | // completes the process (but it defines Python classes before calling back into | ||||||||
| 897 | // C, so these lines have to execute first). | ||||||||
| 898 | ASSERT_TRUE(THCPDoubleStorage_init(module)); | ||||||||
| 899 | ASSERT_TRUE(THCPFloatStorage_init(module)); | ||||||||
| 900 | ASSERT_TRUE(THCPHalfStorage_init(module)); | ||||||||
| 901 | ASSERT_TRUE(THCPLongStorage_init(module)); | ||||||||
| 902 | ASSERT_TRUE(THCPIntStorage_init(module)); | ||||||||
| 903 | ASSERT_TRUE(THCPShortStorage_init(module)); | ||||||||
| 904 | ASSERT_TRUE(THCPCharStorage_init(module)); | ||||||||
| 905 | ASSERT_TRUE(THCPByteStorage_init(module)); | ||||||||
| 906 | ASSERT_TRUE(THCPBoolStorage_init(module)); | ||||||||
| 907 | ASSERT_TRUE(THCPBFloat16Storage_init(module)); | ||||||||
| 908 | ASSERT_TRUE(THCPComplexDoubleStorage_init(module)); | ||||||||
| 909 | ASSERT_TRUE(THCPComplexFloatStorage_init(module)); | ||||||||
| 910 | |||||||||
| 911 | THCPStream_init(module); | ||||||||
| 912 | THCPEvent_init(module); | ||||||||
| 913 | THCPGraph_init(module); | ||||||||
| 914 | #endif | ||||||||
| 915 | |||||||||
| 916 | auto set_module_attr = [&](const char* name, PyObject* v, bool incref = true) { | ||||||||
| 917 | // PyModule_AddObject steals reference | ||||||||
| 918 | if (incref) { | ||||||||
| 919 | Py_INCREF(v); | ||||||||
| 920 | } | ||||||||
| 921 | return PyModule_AddObject(module, name, v) == 0; | ||||||||
| 922 | }; | ||||||||
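Because PyModule_AddObject only steals the reference on success, set_module_attr increfs by default so the module ends up owning its own reference to the attribute independent of the caller's. Call sites that want to hand their reference over outright pass incref = false, as done for default_generator and DisableTorchFunction near the end of initModule:

    set_module_attr("has_cudnn", has_cudnn);                      // borrowed singleton, incref'd for the module
    set_module_attr("default_generator", gen, /*incref=*/false);  // `gen` stands in for the freshly created object whose reference is given away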
| 923 | |||||||||
| 924 | #if defined(USE_CUDNN) || defined(__HIP_PLATFORM_HCC__) | ||||||||
| 925 | PyObject *has_cudnn = Py_True; | ||||||||
| 926 | #else | ||||||||
| 927 | PyObject *has_cudnn = Py_False; | ||||||||
| 928 | #endif | ||||||||
| 929 | ASSERT_TRUE(set_module_attr("has_cudnn", has_cudnn)); | ||||||||
| 930 | |||||||||
| 931 | // force ATen to initialize because it handles | ||||||||
| 932 | // setting up TH Errors so that they throw C++ exceptions | ||||||||
| 933 | at::init(); | ||||||||
| 934 | |||||||||
| 935 | // Automatically translate errors thrown from pybind11 functions | ||||||||
| 936 | py::register_exception_translator([](std::exception_ptr e) { // NOLINT | ||||||||
| 937 | if (torch::crash_handler::is_enabled_on_exceptions()) { | ||||||||
| 938 | torch::crash_handler::write_minidump(); | ||||||||
| 939 | } | ||||||||
| 940 | |||||||||
| 941 | try { | ||||||||
| 942 | if (e) { | ||||||||
| 943 | std::rethrow_exception(e); | ||||||||
| 944 | } | ||||||||
| 945 | } | ||||||||
| 946 | CATCH_TH_ERRORS() | ||||||||
| 947 | }); | ||||||||
| 948 | |||||||||
| 949 | auto py_module = py::reinterpret_borrow<py::module>(module); | ||||||||
| 950 | py_module.def("_demangle", &c10::demangle); | ||||||||
| 951 | py_module.def("_log_api_usage_once", &LogAPIUsageOnceFromPython); | ||||||||
| 952 | |||||||||
| 953 | py_module.def("vitals_enabled", &at::vitals::torchVitalEnabled); | ||||||||
| 954 | py_module.def("set_vital", [](const std::string &vital, const std::string &attr, const std::string value){ | ||||||||
| 955 | return at::vitals::VitalsAPI.setVital(vital, attr, value); | ||||||||
| 956 | }); | ||||||||
| 957 | py_module.def("read_vitals", [](){ | ||||||||
| 958 | return at::vitals::VitalsAPI.readVitals(); | ||||||||
| 959 | }); | ||||||||
| 960 | |||||||||
| 961 | py_module.def( | ||||||||
| 962 | "init_num_threads", | ||||||||
| 963 | torch::wrap_pybind_function(at::init_num_threads), | ||||||||
| 964 | R"( | ||||||||
| 965 | init_num_threads() | ||||||||
| 966 | |||||||||
| 967 | Initializes the number of parallel threads used on the current thread. | ||||||||
| 968 | |||||||||
| 969 | Call this whenever a new thread is created in order to propagate values from | ||||||||
| 970 | :func:`torch.set_num_threads` onto the new thread. | ||||||||
| 971 | )"); | ||||||||
| 972 | |||||||||
| 973 | ASSERT_TRUE(set_module_attr("has_openmp", at::hasOpenMP() ? Py_True : Py_False))if (!(set_module_attr("has_openmp", at::hasOpenMP() ? ((PyObject *) &_Py_TrueStruct) : ((PyObject *) &_Py_FalseStruct )))) return nullptr; | ||||||||
| 974 | ASSERT_TRUE(set_module_attr("has_mkl", at::hasMKL() ? Py_True : Py_False))if (!(set_module_attr("has_mkl", at::hasMKL() ? ((PyObject *) &_Py_TrueStruct) : ((PyObject *) &_Py_FalseStruct))) ) return nullptr; | ||||||||
| 975 | ASSERT_TRUE(set_module_attr("has_lapack", at::hasLAPACK() ? Py_True : Py_False))if (!(set_module_attr("has_lapack", at::hasLAPACK() ? ((PyObject *) &_Py_TrueStruct) : ((PyObject *) &_Py_FalseStruct )))) return nullptr; | ||||||||
| 976 | |||||||||
| 977 | py_module.def( | ||||||||
| 978 | "_valgrind_supported_platform", [](){ | ||||||||
| 979 | #if defined(USE_VALGRIND) | ||||||||
| 980 | return true; | ||||||||
| 981 | #else | ||||||||
| 982 | return false; | ||||||||
| 983 | #endif | ||||||||
| 984 | } | ||||||||
| 985 | ); | ||||||||
| 986 | |||||||||
| 987 | py_module.def( | ||||||||
| 988 | "_valgrind_toggle", [](){ | ||||||||
| 989 | #if defined(USE_VALGRIND) | ||||||||
| 990 | CALLGRIND_TOGGLE_COLLECT; | ||||||||
| 991 | #else | ||||||||
| 992 | TORCH_CHECK(false, "Valgrind is not supported."); | ||||||||
| 993 | #endif | ||||||||
| 994 | } | ||||||||
| 995 | ); | ||||||||
| 996 | |||||||||
| 997 | py_module.def( | ||||||||
| 998 | "_valgrind_toggle_and_dump_stats", [](){ | ||||||||
| 999 | #if defined(USE_VALGRIND) | ||||||||
| 1000 | // NB: If we don't toggle collect around dump stats, callgrind_annotate | ||||||||
| 1001 | // won't process the results correctly. Specifically, | ||||||||
| 1002 | // `callgrind_annotate --inclusive=no` will be almost completely empty. | ||||||||
| 1003 | CALLGRIND_TOGGLE_COLLECT; | ||||||||
| 1004 | CALLGRIND_DUMP_STATS; | ||||||||
| 1005 | #else | ||||||||
| 1006 | TORCH_CHECK(false, "Valgrind is not supported."); | ||||||||
| 1007 | #endif | ||||||||
| 1008 | } | ||||||||
| 1009 | ); | ||||||||
| 1010 | |||||||||
| 1011 | py::class_<WeakTensorRef>(py_module, "_WeakTensorRef") | ||||||||
| 1012 | .def(py::init([](py::object tensor) { | ||||||||
| 1013 | return WeakTensorRef(THPVariable_Unpack(tensor.ptr())); | ||||||||
| 1014 | })) | ||||||||
| 1015 | .def("expired", &WeakTensorRef::expired); | ||||||||
| 1016 | |||||||||
| 1017 | #ifdef USE_CUDA | ||||||||
| 1018 | PyObject *has_cuda = Py_True; | ||||||||
| 1019 | #else | ||||||||
| 1020 | PyObject *has_cuda = Py_False; | ||||||||
| 1021 | #endif | ||||||||
| 1022 | #ifdef USE_MLCOMPUTE | ||||||||
| 1023 | PyObject *has_mlc = Py_True; | ||||||||
| 1024 | #else | ||||||||
| 1025 | PyObject *has_mlc = Py_False; | ||||||||
| 1026 | #endif | ||||||||
| 1027 | |||||||||
| 1028 | ASSERT_TRUE(set_module_attr("has_mlc", has_mlc)); | ||||||||
| 1029 | |||||||||
| 1030 | ASSERT_TRUE(set_module_attr("has_cuda", has_cuda)); | ||||||||
| 1031 | |||||||||
| 1032 | ASSERT_TRUE(set_module_attr("has_mkldnn", at::hasMKLDNN() ? Py_True : Py_False)); | ||||||||
| 1033 | |||||||||
| 1034 | #ifdef _GLIBCXX_USE_CXX11_ABI | ||||||||
| 1035 | ASSERT_TRUE(set_module_attr("_GLIBCXX_USE_CXX11_ABI", _GLIBCXX_USE_CXX11_ABI ? Py_True : Py_False)); | ||||||||
| 1036 | #else | ||||||||
| 1037 | ASSERT_TRUE(set_module_attr("_GLIBCXX_USE_CXX11_ABI", Py_False)); | ||||||||
| 1038 | #endif | ||||||||
| 1039 | |||||||||
| 1040 | // See note [Pybind11 ABI constants] | ||||||||
| 1041 | #define SET_STR_DEFINE(name) \ | ||||||||
| 1042 | ASSERT_TRUE(set_module_attr("_" # name, THPUtils_packString(name)))if (!(set_module_attr("_" # name, THPUtils_packString(name))) ) return nullptr | ||||||||
| 1043 | |||||||||
| 1044 | #ifdef PYBIND11_COMPILER_TYPE | ||||||||
| 1045 | SET_STR_DEFINE(PYBIND11_COMPILER_TYPE); | ||||||||
| 1046 | #else | ||||||||
| 1047 | ASSERT_TRUE(set_module_attr("_" C10_STRINGIZE(PYBIND11_COMPILER_TYPE), Py_None)); | ||||||||
| 1048 | #endif | ||||||||
| 1049 | |||||||||
| 1050 | #ifdef PYBIND11_STDLIB | ||||||||
| 1051 | SET_STR_DEFINE(PYBIND11_STDLIB); | ||||||||
| 1052 | #else | ||||||||
| 1053 | ASSERT_TRUE(set_module_attr("_" C10_STRINGIZE(PYBIND11_STDLIB), Py_None)); | ||||||||
| 1054 | #endif | ||||||||
| 1055 | |||||||||
| 1056 | #ifdef PYBIND11_BUILD_ABI | ||||||||
| 1057 | SET_STR_DEFINE(PYBIND11_BUILD_ABI); | ||||||||
| 1058 | #else | ||||||||
| 1059 | ASSERT_TRUE(set_module_attr("_" C10_STRINGIZE(PYBIND11_BUILD_ABI), Py_None)); | ||||||||
| 1060 | #endif | ||||||||
| 1061 | #undef SET_STR_DEFINE | ||||||||
| 1062 | |||||||||
| 1063 | const auto& defaultGenerator = at::detail::getDefaultCPUGenerator(); | ||||||||
| 1064 | THPDefaultCPUGenerator = (THPGenerator*)THPGenerator_initDefaultGenerator(defaultGenerator); | ||||||||
| 1065 | // This reference is meant to be given away, so no need to incref here. | ||||||||
| 1066 | ASSERT_TRUE(set_module_attr("default_generator", (PyObject*)THPDefaultCPUGenerator, /* incref= */ false))if (!(set_module_attr("default_generator", (PyObject*)THPDefaultCPUGenerator , false))) return nullptr; | ||||||||
| 1067 | ASSERT_TRUE(set_module_attr("DisableTorchFunction", (PyObject*)THPModule_DisableTorchFunctionType(), /* incref= */ false))if (!(set_module_attr("DisableTorchFunction", (PyObject*)THPModule_DisableTorchFunctionType (), false))) return nullptr; | ||||||||
| 1068 | torch::set_disabled_torch_function_impl(PyObject_GetAttrString(module, "_disabled_torch_function_impl")); | ||||||||
| 1069 | ASSERT_TRUE(torch::disabled_torch_function_impl() != nullptr)if (!(torch::disabled_torch_function_impl() != nullptr)) return nullptr; | ||||||||
| 1070 | return module; | ||||||||
| 1071 | END_HANDLE_TH_ERRORS | ||||||||
| 1072 | } | ||||||||
| 1073 | |||||||||
| 1074 | // Checks that the _C shared library isn't initialized multiple times. This | ||||||||
| 1075 | // can happen if the same csrc files are compiled into multiple shared | ||||||||
| 1076 | // libraries. | ||||||||
| 1077 | inline void pytorch_duplicate_guard() { | ||||||||
| 1078 | static int initialized = 0; | ||||||||
| 1079 | if (initialized) { | ||||||||
| 1080 | fprintf(stderr, "pytorch: _C shared library re-initialized\n"); | ||||||||
| 1081 | abort(); | ||||||||
| 1082 | } | ||||||||
| 1083 | initialized = 1; | ||||||||
| 1084 | } | ||||||||
| 1085 | |||||||||
| 1086 | struct call_duplicate_guard { | ||||||||
| 1087 | call_duplicate_guard() { pytorch_duplicate_guard(); } | ||||||||
| 1088 | }; | ||||||||
| 1089 | |||||||||
| 1090 | // NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables) | ||||||||
| 1091 | static call_duplicate_guard _call_duplicate_guard; |
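The file-scope _call_duplicate_guard object runs pytorch_duplicate_guard() during static initialization, i.e. as soon as this shared library is loaded, which is what turns an accidental second load of these csrc files into the abort above.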
| 1 | #pragma once | |||
| 2 | ||||
| 3 | #include <torch/csrc/python_headers.h> | |||
| 4 | #include <stdexcept> | |||
| 5 | #include <string> | |||
| 6 | #include <torch/csrc/utils/object_ptr.h> | |||
| 7 | #include <torch/csrc/utils/pybind.h> | |||
| 8 | ||||
| 9 | // Utilities for handling Python strings. Note that PyString, when defined, is | |||
| 10 | // the same as PyBytes. | |||
| 11 | ||||
| 12 | // Returns true if obj is a bytes/str or unicode object | |||
| 13 | // As of Python 3.6, this does not require the GIL | |||
| 14 | inline bool THPUtils_checkString(PyObject* obj) { | |||
| 15 | return PyBytes_Check(obj) || PyUnicode_Check(obj); | ||||
| 16 | } | |||
| 17 | ||||
| 18 | // Unpacks PyBytes (PyString) or PyUnicode as std::string | |||
| 19 | // PyBytes are unpacked as-is. PyUnicode is unpacked as UTF-8. | |||
| 20 | // NOTE: this method requires the GIL | |||
| 21 | inline std::string THPUtils_unpackString(PyObject* obj) { | |||
| 22 | if (PyBytes_Check(obj)) { | ||||
| 23 | size_t size = PyBytes_GET_SIZE(obj); | ||||
| 24 | return std::string(PyBytes_AS_STRING(obj), size); | ||||
| 25 | } | ||||
| 26 | if (PyUnicode_Check(obj)) { | ||||
| 27 | // NOLINTNEXTLINE(cppcoreguidelines-init-variables) | |||
| 28 | Py_ssize_t size; | |||
| 29 | const char* data = PyUnicode_AsUTF8AndSize(obj, &size); | |||
| 30 | if (!data) { | |||
| 31 | throw std::runtime_error("error unpacking string as utf-8"); | |||
| 32 | } | |||
| 33 | return std::string(data, (size_t)size); | |||
| 34 | } | |||
| 35 | throw std::runtime_error("unpackString: expected bytes or unicode object"); | |||
| 36 | } | |||
| 37 | ||||
| 38 | // Unpacks PyBytes (PyString) or PyUnicode as c10::string_view | |||
| 39 | // PyBytes are unpacked as-is. PyUnicode is unpacked as UTF-8. | |||
| 40 | // NOTE: If `obj` is destroyed, then the non-owning c10::string_view will | |||
| 41 | // become invalid. If the string needs to be accessed at any point after | |||
| 42 | // `obj` is destroyed, then the c10::string_view should be copied into | |||
| 43 | // a std::string, or another owning object, and kept alive. For an example, | |||
| 44 | // look at how IValue and autograd nodes handle c10::string_view arguments. | |||
| 45 | // NOTE: this method requires the GIL | |||
| 46 | inline c10::string_view THPUtils_unpackStringView(PyObject* obj) { | |||
| 47 | if (PyBytes_Check(obj)) { | |||
| 48 | size_t size = PyBytes_GET_SIZE(obj); | |||
| 49 | return c10::string_view(PyBytes_AS_STRING(obj), size); | |||
| 50 | } | |||
| 51 | if (PyUnicode_Check(obj)) { | |||
| 52 | // NOLINTNEXTLINE(cppcoreguidelines-init-variables) | |||
| 53 | Py_ssize_t size; | |||
| 54 | const char* data = PyUnicode_AsUTF8AndSize(obj, &size); | |||
| 55 | if (!data) { | |||
| 56 | throw std::runtime_error("error unpacking string as utf-8"); | |||
| 57 | } | |||
| 58 | return c10::string_view(data, (size_t)size); | |||
| 59 | } | |||
| 60 | throw std::runtime_error("unpackString: expected bytes or unicode object"); | |||
| 61 | } | |||
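A sketch of the lifetime note above, under stated assumptions: the caller already holds the GIL, py::object is in scope through the pybind.h include, and keep_after_release is a hypothetical name.

#include <string>
#include <torch/csrc/utils/python_strings.h>

// Hypothetical helper: the view is non-owning, so copy it into an owning
// std::string while the Python object is still alive.
std::string keep_after_release(py::object obj) {
  c10::string_view view = THPUtils_unpackStringView(obj.ptr());  // borrows obj's buffer
  std::string owned(view.data(), view.size());  // copy while obj is alive
  obj = py::object();  // drop the reference; `view` must not be used past this point
  return owned;        // safe: `owned` holds its own copy of the characters
}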
| 62 | ||||
| 63 | inline PyObject* THPUtils_packString(const char* str) { | |||
| 64 | return PyUnicode_FromString(str); | |||
| 65 | } | |||
| 66 | ||||
| 67 | inline PyObject* THPUtils_packString(const std::string& str) { | |||
| 68 | return PyUnicode_FromStringAndSize(str.c_str(), str.size()); | |||
| 69 | } | |||
| 70 | ||||
| 71 | inline PyObject* THPUtils_internString(const std::string& str) { | |||
| 72 | return PyUnicode_InternFromString(str.c_str()); | |||
| 73 | } | |||
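The three helpers above each return a new reference that the caller owns. A minimal sketch of handing that reference to an owner that releases it, assuming py::reinterpret_steal is in scope via the pybind.h include; example_pack is a hypothetical name, not an API of this header.

#include <stdexcept>
#include <string>
#include <torch/csrc/utils/python_strings.h>

// Hypothetical helper: take ownership of the new reference so it is released
// automatically instead of being leaked.
void example_pack(const std::string& s) {
  auto packed = py::reinterpret_steal<py::object>(THPUtils_packString(s));
  if (!packed) {
    throw std::runtime_error("could not convert string to a Python object");
  }
  // ... use packed.ptr() while `packed` is alive ...
}  // `packed` releases its reference here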
| 74 | ||||
| 75 | // Precondition: THPUtils_checkString(obj) must be true | |||
| 76 | inline bool THPUtils_isInterned(PyObject* obj) { | |||
| 77 | return PyUnicode_CHECK_INTERNED(obj); | |||
| 78 | } | |||
| 79 | ||||
| 80 | // Precondition: THPUtils_checkString(obj) must be true | |||
| 81 | inline void THPUtils_internStringInPlace(PyObject** obj) { | |||
| 82 | PyUnicode_InternInPlace(obj); | |||
| 83 | } | |||
| 84 | ||||
| 85 | /* | |||
| 86 | * Reference: https://github.com/numpy/numpy/blob/f4c497c768e0646df740b647782df463825bfd27/numpy/core/src/common/get_attr_string.h#L42 | |||
| 87 | * | |||
| 88 | * Stripped down version of PyObject_GetAttrString, | |||
| 89 | * avoids lookups for None, tuple, and List objects, | |||
| 90 | * and doesn't create a PyErr since this code ignores it. | |||
| 91 | * | |||
| 92 | * This can be much faster than PyObject_GetAttrString when | |||
| 93 | * the caller does not use exceptions. | |||
| 94 | * | |||
| 95 | * 'obj' is the object in which to search for the attribute. | |||
| 96 | * | |||
| 97 | * 'name' is the attribute to search for. | |||
| 98 | * | |||
| 99 | * Returns a py::object wrapping the return value. If the attribute lookup | |||
| 100 | * fails, the wrapped value will be NULL. | |||
| 101 | * | |||
| 102 | */ | |||
| 103 | ||||
| 104 | // NOLINTNEXTLINE(clang-diagnostic-unused-function) | |||
| 105 | static py::object PyObject_FastGetAttrString(PyObject *obj, const char *name) | |||
| 106 | { | |||
| 107 | PyTypeObject *tp = Py_TYPE(obj); | |||
| 108 | PyObject* res = nullptr; | |||
| 109 | ||||
| 110 | /* Attribute referenced by (char *)name */ | |||
| 111 | if (tp->tp_getattr != nullptr) { | |||
| 112 | // This is OK per https://bugs.python.org/issue39620 | |||
| 113 | res = (*tp->tp_getattr)(obj, const_cast<char*>(name)); | |||
| 114 | if (res == nullptr) { | |||
| 115 | PyErr_Clear(); | |||
| 116 | } | |||
| 117 | } | |||
| 118 | /* Attribute referenced by (PyObject *)name */ | |||
| 119 | else if (tp->tp_getattro != nullptr) { | |||
| 120 | auto w = py::reinterpret_steal<py::object>( | |||
| 121 | THPUtils_internString(name)); | |||
| 122 | if (w.ptr() == nullptr) { | |||
| 123 | return py::object(); | |||
| 124 | } | |||
| 125 | res = (*tp->tp_getattro)(obj, w.ptr()); | |||
| 126 | if (res == nullptr) { | |||
| 127 | PyErr_Clear(); | |||
| 128 | } | |||
| 129 | } | |||
| 130 | return py::reinterpret_steal<py::object>(res); | |||
| 131 | } |
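A usage sketch of the fast getter, illustrative only; example_has_attr is a hypothetical name. The returned py::object wraps NULL when the attribute is missing and no Python error is left pending, so the caller can simply test the pointer.

// Hypothetical helper: probe for an optional attribute without raising.
bool example_has_attr(PyObject* obj, const char* name) {
  py::object attr = PyObject_FastGetAttrString(obj, name);
  // A NULL-wrapping py::object signals a failed lookup; no PyErr is set.
  return attr.ptr() != nullptr;
}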
| 1 | #ifndef PyUnicode_FromString |
| 2 | struct _object; |
| 3 | typedef struct _object PyObject; |
| 4 | PyObject* clang_analyzer_PyObject_New_Reference(); |
| 5 | PyObject *PyUnicode_FromString(const char *u) { |
| 6 | return clang_analyzer_PyObject_New_Reference(); |
| 7 | } |
| 8 | #else |
| 9 | #warning "API PyUnicode_FromString is defined as a macro." |
| 10 | #endif |
| 1 | void _Py_INCREF(PyObject *op) { ++op->ob_refcnt; } |