Files
pytorch/test/cpp_extensions/cuda_extension.cpp
Edward Yang 73a97387c1 Replace AT_CHECK with TORCH_CHECK [shard 9/10]
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/20435

Reviewed By: jerryzh168

Differential Revision: D15318877

fbshipit-source-id: 4d83571187ea14a604fef83ac355d328b46d93e1
2019-05-15 08:05:59 -07:00

20 lines
738 B
C++

#include <torch/extension.h>
// Declare the function from cuda_extension.cu. It will be compiled
// separately with nvcc and linked with the object file of cuda_extension.cpp
// into one shared library.
void sigmoid_add_cuda(const float* x, const float* y, float* output, int size);
// Computes sigmoid(x) + sigmoid(y) elementwise on the GPU via the
// hand-written kernel declared above (compiled separately by nvcc).
//
// x, y: CUDA tensors of dtype float32 with identical shapes, contiguous.
// Returns a newly allocated CUDA tensor of the same shape/dtype as x.
// Throws (via TORCH_CHECK) if the inputs violate those preconditions.
torch::Tensor sigmoid_add(torch::Tensor x, torch::Tensor y) {
  // Tensor::is_cuda() replaces the deprecated type().is_cuda() accessor.
  TORCH_CHECK(x.is_cuda(), "x must be a CUDA tensor");
  TORCH_CHECK(y.is_cuda(), "y must be a CUDA tensor");
  // The kernel works on raw float pointers, so it has no way to detect a
  // shape mismatch or a non-dense layout itself — check here instead of
  // letting it read/write the wrong elements.
  TORCH_CHECK(x.sizes() == y.sizes(), "x and y must have the same shape");
  TORCH_CHECK(x.is_contiguous(), "x must be contiguous");
  TORCH_CHECK(y.is_contiguous(), "y must be contiguous");
  auto output = torch::zeros_like(x);
  // data_ptr<float>() replaces the deprecated data<float>(); it also
  // type-checks that the tensors really hold float32.
  sigmoid_add_cuda(
      x.data_ptr<float>(),
      y.data_ptr<float>(),
      output.data_ptr<float>(),
      // numel() returns int64_t; the kernel takes int. Cast explicitly.
      static_cast<int>(output.numel()));
  return output;
}
// Register the Python binding. TORCH_EXTENSION_NAME is supplied by the
// PyTorch build system so the module name matches the extension's name.
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
// Exposes sigmoid_add(x, y) to Python; the string is the docstring.
m.def("sigmoid_add", &sigmoid_add, "sigmoid(x) + sigmoid(y)");
}