From 49314c12d9c945627f50bac34ed0379e088b5f07 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adri=C3=A0=20Arrufat?= <1671644+arrufat@users.noreply.github.com>
Date: Sun, 10 Apr 2022 22:31:22 +0900
Subject: [PATCH] Use CUDA in LayerNorm gradient computation

I don't know how I could miss this.
---
 dlib/cuda/tensor_tools.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/dlib/cuda/tensor_tools.cpp b/dlib/cuda/tensor_tools.cpp
index 5c4f3bed4..fc78d73e7 100644
--- a/dlib/cuda/tensor_tools.cpp
+++ b/dlib/cuda/tensor_tools.cpp
@@ -687,7 +687,11 @@ namespace dlib { namespace tt
             tensor& beta_grad
         )
         {
+#ifdef DLIB_USE_CUDA
+            cuda::layer_normalize_gradient(eps, gradient_input, means, invstds, src, gamma, src_grad, gamma_grad, beta_grad);
+#else
             cpu::layer_normalize_gradient(eps, gradient_input, means, invstds, src, gamma, src_grad, gamma_grad, beta_grad);
+#endif
         }

// ----------------------------------------------------------------------------------------
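
Note (not part of the patch): a minimal sketch of how the tt:: wrapper above is
typically exercised. The call to tt::layer_normalize to produce means/invstds
matches dlib's tensor_tools interface, but the gamma/beta shapes used here are
an assumption; the authoritative requirements are the asserts in the cpu/cuda
implementations. With DLIB_USE_CUDA defined at build time, the gradient call
now dispatches to cuda::layer_normalize_gradient, otherwise it falls back to
the CPU path exactly as before.

    #include <dlib/dnn.h>

    int main()
    {
        using namespace dlib;

        const double eps = 1e-5;

        // Two samples; layer norm statistics are computed per sample.
        resizable_tensor src(2, 3, 4, 4);
        tt::tensor_rand rnd;
        rnd.fill_gaussian(src);

        // Learned scale/shift. Shapes here (one value per element of a
        // sample) are an assumption for the sketch.
        resizable_tensor gamma(1, 3, 4, 4), beta(1, 3, 4, 4);
        gamma = 1;
        beta = 0;

        // Forward pass fills dest plus the per-sample means/invstds that
        // the gradient computation reuses.
        resizable_tensor dest, means, invstds;
        tt::layer_normalize(eps, dest, means, invstds, src, gamma, beta);

        // Pretend upstream gradient of all ones.
        resizable_tensor gradient_input;
        gradient_input.copy_size(dest);
        gradient_input = 1;

        // Output gradients; src_grad is zeroed on the assumption that the
        // tt:: convention accumulates into it.
        resizable_tensor src_grad, gamma_grad, beta_grad;
        src_grad.copy_size(src);
        src_grad = 0;
        gamma_grad.copy_size(gamma);
        beta_grad.copy_size(beta);

        // The call the patch routes to the GPU when DLIB_USE_CUDA is set.
        tt::layer_normalize_gradient(eps, gradient_input, means, invstds,
                                     src, gamma, src_grad, gamma_grad, beta_grad);

        return 0;
    }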