Use CUDA in LayerNorm gradient computation

I don't know how I could miss this.
pull/2570/head
Adrià Arrufat 2 years ago committed by Davis E. King
parent 3a267db577
commit 49314c12d9

@ -687,7 +687,11 @@ namespace dlib { namespace tt
        tensor& beta_grad
    )
    {
#ifdef DLIB_USE_CUDA
        cuda::layer_normalize_gradient(eps, gradient_input, means, invstds, src, gamma, src_grad, gamma_grad, beta_grad);
#else
        cpu::layer_normalize_gradient(eps, gradient_input, means, invstds, src, gamma, src_grad, gamma_grad, beta_grad);
#endif
    }
// ----------------------------------------------------------------------------------------

Loading…
Cancel
Save