ref: 5eaa4a504f865e73c0e480fb95113e67f9310ffa
parent: 5e045405739f8b58817a0ae1a97bceb5bd113dcf
author: Jean-Marc Valin <jmvalin@amazon.com>
date: Fri Jul 28 15:13:00 EDT 2023
Add Gated Linear Unit (GLU)
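
Adds compute_gated_activation(), which gates an element-wise
activation of the input with a sigmoid of a linear transform of
that same input: output = act(input) .* sigmoid(W*input). The
layer must have nb_inputs == nb_outputs.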
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -142,6 +142,21 @@
state[i] = h[i];
}
+/* Gated activation in the style of a GLU: output = act(input) .* sigmoid(W*input). */
+void compute_gated_activation(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+   int i;
+   float act1[MAX_INPUTS];
+   float act2[MAX_INPUTS];
+   celt_assert(layer->nb_inputs == layer->nb_outputs);
+   /* Compute the sigmoid gate into a temporary buffer so the result
+      is still correct when output aliases input. */
+   compute_linear(layer, act2, input);
+   compute_activation(act2, act2, layer->nb_outputs, ACTIVATION_SIGMOID);
+   compute_activation(act1, input, layer->nb_outputs, activation);
+   for (i=0;i<layer->nb_outputs;i++) output[i] = act1[i]*act2[i];
+}
+
void compute_activation(float *output, const float *input, int N, int activation)
{
int i;
--
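
For reference, a minimal standalone sketch of the same gating
computation, outside the nnet.c helpers (LinearLayer, compute_linear,
compute_activation). The size N, the weights W, the input x, and the
choice of tanh for the activation argument are made-up example values,
not taken from the patch:

/* Sketch: out[i] = tanh(x[i]) * sigmoid((W*x)[i]), assuming a square
 * weight matrix (nb_inputs == nb_outputs) and no bias. */
#include <math.h>
#include <stdio.h>

#define N 4

static float sigmoid(float x) { return 1.f/(1.f + expf(-x)); }

int main(void) {
   /* Hypothetical weights and input, for illustration only. */
   static const float W[N][N] = {
      { 0.1f, 0.2f,  0.0f, -0.1f},
      { 0.0f, 0.3f,  0.1f,  0.2f},
      {-0.2f, 0.1f,  0.4f,  0.0f},
      { 0.1f, 0.0f, -0.3f,  0.2f}
   };
   const float x[N] = {0.5f, -1.0f, 0.25f, 2.0f};
   float out[N];
   int i, j;
   for (i = 0; i < N; i++) {
      /* Gate path: sigmoid of the linear transform of the input. */
      float g = 0.f;
      for (j = 0; j < N; j++) g += W[i][j]*x[j];
      /* Value path: the element-wise activation of the input itself. */
      out[i] = tanhf(x[i]) * sigmoid(g);
   }
   for (i = 0; i < N; i++) printf("out[%d] = %f\n", i, out[i]);
   return 0;
}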