ref: 57f5681987bf1efd2718c712eceebf4b8e45054f
parent: 93d6c3975a8b7094b8cf1e6083a6e38bc190b9cc
author: Jean-Marc Valin <jmvalin@amazon.com>
date: Mon Jan 24 11:22:29 EST 2022
Add swish activation support
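
Swish is defined as swish(x) = x*sigmoid(x). The new vec_swish() helper
fills a temporary buffer with vec_sigmoid() and multiplies it element-wise
by the input, asserting that the vector length stays within
MAX_ACTIVATIONS (4096). A new ACTIVATION_SWISH constant in nnet.h selects
it from the activation dispatch in nnet.c.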
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -46,7 +46,18 @@
#define SOFTMAX_HACK
+#define MAX_ACTIVATIONS (4096)
+static OPUS_INLINE void vec_swish(float *y, const float *x, int N)
+{
+ int i;
+ float tmp[MAX_ACTIVATIONS];
+ celt_assert(N <= MAX_ACTIVATIONS);
+ vec_sigmoid(tmp, x, N);
+ for (i=0;i<N;i++)
+ y[i] = x[i]*tmp[i];
+}
+
static OPUS_INLINE float relu(float x)
{
return x < 0 ? 0 : x;
@@ -75,6 +86,8 @@
vec_sigmoid(output, input, N);
} else if (activation == ACTIVATION_TANH) {
vec_tanh(output, input, N);
+ } else if (activation == ACTIVATION_SWISH) {
+ vec_swish(output, input, N);
} else if (activation == ACTIVATION_RELU) {
for (i=0;i<N;i++)
output[i] = relu(input[i]);
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -36,6 +36,7 @@
#define ACTIVATION_TANH 2
#define ACTIVATION_RELU 3
#define ACTIVATION_SOFTMAX 4
+#define ACTIVATION_SWISH 5
typedef struct {
const float *bias;
--
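
Note (illustration only, not part of the patch): a minimal standalone
sketch of the swish computation added above, using a plain scalar sigmoid
in place of the codebase's vec_sigmoid().

   #include <math.h>
   #include <stdio.h>

   /* Scalar sigmoid stand-in for vec_sigmoid(). */
   static float sigmoid(float x) { return 1.f/(1.f + expf(-x)); }

   int main(void)
   {
      float x[4] = {-2.f, -0.5f, 0.5f, 2.f};
      int i;
      /* swish(x) = x*sigmoid(x): negative inputs are attenuated toward 0,
         large positive inputs pass through nearly unchanged. */
      for (i=0;i<4;i++)
         printf("swish(%g) = %g\n", x[i], x[i]*sigmoid(x[i]));
      return 0;
   }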