ref: b1f94b1e9229ffc801e6190775f563f3398ab27a
parent: 60d67b11126e87ec41ae09b34aa4a133a3af5f7c
author: Jean-Marc Valin <jmvalin@amazon.com>
date: Mon Jul 24 17:31:37 EDT 2023
Add compute_generic_dense() and missing prototypes
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -102,6 +102,12 @@
}
}
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+ compute_linear(layer, output, input);
+ compute_activation(output, output, layer->nb_outputs, activation);
+}
+
#define MAX_RNN_NEURONS_ALL IMAX(IMAX(MAX_RNN_NEURONS, PLC_MAX_RNN_NEURONS), DRED_MAX_RNN_NEURONS)
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -131,6 +131,11 @@
int dim;
} EmbeddingLayer;
+void compute_linear(const LinearLayer *linear, float *out, const float *in);
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation);
+void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *recurrent_weights, float *state, const float *in);
+void compute_generic_conv1d(const LinearLayer *layer, float *output, float *mem, const float *input, int input_size, int activation);
+
void compute_activation(float *output, const float *input, int N, int activation);
void _lpcnet_compute_dense(const DenseLayer *layer, float *output, const float *input);
--
⑨