#ifndef ACTIVATION_LAYER_HPP
#define ACTIVATION_LAYER_HPP

#include <cmath>

#include "layer.hpp"
#include "math.hpp"

namespace Layer{
/** Enumeration type for the different implemented activation maps. */
enum ActivationMap{
  Sigmoid /**< \f$x\mapsto \frac{1}{1+e^{-x}}\f$ */
};
/** Activation map. */
template<ActivationMap A> Real activation_map(Real);
/** Derivative of the activation map. */
template<ActivationMap A> Real activation_diff_map(Real);
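/* Design note: the two templates above are declared but not defined; only
 * explicit specializations (the Sigmoid pair at the end of this file) provide
 * definitions, so instantiating ActivationLayer with an activation map that
 * lacks a specialization fails at link time. */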
/**
 * Class for an activation layer.
 * The output vector is obtained by applying the activation map to each entry of the input vector.
 */
template<ActivationMap A> class ActivationLayer: public Layer{
public:
  ActivationLayer(size_t n);
  ~ActivationLayer() = default;
  /** \f$y[i]:=\alpha(x[i])\f$ where \f$\alpha\f$ is the activation map. */
  Vector feed_forward(Vector x) override;
  /** No-op: an activation layer has no trainable parameters, hence no gradient to reset. */
  void init_nabla() override {}
  /** \f$d[i]:=\alpha'(x[i])\times e[i]\f$ where \f$\alpha\f$ is the activation map and \f$e\f$ the output error vector. */
  Vector back_propagation(Vector e) override;
  /** No-op: an activation layer has no trainable parameters to update. */
  void update(Real eta) override {}
};
template<ActivationMap A>
inline
ActivationLayer<A>::ActivationLayer(size_t n): Layer(n,n){
}
template<ActivationMap A>
inline Vector
ActivationLayer<A>::feed_forward(Vector x_){
  x=x_; // cache the input: back_propagation needs it to evaluate the derivative
  for(size_t i=0;i<n;++i){
    y[i]=activation_map<A>(x[i]);
  }
  return y;
}
template<ActivationMap A>
inline Vector
ActivationLayer<A>::back_propagation(Vector e){
  for(size_t i=0;i<n;++i){
    d[i]=activation_diff_map<A>(x[i])*e[i]; // chain rule: scale the incoming error by the activation derivative
  }
  return d;
}
template<>
inline Real
activation_map<Sigmoid>(Real x){
  return 1.0/(1.0+std::exp(-x));
}
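/* The sigmoid derivative satisfies \f$\sigma'(x)=\sigma(x)\,(1-\sigma(x))\f$,
 * so the specialization below reuses the forward map instead of recomputing
 * the exponential. */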
template<>
inline Real
activation_diff_map<Sigmoid>(Real x){
  Real t=activation_map<Sigmoid>(x);
  return t*(1.0-t);
}
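/* Usage sketch (illustrative only; assumes a Vector type from math.hpp that
 * is indexable and sized, which this header does not show):
 *
 *   Layer::ActivationLayer<Layer::Sigmoid> act(3); // layer of width 3
 *   Vector y = act.feed_forward(x);                // y[i] = sigmoid(x[i])
 *   Vector d = act.back_propagation(e);            // d[i] = sigmoid'(x[i]) * e[i]
 */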
} // namespace Layer
#endif