@@ -32,6 +32,7 @@ export default class NeuralNetwork {
   static get defaults() {
     return {
+      leakyReluAlpha: 0.01,
       binaryThresh: 0.5,
       hiddenLayers: [3], // array of ints for the sizes of the hidden layers in the network
       activation: 'sigmoid' // Supported activation types ['sigmoid', 'relu', 'leaky-relu', 'tanh']
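The new leakyReluAlpha default makes the slope of the activation's negative half configurable per network instead of hard-coded. A minimal usage sketch (the option values are hypothetical, and it assumes the constructor merges user options over these defaults the way the other options are handled):

const net = new NeuralNetwork({
  hiddenLayers: [3],
  activation: 'leaky-relu',
  leakyReluAlpha: 0.05 // steeper negative slope than the 0.01 default
});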
@@ -249,7 +250,7 @@ export default class NeuralNetwork {
   _runInputLeakyRelu(input) {
     this.outputs[0] = input; // set output state of input layer
-
+    let alpha = this.leakyReluAlpha;
     let output = null;
     for (let layer = 1; layer <= this.outputLayer; layer++) {
       for (let node = 0; node < this.sizes[layer]; node++) {
@@ -260,7 +261,7 @@ export default class NeuralNetwork {
           sum += weights[k] * input[k];
         }
-        //leaky relu
-        this.outputs[layer][node] = (sum < 0 ? 0 : 0.01 * sum);
+        // leaky relu: identity for positive sums, alpha * sum for negative ones
+        this.outputs[layer][node] = (sum < 0 ? alpha * sum : sum);
       }
       output = input = this.outputs[layer];
     }
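For reference, the activation this loop applies, written as a standalone sketch: leaky ReLU passes positive sums through unchanged and scales negative sums by alpha.

const leakyRelu = (sum, alpha = 0.01) => sum < 0 ? alpha * sum : sum;

leakyRelu(2);       // 2
leakyRelu(-2);      // -0.02
leakyRelu(-2, 0.1); // -0.2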
@@ -557,6 +558,7 @@ export default class NeuralNetwork {
    * @param target
    */
   _calculateDeltasLeakyRelu(target) {
+    let alpha = this.leakyReluAlpha;
     for (let layer = this.outputLayer; layer >= 0; layer--) {
       for (let node = 0; node < this.sizes[layer]; node++) {
         let output = this.outputs[layer][node];
@@ -572,7 +574,7 @@ export default class NeuralNetwork {
           }
         }
         this.errors[layer][node] = error;
-        this.deltas[layer][node] = output > 0 ? error : 0.01 * error;
+        this.deltas[layer][node] = output > 0 ? error : alpha * error;
       }
     }
   }
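The delta applies the matching derivative: 1 where the node's output was positive, alpha elsewhere, so error flowing back through an inactive node is damped rather than zeroed outright. As an illustrative standalone helper (not part of the class), the new deltas line is equivalent to:

const leakyReluDerivative = (output, alpha = 0.01) => output > 0 ? 1 : alpha;
// this.deltas[layer][node] = error * leakyReluDerivative(output, alpha);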
@@ -933,6 +935,7 @@ export default class NeuralNetwork {
    */
   toFunction() {
     const activation = this.activation;
+    const leakyReluAlpha = this.leakyReluAlpha;
     let needsVar = false;
     function nodeHandle(layers, layerNumber, nodeKey) {
       if (layerNumber === 0) {
@@ -962,7 +965,7 @@ export default class NeuralNetwork {
        }
        case 'leaky-relu': {
          needsVar = true;
-          return `((v=${result.join('')})<0?0:0.01*v)`;
+          return `((v=${result.join('')})<0?${leakyReluAlpha}*v:v)`;
        }
        case 'tanh':
          return `Math.tanh(${result.join('')})`;
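With this change, toFunction() bakes the instance's leakyReluAlpha into the generated source instead of a literal 0.01. For leakyReluAlpha = 0.05 and hypothetical weights, the template above would emit an expression like:

((v=0.5*input[0]+0.25*input[1]+0.1)<0?0.05*v:v)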
@@ -988,4 +991,4 @@ export default class NeuralNetwork {
 
     return new Function('input', `${needsVar ? 'var v;' : ''}return ${result};`);
   }
-}
+}