From 8f96f71f2bd4f78e58f22b43e814719511b47f65 Mon Sep 17 00:00:00 2001 From: Alex Black Date: Tue, 19 Nov 2019 00:12:59 +1100 Subject: [PATCH] Mish gradient check (#57) Signed-off-by: AlexDBlack --- .../org/deeplearning4j/gradientcheck/GradientCheckTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java index 75a0ad759..d506bb233 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java @@ -142,7 +142,7 @@ public class GradientCheckTests extends BaseDL4JTest { // (a) activation function // (b) Whether to test at random initialization, or after some learning (i.e., 'characteristic mode of operation') // (c) Loss function (with specified output activations) - Activation[] activFns = {Activation.SIGMOID, Activation.TANH}; + Activation[] activFns = {Activation.SIGMOID, Activation.TANH, Activation.MISH}; boolean[] characteristic = {false, true}; //If true: run some backprop steps first LossFunction[] lossFunctions = {LossFunction.MCXENT, LossFunction.MSE};