diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java index f1849aa96..bf3b1fd38 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java @@ -26,16 +26,17 @@ import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.Convolution3D; -import org.deeplearning4j.nn.conf.layers.ConvolutionLayer; +import org.deeplearning4j.nn.workspace.ArrayType; +import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.api.ops.DynamicCustomOp; import org.nd4j.linalg.api.ops.Op; import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastCopyOp; -import org.nd4j.linalg.api.ops.impl.layers.convolution.LegacyPooling2D; +import org.nd4j.linalg.api.ops.impl.layers.convolution.MaxPooling2D; +import org.nd4j.linalg.api.ops.impl.layers.convolution.config.Pooling2DConfig; import org.nd4j.linalg.api.shape.Shape; import org.nd4j.linalg.factory.Nd4j; -import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; -import org.deeplearning4j.nn.workspace.ArrayType; import java.util.Arrays; @@ -624,7 +625,7 @@ public class ConvolutionUtils { INDArray reshaped4d = in.reshape(in.size(0), 1, in.size(1), 1); int[] outSize; - int[] pad; + int[] pad = null; int[] k = new int[]{kernel,1}; int[] s = new int[]{stride, 1}; int[] d = new int[]{dilation, 1}; @@ -638,8 +639,15 @@ public class ConvolutionUtils { INDArray output = Nd4j.createUninitialized(new int[]{(int)in.size(0), 1, outH, 1}, 'c'); - Op op = new LegacyPooling2D(reshaped4d, kernel, 1, stride, 1, padding, 0, dilation, 1, - cm 
== ConvolutionMode.Same, LegacyPooling2D.Pooling2DType.MAX, 0.0, output); + DynamicCustomOp op = new MaxPooling2D(reshaped4d, output, Pooling2DConfig.builder() + .kH(k[0]).kW(k[1]) + .sH(s[0]).sW(s[1]) + .pH(pad == null ? 0 : pad[0]).pW(pad == null ? 0 : pad[1]) + .dH(d[0]).dW(d[1]) + .isSameMode(cm == ConvolutionMode.Same) + .isNHWC(false) + .build()); + Nd4j.getExecutioner().exec(op); return output.reshape('c', in.size(0), outH); } @@ -717,10 +725,18 @@ public class ConvolutionUtils { } long[] outArraySize = new long[]{inMask.size(0), inMask.size(1), outSize[0], outSize[1]}; - INDArray outMask = Nd4j.createUninitialized(outArraySize); - Op op = new LegacyPooling2D(inMask, kernel[0], kernel[1], stride[0], stride[1], padding[0], padding[1], dilation[0], dilation[1], - convolutionMode == ConvolutionMode.Same, LegacyPooling2D.Pooling2DType.MAX, 0.0, outMask); - Nd4j.getExecutioner().exec(op); + INDArray outMask = Nd4j.createUninitialized(inMask.dataType(), outArraySize); + + DynamicCustomOp op = new MaxPooling2D(inMask, outMask, Pooling2DConfig.builder() + .kH(k[0]).kW(k[1]) + .sH(s[0]).sW(s[1]) + .pH(p[0]).pW(p[1]) + .dH(d[0]).dW(d[1]) + .isSameMode(convolutionMode == ConvolutionMode.Same) + .isNHWC(false) + .build()); + + Nd4j.exec(op); return outMask; } } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java index 7dcd66530..af7829eab 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java @@ -846,7 +846,6 @@ public class OpValidation { RestoreV2.class, SaveV2.class, ScalarSetValue.class, //Not used in SameDiff (it's a "set to X if less than X" type op, redundant given other ops) - LegacyPooling2D.class, //Deprecated; not used in
samediff BinomialDistributionEx.class, //Redundant? //Exclude manual broadcast ops: SameDiff uses auto broadcasting diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java index 78e7e74b2..723b77511 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java @@ -119,7 +119,6 @@ public class ImportClassMapping { org.nd4j.linalg.api.ops.impl.layers.convolution.DepthwiseConv2D.class, org.nd4j.linalg.api.ops.impl.layers.convolution.Im2col.class, org.nd4j.linalg.api.ops.impl.layers.convolution.Im2colBp.class, - org.nd4j.linalg.api.ops.impl.layers.convolution.LegacyPooling2D.class, org.nd4j.linalg.api.ops.impl.layers.convolution.LocalResponseNormalization.class, org.nd4j.linalg.api.ops.impl.layers.convolution.LocalResponseNormalizationDerivative.class, org.nd4j.linalg.api.ops.impl.layers.convolution.MaxPooling2D.class, diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LegacyPooling2D.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LegacyPooling2D.java deleted file mode 100644 index 2ef17c0fe..000000000 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LegacyPooling2D.java +++ /dev/null @@ -1,116 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. 
- * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.api.ops.impl.layers.convolution; - -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.nd4j.autodiff.samediff.SDVariable; -import org.nd4j.imports.NoOpNameFoundException; -import org.nd4j.linalg.api.buffer.DataBuffer; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.BaseTransformFloatOp; -import org.nd4j.linalg.api.ops.BaseTransformOp; -import org.nd4j.linalg.convolution.Convolution; -import org.nd4j.linalg.factory.Nd4j; - -import java.util.List; - - -/** - * Legacy version of the Pooling2D operation - * @deprecated Note: This operation will be removed in a future release - */ -@Deprecated -@Slf4j -public class LegacyPooling2D extends BaseTransformFloatOp { - - public enum Pooling2DType { - MAX, AVG, PNORM, - } - - private int kh, kw, sy, sx, ph, pw, dh, dw; - private Pooling2DType type; - boolean isSameMode; - double extra; - @Getter protected DataBuffer im2colShape; - - public LegacyPooling2D() {} - - public LegacyPooling2D(INDArray x, int kh, int kw, int sy, int sx, int ph, int pw, int dh, int dw, boolean isSameMode, - Pooling2DType type, double extra, INDArray z) { - super(x); - - // FIXME: int csast - int outHeight = Convolution.outputSize((int) x.size(2), kh, sy, ph, dh, isSameMode); - int outWidth = Convolution.outputSize((int) x.size(3), kw, sx, pw, dw, isSameMode); - - this.kh = kh; - this.kw = kw; - this.sy = sy; - this.sx = sx; - this.ph = ph; - this.pw = pw; - this.dh = dh; - this.dw = dw; - this.isSameMode = isSameMode; - this.type 
= type; - this.z = z; - this.extra = extra; - this.im2colShape = getIm2ColShape(x, kh, kw, outHeight, outWidth); - extraArgs = this.extraArgs(); - } - - @Override - public int opNum() { - return 2; - } - - @Override - public String opName(){ - return "legacypooling2d"; - } - - @Override - public Object[] extraArgs() { - return new Object[] {kh, kw, sy, sx, ph, pw, dh, dw, isSameMode ? 1.0 : 0.0, type.ordinal(), extra}; - } - - private static DataBuffer getIm2ColShape(INDArray img, int kernelHeight, int kernelWidth, int outHeight, int outWidth) { - //number of images - long n = img.size(0); - //number of channels (depth) - long c = img.size(1); - - return Nd4j.getShapeInfoProvider().createShapeInformation(new long[] {n, c, kernelHeight, kernelWidth, outHeight, outWidth}, 'c', img.dataType()).getFirst(); - } - - @Override - public String onnxName() { - throw new NoOpNameFoundException("Not supported"); - } - - @Override - public String tensorflowName() { - throw new NoOpNameFoundException("Not supported"); - } - - @Override - public List doDiff(List f1) { - throw new UnsupportedOperationException("Not supported"); - } - -} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/MaxPooling2D.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/MaxPooling2D.java index c543ee23b..f996fc29f 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/MaxPooling2D.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/MaxPooling2D.java @@ -18,6 +18,7 @@ package org.nd4j.linalg.api.ops.impl.layers.convolution; import lombok.Builder; import lombok.Getter; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import lombok.val; import onnx.OnnxProto3; @@ -69,6 +70,14 @@ public class MaxPooling2D extends DynamicCustomOp { addArgs(); 
} + public MaxPooling2D(INDArray input, INDArray output, @NonNull Pooling2DConfig config){ + super(null, new INDArray[]{input}, output == null ? null : new INDArray[]{output}); + config.setType(Pooling2D.Pooling2DType.MAX); + + this.config = config; + addArgs(); + } + @Override public boolean isConfigProperties() { return true; diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/SpecialTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/SpecialTests.java index 4b7ed9990..e3aeb6fbd 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/SpecialTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/SpecialTests.java @@ -32,7 +32,6 @@ import org.nd4j.linalg.api.memory.enums.ResetPolicy; import org.nd4j.linalg.api.memory.enums.SpillPolicy; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.DynamicCustomOp; -import org.nd4j.linalg.api.ops.impl.layers.convolution.LegacyPooling2D; import org.nd4j.linalg.api.ops.impl.reduce.longer.MatchCondition; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.exception.ND4JIllegalStateException; @@ -488,52 +487,6 @@ public class SpecialTests extends BaseNd4jTest { System.out.println(out); } - - @Test - public void legacyPooling2dTest_double(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.DOUBLE, 1,1,2,2).assign(-119); - - Nd4j.getExecutioner().commit(); - - val op = new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - - - @Test - public void legacyPooling2dTes_float(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.FLOAT, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.FLOAT, 1,1,2,2); - - val op = 
new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - - @Test - public void legacyPooling2dTes_half(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.HALF, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.HALF, 1,1,2,2); - - val op = new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - @Test public void testYoloStyle(){ WorkspaceConfiguration WS_ALL_LAYERS_ACT_CONFIG = WorkspaceConfiguration.builder() diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/YuriiTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/YuriiTests.java deleted file mode 100644 index a8462effb..000000000 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/YuriiTests.java +++ /dev/null @@ -1,87 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.crash; - -import lombok.extern.slf4j.Slf4j; -import lombok.val; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.nd4j.linalg.BaseNd4jTest; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.impl.layers.convolution.LegacyPooling2D; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.factory.Nd4jBackend; - -@Slf4j -@RunWith(Parameterized.class) -public class YuriiTests extends BaseNd4jTest { - public YuriiTests(Nd4jBackend backend) { - super(backend); - } - - @Test - public void legacyPooling2dTest_double(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.DOUBLE, 1,1,2,2).assign(-119); - - Nd4j.getExecutioner().commit(); - - val op = new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - - - @Test - public void legacyPooling2dTes_float(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.FLOAT, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.FLOAT, 1,1,2,2); - - val op = new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - - @Test - public void legacyPooling2dTes_half(){ - - Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(DataType.HALF, new int[]{1,1,3,3}); - INDArray out = Nd4j.create(DataType.HALF, 1,1,2,2); - - val op = new LegacyPooling2D(in, 2, 2, 1, 1, 0, 0, 1, 1, true, 
LegacyPooling2D.Pooling2DType.MAX, 0.0, out); - Nd4j.getExecutioner().exec(op); - Nd4j.getExecutioner().commit(); - System.out.println(in); - System.out.println(out); - } - - @Override - public char ordering() { - return 'c'; - } -} diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpConstructorTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpConstructorTests.java new file mode 100644 index 000000000..baef89a1a --- /dev/null +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpConstructorTests.java @@ -0,0 +1,73 @@ +package org.nd4j.linalg.ops; + +import org.junit.Test; +import org.nd4j.autodiff.functions.DifferentialFunction; +import org.nd4j.autodiff.samediff.SDVariable; +import org.nd4j.linalg.BaseNd4jTest; +import org.nd4j.linalg.factory.Nd4jBackend; +import org.nd4j.linalg.lossfunctions.ILossFunction; +import org.reflections.Reflections; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.util.FilterBuilder; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Modifier; +import java.util.Set; + +import static org.junit.Assert.assertEquals; + +public class OpConstructorTests extends BaseNd4jTest { + + public OpConstructorTests(Nd4jBackend backend) { + super(backend); + } + + @Test + public void checkForINDArrayConstructors() throws Exception { + /* + Check that all op classes have at least one INDArray or INDArray[] constructor, so they can actually + be used outside of SameDiff + */ + + Reflections f = new Reflections(new ConfigurationBuilder() + .filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix("org.nd4j.*")).exclude("^(?!.*\\.class$).*$")) + .setUrls(ClasspathHelper.forPackage("org.nd4j")).setScanners(new SubTypesScanner())); + + Set> classSet = f.getSubTypesOf(DifferentialFunction.class); + + int count = 0; + for(Class c : classSet){ + 
if(Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers()) || c == SDVariable.class || ILossFunction.class.isAssignableFrom(c)) + continue; + +// System.out.println(c.getName()); + + Constructor[] constructors = c.getConstructors(); + boolean foundINDArray = false; + for( int i=0; i co = constructors[i]; + String str = co.toGenericString(); //This is a convenience hack for checking - returns strings like "public org.nd4j.linalg.api.ops.impl.reduce.floating.Norm2(org.nd4j.linalg.api.ndarray.INDArray,int...)" + if(str.contains("INDArray") && !str.contains("SameDiff")){ + foundINDArray = true; + break; + } + } + + if(!foundINDArray){ + System.out.println("No INDArray constructor: " + c.getName()); + count++; + } + } + + assertEquals(0, count); + + } + + @Override + public char ordering(){ + return 'c'; + } + +} diff --git a/nd4s/pom.xml b/nd4s/pom.xml index f1841014f..63e5495a7 100644 --- a/nd4s/pom.xml +++ b/nd4s/pom.xml @@ -80,8 +80,9 @@ org.nd4j - nd4j-native-platform + nd4j-native ${nd4j.version} + test ch.qos.logback diff --git a/nd4s/src/main/scala/org/nd4s/Implicits.scala b/nd4s/src/main/scala/org/nd4s/Implicits.scala index e254cd60c..58f4f245a 100644 --- a/nd4s/src/main/scala/org/nd4s/Implicits.scala +++ b/nd4s/src/main/scala/org/nd4s/Implicits.scala @@ -46,8 +46,8 @@ object Implicits { implicit def jfloatColl2INDArray(s: Seq[java.lang.Float]): FloatArray2INDArray = new FloatArray2INDArray(s.map(x => x: Float)(breakOut)) class FloatArray2INDArray(val underlying: Array[Float]) extends AnyVal { - def mkNDArray(shape: Array[Int], ord: NDOrdering = NDOrdering(Nd4j.order()), offset: Int = 0): INDArray = - Nd4j.create(underlying, shape, ord.value, offset) + def mkNDArray(shape: Array[Int], ord: NDOrdering = NDOrdering(Nd4j.order())): INDArray = + Nd4j.create(underlying, shape, ord.value) def asNDArray(shape: Int*): INDArray = Nd4j.create(underlying.toArray, shape.toArray: _*) diff --git a/nd4s/src/main/scala/org/nd4s/NDArrayEvidence.scala 
b/nd4s/src/main/scala/org/nd4s/NDArrayEvidence.scala index 6cfbb3189..5151fcb34 100644 --- a/nd4s/src/main/scala/org/nd4s/NDArrayEvidence.scala +++ b/nd4s/src/main/scala/org/nd4s/NDArrayEvidence.scala @@ -327,7 +327,7 @@ case object FloatNDArrayEvidence extends RealNDArrayEvidence[Float] { arr.asNDArray(shape: _*) override def create(arr: Array[Float], shape: Array[Int], ordering: NDOrdering, offset: Int): INDArray = - arr.mkNDArray(shape, ordering, offset) + arr.mkNDArray(shape, ordering) override def update(underlying: INDArray, ir: Array[IndexRange], num: Float): INDArray = { if (ir.length == 1 && !ir.head.hasNegative && ir.head