Commit 758e0ab

Updates for snapshots
Signed-off-by: Alex Black <[email protected]>
1 parent a4594ba commit 758e0ab

10 files changed: 41 additions, 29 deletions

dl4j-cuda-specific-examples/pom.xml
Lines changed: 5 additions & 5 deletions

@@ -29,7 +29,7 @@

    <name>DeepLearning4j CUDA special examples</name>
    <properties>
-       <nd4j.backend>nd4j-cuda-10.2-platform</nd4j.backend>
+       <nd4j.backend>nd4j-cuda-10.2</nd4j.backend>
    </properties>

    <repositories>
@@ -58,22 +58,22 @@
    <dependencies>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-9.2-platform</artifactId>
+           <artifactId>nd4j-cuda-9.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.0-platform</artifactId>
+           <artifactId>nd4j-cuda-10.0</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.1-platform</artifactId>
+           <artifactId>nd4j-cuda-10.1</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.2-platform</artifactId>
+           <artifactId>nd4j-cuda-10.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
    </dependencies>

dl4j-examples/pom.xml
Lines changed: 5 additions & 5 deletions

@@ -54,27 +54,27 @@
    <dependencies>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-native-platform</artifactId>
+           <artifactId>nd4j-native</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-9.2-platform</artifactId>
+           <artifactId>nd4j-cuda-9.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.0-platform</artifactId>
+           <artifactId>nd4j-cuda-10.0</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.1-platform</artifactId>
+           <artifactId>nd4j-cuda-10.1</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.2-platform</artifactId>
+           <artifactId>nd4j-cuda-10.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
    </dependencies>

dl4j-examples/src/main/java/org/deeplearning4j/examples/modelimport/keras/DeepMojiAttentionLayer.java
Lines changed: 1 addition & 1 deletion

@@ -83,7 +83,7 @@ public void setNIn(InputType inputType, boolean override) {
    public SDVariable defineLayer(SameDiff sd, SDVariable layerInput, Map<String, SDVariable> paramTable, SDVariable mask) {
        SDVariable weights = paramTable.get(DefaultParamInitializer.WEIGHT_KEY);

-       SDVariable logits = sd.tensorMmul(layerInput, weights, new int[][] { {2}, {0}});
+       SDVariable logits = sd.tensorMmul(layerInput, weights, new int[]{2}, new int[]{0});
        SDVariable reshapedLogits = sd.reshape(logits, layerInput.getShape()[0], layerInput.getShape()[1]);
        SDVariable ai = sd.math().exp(reshapedLogits);
        SDVariable aiSum = sd.sum(ai, 1);
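
A minimal, self-contained sketch of the updated tensorMmul signature used in this change: the contraction dimensions of each input are now passed as two separate int[] arguments rather than a single int[][]. The shapes and variable names below are illustrative assumptions, not values taken from the example.

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.factory.Nd4j;

public class TensorMmulSketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();
        // [minibatch, timeSteps, features] input and a [features, 1] weight (illustrative shapes)
        SDVariable input = sd.placeHolder("input", DataType.FLOAT, 2, 5, 3);
        SDVariable w = sd.var("w", Nd4j.ones(DataType.FLOAT, 3, 1));
        // Contract dimension 2 of 'input' with dimension 0 of 'w'; the result has shape [2, 5, 1]
        SDVariable logits = sd.tensorMmul(input, w, new int[]{2}, new int[]{0});
        System.out.println(logits);
    }
}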

dl4j-examples/src/main/java/org/deeplearning4j/examples/samediff/dl4j/Ex3LambdaVertex.java
Lines changed: 14 additions & 2 deletions

@@ -119,6 +119,7 @@ public static void validateLayer() throws Exception {
            .seed(12345)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
+           .dataType(DataType.DOUBLE)
            .graphBuilder()
            .addInputs("in")
            //Add some standard DL4J layers:
@@ -161,8 +162,19 @@
        double max_relative_error = 1e-5; //Maximum relative error allowable for each parameter
        double min_absolute_error = 1e-8; //Minimum absolute error, to avoid failures on 0 vs 1e-30, for example.

-       boolean gradOk = GradientCheckUtil.checkGradients(net, gradient_check_epsilon, max_relative_error, min_absolute_error, print,
-           return_on_first_failure, new INDArray[]{testFeatures}, new INDArray[]{testLabels});
+       // boolean gradOk = GradientCheckUtil.checkGradients(net, gradient_check_epsilon, max_relative_error, min_absolute_error, print,
+       //     return_on_first_failure, new INDArray[]{testFeatures}, new INDArray[]{testLabels});
+
+       boolean gradOk = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig()
+           .net(net)
+           .epsilon(gradient_check_epsilon)
+           .maxRelError(max_relative_error)
+           .minAbsoluteError(min_absolute_error)
+           .print(print ? GradientCheckUtil.PrintMode.ALL : GradientCheckUtil.PrintMode.FAILURES_ONLY)
+           .exitOnFirstError(return_on_first_failure)
+           .inputs(new INDArray[]{testFeatures})
+           .labels(new INDArray[]{testLabels}));
+
        if(!gradOk){
            throw new IllegalStateException("Gradient check failed");
        }
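
The added .dataType(DataType.DOUBLE) builds the network in double precision, which gradient checks generally need in order to keep numerical error below the thresholds above. A small sketch of the idea, assuming the standard ND4J APIs; the process-wide default call is an illustrative alternative, not something this commit changes.

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.factory.Nd4j;

public class DoublePrecisionSketch {
    public static void main(String[] args) {
        // Option used in this commit: set the data type on the network configuration builder,
        // e.g. new NeuralNetConfiguration.Builder().dataType(DataType.DOUBLE)...
        // Illustrative alternative (assumption): set the process-wide default floating point type.
        Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);
        System.out.println("Default data type: " + Nd4j.dataType());
    }
}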

dl4j-examples/src/main/java/org/deeplearning4j/examples/samediff/tfimport/SameDiffTransferLearningExample.java
Lines changed: 1 addition & 1 deletion

@@ -186,7 +186,7 @@ We want to replace the reshapes (unneeded and the wrong shape) and the softmax (
        SDVariable outputs = sd1.nn().softmax("Output", input);

        // we need a loss to train on, the tensorflow model doesn't come with one
-       SDVariable loss = sd1.loss().softmaxCrossEntropy("Loss", labels, input);
+       SDVariable loss = sd1.loss().softmaxCrossEntropy("Loss", labels, input, null);

        logits.close();
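
The loss call now takes a fourth argument, assumed here to be the optional weights variable, with null meaning no per-example weighting. A minimal sketch of the same pattern on a fresh graph, with illustrative names and shapes:

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;

public class SoftmaxCrossEntropySketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();
        SDVariable logits = sd.placeHolder("logits", DataType.FLOAT, -1, 10);
        SDVariable labels = sd.placeHolder("labels", DataType.FLOAT, -1, 10); // one-hot labels
        // Fourth argument (assumed: optional weights) left as null, as in the diff above
        SDVariable loss = sd.loss().softmaxCrossEntropy("loss", labels, logits, null);
        sd.setLossVariables(loss);
        System.out.println(sd.summary());
    }
}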

dl4j-examples/src/main/java/org/deeplearning4j/examples/samediff/training/SameDiffMNISTTrainingExample.java
Lines changed: 1 addition & 1 deletion

@@ -70,7 +70,7 @@ static SameDiff makeMNISTNet(){
        SDVariable z = sd.nn().linear("z", flat, wOut, bOut);

        // softmax crossentropy loss function
-       SDVariable loss = sd.loss().softmaxCrossEntropy("loss", label, z);
+       SDVariable loss = sd.loss().softmaxCrossEntropy("loss", label, z, null);

        //noinspection unused
        SDVariable out = sd.nn().softmax("out", z, 1);

dl4j-examples/src/main/java/org/deeplearning4j/examples/samediff/training/SameDiffTrainingExample.java
Lines changed: 1 addition & 1 deletion

@@ -67,7 +67,7 @@ public static void main(String[] args) throws Exception {
        SDVariable softmax = sd.nn().softmax("softmax", z1);

        //Define loss function:
-       SDVariable diff = sd.f().squaredDifference(softmax, label);
+       SDVariable diff = sd.math.squaredDifference(softmax, label);
        SDVariable lossMse = diff.mean();

        sd.setLossVariables(lossMse);
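
This change appears to swap the internal sd.f() factory for the public math namespace (the sd.math field, or equivalently sd.math(), as used elsewhere in these examples). A minimal sketch of the same squared-difference MSE loss on a fresh graph, with illustrative names and shapes:

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;

public class SquaredDifferenceLossSketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();
        SDVariable pred = sd.placeHolder("pred", DataType.FLOAT, -1, 3);
        SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
        // Element-wise (pred - label)^2 via the public math namespace, then the mean over all elements
        SDVariable diff = sd.math.squaredDifference(pred, label);
        SDVariable lossMse = diff.mean();
        sd.setLossVariables(lossMse);
        System.out.println(sd.summary());
    }
}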

lstm-hdfs/pom.xml
Lines changed: 3 additions & 3 deletions

@@ -77,7 +77,7 @@
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-native-platform</artifactId>
+           <artifactId>nd4j-native</artifactId>
            <version>${nd4j.version}</version>
            <scope>test</scope>
        </dependency>
@@ -88,7 +88,7 @@
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-9.2-platform</artifactId>
+           <artifactId>nd4j-cuda-9.2</artifactId>
            <version>${nd4j.version}</version>
            <scope>test</scope>
        </dependency>
@@ -153,7 +153,7 @@
        <!-- ND4J -->
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-native-platform</artifactId>
+           <artifactId>nd4j-native</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>

pom.xml
Lines changed: 5 additions & 5 deletions

@@ -34,11 +34,11 @@


        <java.version>1.8</java.version>
-       <nd4j.version>1.0.0-beta6</nd4j.version>
-       <dl4j.version>1.0.0-beta6</dl4j.version>
-       <datavec.version>1.0.0-beta6</datavec.version>
-       <arbiter.version>1.0.0-beta6</arbiter.version>
-       <rl4j.version>1.0.0-beta6</rl4j.version>
+       <nd4j.version>1.0.0-SNAPSHOT</nd4j.version>
+       <dl4j.version>1.0.0-SNAPSHOT</dl4j.version>
+       <datavec.version>1.0.0-SNAPSHOT</datavec.version>
+       <arbiter.version>1.0.0-SNAPSHOT</arbiter.version>
+       <rl4j.version>1.0.0-SNAPSHOT</rl4j.version>

        <!-- Scala binary version: DL4J's Spark and UI functionality are released with both Scala 2.10 and 2.11 support -->
        <scala.binary.version>2.11</scala.binary.version>

rl4j-examples/pom.xml
Lines changed: 5 additions & 5 deletions

@@ -59,27 +59,27 @@
    <dependencies>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-native-platform</artifactId>
+           <artifactId>nd4j-native</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-9.2-platform</artifactId>
+           <artifactId>nd4j-cuda-9.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.0-platform</artifactId>
+           <artifactId>nd4j-cuda-10.0</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.1-platform</artifactId>
+           <artifactId>nd4j-cuda-10.1</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.nd4j</groupId>
-           <artifactId>nd4j-cuda-10.2-platform</artifactId>
+           <artifactId>nd4j-cuda-10.2</artifactId>
            <version>${nd4j.version}</version>
        </dependency>
    </dependencies>
