1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 4731159769089111040
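
The code excerpts in this report manage tensors through the reference-counting discipline of com.simiacryptus.ref.lang (see the ReferenceCountingBase subreport above): every addRef() is paired with a freeRef(), typically in a finally block, so that native and GPU buffers are released deterministically. As a rough sketch of that discipline, using a simplified stand-in class rather than the library's actual implementation:

    import java.util.concurrent.atomic.AtomicInteger;

    // Minimal illustration of the addRef()/freeRef() pairing seen in the code
    // excerpts below. This is a sketch of the discipline, not
    // com.simiacryptus.ref.lang.ReferenceCountingBase itself.
    class RefCountedSketch implements AutoCloseable {
      private final AtomicInteger refs = new AtomicInteger(1);

      /** Claim an additional reference; the caller must later call freeRef(). */
      public RefCountedSketch addRef() {
        refs.incrementAndGet();
        return this;
      }

      /** Release one reference; resources are freed when the count reaches zero. */
      public void freeRef() {
        if (refs.decrementAndGet() == 0) {
          // release native/GPU buffers here
        }
      }

      @Override
      public void close() {
        freeRef();
      }
    }

Each excerpt below follows this pairing when it streams tensors, pretty-prints them, and frees them.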

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output (a simplified sketch of the input-learning setup follows the data dump below):

TrainingTester.java:332 executed in 0.05 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.708, -0.024, -0.908 ], [ 1.56, 1.928, -0.648 ], [ 1.92, 0.416, -0.38 ], [ 1.148, 0.064, 0.244 ], [ -0.864, -0.072, -1.176 ], [ -0.536, 0.86, 1.068 ], [ -0.7, -0.012, -0.904 ], [ -0.392, -0.62, 1.168 ], ... ],
    	[ [ -0.02, -0.284, -0.896 ], [ -1.792, 1.156, -0.224 ], [ 1.22, 1.476, 1.756 ], [ -0.852, 0.324, 0.892 ], [ 0.02, 1.136, -1.324 ], [ -1.792, 1.488, -1.436 ], [ 1.984, -0.544, 0.52 ], [ -0.716, 0.196, 1.024 ], ... ],
    	[ [ -0.452, -0.512, 1.38 ], [ -0.212, -0.796, 1.344 ], [ 1.74, 1.688, 0.936 ], [ 0.816, 1.244, -0.728 ], [ -0.016, -1.472, 1.92 ], [ -1.084, -1.5, -0.324 ], [ -1.716, -0.136, 0.772 ], [ 0.268, 1.996, 0.32 ], ... ],
    	[ [ -0.188, 1.144, 0.596 ], [ -0.536, -0.936, 0.336 ], [ -1.036, 1.872, 0.316 ], [ -1.032, 1.312, -0.496 ], [ 0.292, -0.02, -1.504 ], [ -0.904, 1.124, 1.568 ], [ 0.884, 1.872, -0.696 ], [ -1.592, 1.172, -0.484 ], ... ],
    	[ [ -0.624, 0.724, 1.436 ], [ 1.356, -1.676, 1.292 ], [ 1.208, 0.164, -1.048 ], [ 1.588, -0.632, -1.904 ], [ 0.328, 1.832, 0.388 ], [ 1.308, 0.94, 0.076 ], [ -1.82, 0.048, 0.424 ], [ 0.972, -1.36, 1.484 ], ... ],
    	[ [ -0.044, 1.164, -1.208 ], [ -1.416, 1.236, -1.216 ], [ 1.624, -0.172, 0.74 ], [ -1.672, -1.048, 0.96 ], [ 1.164, -1.432, 1.732 ], [ 1.728, -1.712, -1.288 ], [ -0.368, 0.232, -1.7 ], [ 0.824, 1.024, -0.44 ], ... ],
    	[ [ -0.08, 1.06, 1.248 ], [ 0.104, 1.252, 1.576 ], [ 1.628, -0.364, 1.852 ], [ -1.148, -0.7, -1.896 ], [ 1.772, -0.508, -1.22 ], [ -0.812, 0.248, -1.448 ], [ -1.944, 1.9, -1.316 ], [ 0.24, -1.948, -1.484 ], ... ],
    	[ [ 0.368, 1.7, -1.78 ], [ -0.556, 0.24, 1.868 ], [ 1.76, 0.096, 0.716 ], [ -0.836, -1.16, -1.956 ], [ 0.208, 1.472, 0.192 ], [ 1.92, 1.908, 0.492 ], [ 1.196, -0.196, -1.804 ], [ 1.256, 0.02, 1.428 ], ... ],
    	...
    ]
    [
    	[ [ -0.832, 0.44, -0.804 ] ]
    ]
    [
    	[ [ -0.692, -0.192, -1.004 ], [ 0.172, -1.552, 0.912 ], [ 0.288, 0.292, -0.096 ], [ 1.372, 1.988, 1.188 ], [ 1.424, -0.908, -0.476 ], [ -0.732, 1.084, -0.584 ], [ -1.284, -0.944, 0.716 ], [ 0.028, -0.836, 0.748 ], ... ],
    	[ [ 0.98, -1.516, -1.016 ], [ 0.872, -1.644, -0.616 ], [ -0.536, -1.44, -1.516 ], [ -0.296, 1.46, 0.784 ], [ 0.74, -1.116, -1.536 ], [ 0.14, -1.688, -0.284 ], [ 1.776, -0.028, -1.444 ], [ 0.708, 0.76, -1.372 ], ... ],
    	[ [ -1.952, 1.984, -0.328 ], [ 1.956, 1.092, 0.048 ], [ -0.86, -1.796, 0.668 ], [ -0.424, 1.436, -0.564 ], [ -1.508, -0.196, -1.128 ], [ 0.512, 0.84, 0.364 ], [ 1.144, 1.072, 0.696 ], [ 0.06, 0.288, -0.548 ], ... ],
    	[ [ -1.748, 1.364, 1.8 ], [ 1.348, 1.728, -1.22 ], [ 1.324, 0.192, -1.852 ], [ -0.404, -0.944, -0.248 ], [ 1.288, -1.952, -0.136 ], [ -0.928, -0.26, -0.088 ], [ -1.66, 0.76, 0.884 ], [ 1.364, -1.836, -0.3 ], ... ],
    	[ [ -1.908, -1.968, 1.26 ], [ -0.432, 1.06, -1.324 ], [ 0.944, 0.52, 1.96 ], [ 0.08, -0.076, 0.952 ], [ -1.748, 1.796, -0.796 ], [ -0.424, -0.764, -1.88 ], [ -0.8, -1.996, -1.036 ], [ 1.452, -1.568, 0.472 ], ... ],
    	[ [ -1.84, 0.404, -1.116 ], [ -0.46, -0.26, -1.344 ], [ 0.976, -1.724, -1.448 ], [ 1.148, -1.52, 1.84 ], [ 0.14, 1.748, 0.904 ], [ 1.288, -0.376, -1.66 ], [ -1.992, 1.388, -0.12 ], [ 0.804, 0.596, -1.232 ], ... ],
    	[ [ -0.424, -1.888, 1.944 ], [ -1.388, 1.256, 1.308 ], [ -0.572, -1.976, 1.86 ], [ 1.372, 0.136, -1.112 ], [ -0.432, -0.724, -1.156 ], [ -1.4, 1.484, -0.42 ], [ -0.004, -0.14, 0.664 ], [ -1.556, 1.308, -0.976 ], ... ],
    	[ [ 0.224, 1.784, -1.812 ], [ -1.916, 0.312, -0.168 ], [ -1.8, 1.02, 0.568 ], [ -0.636, 0.92, 1.78 ], [ 1.768, 0.008, -0.332 ], [ -1.364, 0.152, -1.392 ], [ -1.916, -0.436, -0.664 ], [ 0.136, -0.16, 1.048 ], ... ],
    	...
    ]
    [
    	[ [ -0.832, 0.44, -0.804 ] ]
    ]
    [
    	[ [ -1.052, -1.592, 0.536 ], [ -1.432, 0.344, 1.056 ], [ -0.98, -1.144, -0.364 ], [ 1.848, -1.6, -0.408 ], [ 0.124, -1.396, -1.428 ], [ 0.808, -1.476, -0.048 ], [ 1.996, -0.168, 1.484 ], [ -1.792, -0.588, 1.052 ], ... ],
    	[ [ -1.976, 1.296, 0.576 ], [ -0.64, -0.736, -0.148 ], [ -1.828, 0.064, -0.288 ], [ -0.404, -0.508, -0.644 ], [ -1.284, 1.324, 1.696 ], [ 1.78, -0.932, -0.944 ], [ 1.412, 0.564, 1.636 ], [ 0.128, 0.196, -1.324 ], ... ],
    	[ [ -0.724, -1.26, -0.596 ], [ -1.868, 1.612, -0.464 ], [ 0.176, -0.296, 1.488 ], [ 0.796, 1.26, -1.384 ], [ -1.412, -0.356, 0.06 ], [ 1.092, -0.8, 1.236 ], [ 1.612, 0.18, 0.716 ], [ -1.376, 0.508, 1.204 ], ... ],
    	[ [ 1.964, 1.956, -1.952 ], [ -0.276, -1.08, 0.696 ], [ 1.912, 0.464, -1.392 ], [ -1.676, -0.092, -1.3 ], [ -1.184, 1.064, -1.952 ], [ 1.772, -0.768, 1.78 ], [ 1.216, 0.648, -1.756 ], [ 1.98, -0.696, 1.596 ], ... ],
    	[ [ 0.364, 0.08, -1.976 ], [ -1.444, -0.704, -0.328 ], [ 1.408, -1.312, -0.008 ], [ -0.684, -0.484, 1.776 ], [ 1.388, 0.112, -1.484 ], [ 1.264, 0.98, 1.48 ], [ -0.068, -0.492, -1.524 ], [ -0.028, 1.58, 0.1 ], ... ],
    	[ [ 0.336, 0.82, -1.588 ], [ 1.28, -1.96, 0.14 ], [ 0.464, -1.032, 1.636 ], [ -0.844, -1.788, -1.976 ], [ -1.984, 1.876, -1.016 ], [ 1.06, -1.168, 1.456 ], [ 1.868, -1.544, -0.672 ], [ 1.696, -0.656, -1.452 ], ... ],
    	[ [ 1.292, -1.512, -1.128 ], [ 0.676, -1.268, -0.012 ], [ 0.08, -0.364, 0.9 ], [ -1.952, -1.28, 1.548 ], [ -1.108, -1.288, 0.908 ], [ -1.768, 1.476, -0.388 ], [ 0.816, 0.568, 0.604 ], [ 1.788, 0.028, 0.792 ], ... ],
    	[ [ 1.0, 1.176, 1.764 ], [ -0.148, -1.244, 0.216 ], [ 1.012, -1.372, 0.98 ], [ -1.668, 0.992, -0.76 ], [ 1.756, 1.16, -0.46 ], [ 1.264, 1.272, -0.576 ], [ -1.056, -1.416, -1.544 ], [ -0.312, 1.648, 1.108 ], ... ],
    	...
    ]
    [
    	[ [ -0.832, 0.44, -0.804 ] ]
    ]
    [
    	[ [ 1.508, 0.264, -0.5 ], [ 1.816, -1.392, -1.288 ], [ 1.812, -1.7, -0.644 ], [ -0.208, 0.692, -1.62 ], [ 0.44, 0.152, -0.46 ], [ 1.436, 0.104, 0.54 ], [ -1.928, -0.084, 1.152 ], [ 0.004, 0.812, 1.828 ], ... ],
    	[ [ 0.224, 1.12, 0.772 ], [ -0.528, -1.84, -0.668 ], [ 1.816, 0.684, 1.312 ], [ -0.288, -1.576, 0.056 ], [ -0.132, 0.808, 0.212 ], [ 0.732, 0.464, -0.956 ], [ 0.944, -1.568, -0.568 ], [ -1.06, -1.108, 0.38 ], ... ],
    	[ [ -0.828, -1.924, -0.944 ], [ 0.684, 1.58, -0.856 ], [ 1.48, -0.54, -1.088 ], [ -0.016, 0.092, -0.344 ], [ -0.54, -0.036, 1.644 ], [ -0.08, -0.376, -1.764 ], [ -0.1, 1.968, -0.224 ], [ 0.096, -0.176, 1.94 ], ... ],
    	[ [ 1.816, 1.98, -1.608 ], [ -0.08, 1.756, 0.428 ], [ -1.644, 1.716, 1.36 ], [ -0.488, -0.324, 0.096 ], [ -0.368, -0.716, 0.308 ], [ -0.22, -0.832, -1.82 ], [ 0.58, 0.956, -0.416 ], [ 0.856, -1.628, 0.436 ], ... ],
    	[ [ -1.388, 0.792, -0.996 ], [ 1.904, -1.38, 0.528 ], [ 0.904, -1.736, -1.452 ], [ -0.592, -1.016, -1.064 ], [ -0.148, 1.268, -0.416 ], [ -0.724, -0.296, -1.192 ], [ -0.572, 1.2, 1.572 ], [ 0.848, -1.388, -0.008 ], ... ],
    	[ [ 0.12, -0.24, 1.724 ], [ 0.116, -0.66, -1.56 ], [ -1.836, -0.444, -0.436 ], [ -0.272, 0.904, 0.996 ], [ -1.784, 1.548, 1.192 ], [ -0.208, 0.384, -0.316 ], [ -0.332, 1.808, -0.68 ], [ 1.768, 1.172, -0.964 ], ... ],
    	[ [ -1.34, 1.156, 1.46 ], [ -1.664, 1.62, -1.676 ], [ -0.244, -1.58, 0.648 ], [ -0.6, 1.492, -0.172 ], [ 1.296, -0.216, -1.752 ], [ -0.66, 1.92, 1.436 ], [ -0.288, -1.084, 1.444 ], [ 1.108, 1.448, 1.528 ], ... ],
    	[ [ 1.86, -0.092, -0.848 ], [ 1.448, -0.168, -1.936 ], [ 0.044, -1.392, 0.48 ], [ 0.444, -0.16, -0.796 ], [ -0.336, 0.956, 0.92 ], [ 1.872, -0.812, 0.372 ], [ 0.856, -1.104, 1.068 ], [ 1.704, -0.3, 0.528 ], ... ],
    	...
    ]
    [
    	[ [ -0.804, 0.44, -0.832 ] ]
    ]
    [
    	[ [ -0.488, -1.924, 0.208 ], [ 0.564, -0.464, -0.304 ], [ 1.196, -0.904, -0.076 ], [ -0.652, -1.272, -1.3 ], [ 1.508, -1.504, 1.988 ], [ -1.68, -0.856, -0.012 ], [ -0.448, 0.076, -0.532 ], [ -0.72, -0.46, 0.468 ], ... ],
    	[ [ 1.08, 1.472, 1.46 ], [ 1.564, -0.232, 1.34 ], [ 1.544, 0.5, -0.236 ], [ -1.024, -1.324, 0.156 ], [ 1.536, 0.128, 1.352 ], [ -1.392, 1.784, 1.984 ], [ 1.524, 1.476, -0.144 ], [ -0.488, 1.836, -0.956 ], ... ],
    	[ [ -0.576, -0.332, 1.192 ], [ 0.284, 1.76, 0.748 ], [ 1.376, -1.516, 0.832 ], [ -1.9, 0.172, 1.28 ], [ -0.656, 1.436, -1.076 ], [ 1.28, 0.7, 1.024 ], [ -0.444, -1.248, -1.744 ], [ 1.088, 0.184, -0.516 ], ... ],
    	[ [ -0.64, 1.988, -1.04 ], [ 0.932, -1.804, -1.828 ], [ 0.252, -1.948, -0.164 ], [ 0.088, -1.932, 1.156 ], [ -0.088, -1.612, 1.404 ], [ -0.816, 0.012, 0.796 ], [ -1.324, 1.28, -1.636 ], [ -1.76, -1.904, 1.452 ], ... ],
    	[ [ -1.16, 1.844, -0.088 ], [ 0.024, 1.884, 1.712 ], [ 0.072, 0.136, -1.164 ], [ -1.424, -0.296, 1.484 ], [ 0.176, -0.588, -1.432 ], [ 1.86, -1.78, 1.052 ], [ 0.472, 0.748, -0.188 ], [ -0.64, 1.588, -1.38 ], ... ],
    	[ [ -0.636, 0.016, 0.716 ], [ 1.772, 1.612, -1.352 ], [ 0.784, -1.004, -1.076 ], [ 0.24, 0.588, 0.808 ], [ 0.08, 1.216, -0.66 ], [ 1.356, 0.572, 1.696 ], [ 0.884, -1.712, -1.58 ], [ 0.496, 1.26, 0.076 ], ... ],
    	[ [ -1.316, 1.624, 1.184 ], [ -1.96, 1.76, -1.924 ], [ -0.076, 0.436, -0.208 ], [ -0.564, 1.848, 1.492 ], [ 1.496, 0.844, -1.828 ], [ 0.732, -0.892, 1.468 ], [ -1.428, 1.84, 0.3 ], [ -1.552, 1.804, -1.988 ], ... ],
    	[ [ 0.96, -1.096, -0.904 ], [ -0.396, 0.908, -0.092 ], [ 1.632, -1.792, -0.816 ], [ 1.672, 0.736, 1.016 ], [ 0.82, 1.384, 0.944 ], [ -1.668, 0.112, 0.412 ], [ -1.632, -1.052, 0.488 ], [ -1.952, 0.352, 0.34 ], ... ],
    	...
    ]
    [
    	[ [ -0.804, 0.44, -0.832 ] ]
    ]
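
The setup above can be restated without the test harness: the layer's parameters stay fixed, a target output has already been computed, and gradient descent is run on the input tensors until the layer reproduces that output. A minimal plain-Java sketch of that idea, using a hypothetical elementwise map y = x * gate + bias (chosen only so the gradient is easy to write by hand, not the actual GateBiasLayer kernel):

    import java.util.Arrays;

    // Sketch: learn an input x so that a FIXED layer reproduces a target output.
    // The "layer" here is a hypothetical elementwise y = x * gate + bias; the bias
    // values are borrowed from the data dump above purely as illustrative numbers.
    public class InputLearningSketch {
      public static void main(String[] args) {
        double[] gate = {0.5, -1.2, 2.0};        // fixed layer parameters (made up)
        double[] bias = {-0.832, 0.44, -0.804};  // fixed layer parameters
        double[] target = {1.0, -0.5, 0.25};     // pre-evaluated output to match
        double[] x = new double[3];              // the input being learned

        double lr = 0.1;
        for (int iter = 0; iter < 200; iter++) {
          for (int i = 0; i < x.length; i++) {
            double y = x[i] * gate[i] + bias[i]; // forward pass
            double dLdy = 2 * (y - target[i]);   // derivative of squared error
            x[i] -= lr * dLdy * gate[i];         // chain rule: dy/dx = gate
          }
        }
        System.out.println(Arrays.toString(x));  // x now maps to ~target under the layer
      }
    }

The TrainingTester performs the same kind of optimization, but through IterativeTrainer with pluggable orientation and line-search strategies, which the next three subsections compare.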

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.
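
The ArmijoWolfeSearch configured below accepts a step size t along the search direction once two one-dimensional tests pass: sufficient decrease (Armijo) and a curvature bound (Wolfe). A minimal sketch of those checks, with conventional constants c1 and c2 rather than whatever the test actually uses:

    import java.util.function.DoubleUnaryOperator;

    // Sketch of the Armijo + weak Wolfe acceptance tests along a search direction.
    // f is the 1-D restriction phi(t) = loss(x + t*d); df is its slope.
    class WeakWolfeCheck {
      static boolean acceptable(DoubleUnaryOperator f, DoubleUnaryOperator df,
                                double t, double c1, double c2) {
        double phi0 = f.applyAsDouble(0);
        double dphi0 = df.applyAsDouble(0);  // negative along a descent direction
        boolean armijo = f.applyAsDouble(t) <= phi0 + c1 * t * dphi0; // enough decrease
        boolean wolfe = df.applyAsDouble(t) >= c2 * dphi0;            // slope flattened enough
        return armijo && wolfe;
      }

      public static void main(String[] args) {
        DoubleUnaryOperator f = t -> (t - 2) * (t - 2);  // toy loss along the line
        DoubleUnaryOperator df = t -> 2 * (t - 2);
        System.out.println(acceptable(f, df, 1.5, 1e-4, 0.9));  // true: acceptable step
        System.out.println(acceptable(f, df, 0.01, 1e-4, 0.9)); // false: step too small
      }
    }

In the log that follows, th(t) appears to be the sampled loss at step size t and dx the directional derivative; END marks an accepted step, and WOLF (strong) a sample that also met the strong curvature condition.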

TrainingTester.java:480 executed in 32.58 seconds (2.327 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 5207147721298
Reset training subject: 5208012206437
Constructing line search parameters: GD
th(0)=3.299861601031111;dx=-0.16895518751408264
New Minimum: 3.299861601031111 > 2.9881399349587268
END: th(2.154434690031884)=2.9881399349587268; dx=-0.12042157337326743 evalInputDelta=0.3117216660723843
Fitness changed from 3.299861601031111 to 2.9881399349587268
Iteration 1 complete. Error: 2.9881399349587268 Total: 6.5104; Orientation: 0.6510; Line Search: 3.8735
th(0)=2.9881399349587268;dx=-0.08582963583914967
New Minimum: 2.9881399349587268 > 2.7130296620099856
END: th(4.641588833612779)=2.7130296620099856; dx=-0.03271178728583193 evalInputDelta=0.27511027294874113
Fitness changed from 2.9881399349587268 to 2.7130296620099856
Iteration 2 complete. Error: 2.7130296620099856 Total: 3.5147; Orientation: 0.7018; Line Search: 2.2609
th(0)=2.7130296620099856;dx=-0.012467456589280763
New Minimum: 2.7130296620099856 > 2.671468239396801
WOLF (strong): th(10.000000000000002)=2.671468239396801; dx=0.0041551720666438054 evalInputDelta=0.04156142261318463
END: th(5.000000000000001)=2.6714706648834876; dx=-0.004156142261318479 evalInputDelta=0.04155899712649802
Fitness changed from 2.7130296620099856 to 2.671468239396801
Iteration 3 complete. Error: 2.671468239396801 Total: 4.5041; Orientation: 0.8224; Line Search: 3.1271
th(0)=2.671468239396801;dx=-0.0013857195427109793
New Minimum: 2.671468239396801 > 2.667257116492409
WOLF (strong): th(10.772173450159421)=2.667257116492409; dx=6.038675007110795E-4 evalInputDelta=0.004211122904392184
New Minimum: 2.667257116492409 > 2.6666836558538636
END: th(5.386086725079711)=2.6666836558538636; dx=-3.9092602099995E-4 evalInputDelta=0.004784583542937426
Fitness changed from 2.671468239396801 to 2.6666836558538636
Iteration 4 complete. Error: 2.6666836558538636 Total: 4.5472; Orientation: 0.6802; Line Search: 3.2309
th(0)=2.6666836558538636;dx=-1.1053888609169518E-4
New Minimum: 2.6666836558538636 > 2.6663888207707602
WOLF (strong): th(11.60397208403195)=2.6663888207707602; dx=5.972265162166205E-5 evalInputDelta=2.948350831033508E-4
New Minimum: 2.6663888207707602 > 2.6662892745459854
END: th(5.801986042015975)=2.6662892745459854; dx=-2.540811723501662E-5 evalInputDelta=3.943813078781666E-4
Fitness changed from 2.6666836558538636 to 2.6662892745459854
Iteration 5 complete. Error: 2.6662892745459854 Total: 5.7497; Orientation: 0.6858; Line Search: 4.5415
th(0)=2.6662892745459854;dx=-6.134398260839758E-6
New Minimum: 2.6662892745459854 > 2.6662713513298595
WOLF (strong): th(12.500000000000004)=2.6662713513298595; dx=3.2666836806373437E-6 evalInputDelta=1.7923216125925734E-5
New Minimum: 2.6662713513298595 > 2.666265623747389
END: th(6.250000000000002)=2.666265623747389; dx=-1.4338572901011853E-6 evalInputDelta=2.3650798596452915E-5
Fitness changed from 2.6662892745459854 to 2.666265623747389
Iteration 6 complete. Error: 2.666265623747389 Total: 4.4023; Orientation: 0.6946; Line Search: 3.1884
th(0)=2.666265623747389;dx=-6.504362770844878E-7
New Minimum: 2.666265623747389 > 2.6662587593945153
END: th(13.465216812699278)=2.6662587593945153; dx=-3.6913183961283276E-7 evalInputDelta=6.864352873670043E-6
Fitness changed from 2.666265623747389 to 2.6662587593945153
Iteration 7 complete. Error: 2.6662587593945153 Total: 3.3411; Orientation: 0.6884; Line Search: 2.1342
Final threshold in iteration 7: 2.6662587593945153 (> 0.0) after 32.571s (< 30.000s)

Returns

    2.6662587593945153

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.5960004740737852, 1.3319940118003568, 0.5719934833208975 ], [ 1.5799998910832367, -1.9519828629078761, 1.2279917343492515 ], [ -1.2959858168151295, 0.2200008663558995, -0.22800065144085452 ], [ 1.7079975061219006, -0.09599929264152289, 0.963996839933148 ], [ -1.8639956039897958, 0.3359981987324796, 1.2639892433896343 ], [ 1.907989185256796, -0.5839936217334578, 1.5239980059142455 ], [ -0.0760027765401834, -1.0679953353723437, -0.1520033013978942 ], [ -0.4039999675857213, 0.1639965380927346, -0.8879908995989483 ], ... ],
    	[ [ -1.9799913640585323, -1.167996095026695, 1.8199880244093958 ], [ 0.6479892029231763, -1.535988109822815, 1.275993394988996 ], [ 1.7799975061219007, -0.18799265008254332, -1.4279859176797134 ], [ 1.4759896975818239, -1.0879937630645005, -1.983987277990994 ], [ 0.35599849543919565, 0.8880010960188429, -1.6079987258054058 ], [ 0.05999179988107529, -1.9719847178778043, -0.10800584535665236 ], [ -0.05199102839730729, 0.03199745674450838, -1.9159892212928231 ], [ -1.5319964166432878, 0.14800021269982971, 1.7599967692676266 ], ... ],
    	[ [ 1.419991711549174, 1.3759916621717814, 1.5039994722238073 ], [ 1.8319909518948223, -0.69600044095624, 1.3280000905471165 ], [ -1.0759875834531558, 0.0920070495889922, -1.8439877019841207 ], [ 1.2919978771158869, -0.5319921554238959, 1.9759880774085365 ], [ -0.35199853660891983, 0.9839891535457838, -0.23199047560582195 ], [ 1.211989838912866, -1.779998762650115, 1.1719934126553762 ], [ -1.568000674240932, 0.5679968914203399, -0.5759940265482552 ], [ 0.7239979654477877, 1.252003286649996, 0.5679989245660191 ], ... ],
    	[ [ 1.9919903512378934, -0.24799385139640134, 1.1839974229236967 ], [ -0.3440008685711147, -1.8599959183628925, 1.0839967162684863 ], [ 0.29599409651050973, 0.8760043996319525, -1.0319940265482552 ], [ 1.395989255922317, 0.7040026859930668, 1.4079916106845896 ], [ 0.8239976297865627, 0.11199941771271779, -1.192001358096065 ], [ 1.6559886729317685, -1.9919862371865071, -1.2639874723211768 ], [ 0.7920003857418841, -1.4199854598657757, -1.1239980898157165 ], [ 0.3199915348853713, -0.2159938690627816, 1.6439906213672948 ], ... ],
    	[ [ -1.3199969466346961, 1.2839975274100297, -1.0919888149660772 ], [ -0.3879923180430666, 1.8119845956196754, 1.6279985359056532 ], [ 1.9599966581356483, 1.0079962730970304, 0.23999433130715028 ], [ 0.1320064099775543, 0.7519938881356948, 0.07599127502336449 ], [ 1.7319937785156647, -0.651989028474589, 1.715994154643348 ], [ -0.03199410234747336, -0.3239944167205698, 1.2959946316356148 ], [ -1.716000479910749, 0.8039966617573965, 1.683994454971812 ], [ 0.6960011983953761, 0.7399907258536274, -1.479986889330628 ], ... ],
    	[ [ 0.8719959338140572, 1.2999994000463375, 0.8319910100276604 ], [ 0.7759902982387529, -1.8159865198485914, 0.9559904270371118 ], [ -1.2679872477919305, -1.7239931447411907, -1.6439894509557662 ], [ -0.9360032711988308, 1.7319877225689824, -1.267990139944597 ], [ 1.43599877810128, -1.4319999992967334, 1.7000001612126374 ], [ 1.5640007037367287, -0.5600050872142497, -1.6319984608097016 ], [ -1.1959963636441473, -1.6719915900997275, -0.8160038843884431 ], [ -0.6039937136871075, 0.7520012020171246, 1.4919914870199278 ], ... ],
    	[ [ 1.4919930365276939, -1.0879905124505316, -0.4519924719067918 ], [ 1.2399949621631428, 0.27200432896643145, 0.9760026698386353 ], [ 1.4320008450677708, -0.6159988863147768, 0.5560057437888014 ], [ 0.7399916408836529, -0.5800005292881414, 0.5119893847206761 ], [ -1.3879860641444532, -0.40800044095624, -1.4719988671364477 ], [ 1.7319887435972894, 1.3679950541167925, 1.3679875827498893 ], [ 0.7159882312722617, 1.6160012550162652, 0.8319905330353934 ], [ -1.1999936606879669, 0.503989171212164, -1.0800017644228113 ], ... ],
    	[ [ -1.5919913640585324, -0.3479909541100379, 1.3879860281084258 ], [ -0.5719999499193411, 1.2639954781099187, -0.9759874193220361 ], [ -1.5839852514909607, -1.2999938337300212, 1.447996786934007 ], [ 0.5919936725173833, -1.6279979323302423, 1.9399828128272174 ], [ -1.1679939433500512, -0.5679909894427985, -0.6639961995130278 ], [ -1.115986611802241, 1.3920022796663207, -0.027997683488970006 ], [ 1.6999977534512245, -1.7759930210765291, -0.2960066403437643 ], [ -1.1799892617592806, 1.8639918565019644, 1.06000164518858 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.23566795100455407, 1.7718052427383926, -0.23214932253879894 ], [ 0.7483314660048974, -1.5121716319698404, 0.423848928489555 ], [ -2.1276542418934685, 0.6598120972939352, -1.032143457300551 ], [ 0.8763290810435613, 0.34381193829651274, 0.15985403407345156 ], [ -2.6956640290681353, 0.7758094296705152, 0.4598464375299378 ], [ 1.0763207601784568, -0.14418239079542217, 0.7198552000545491 ], [ -0.9076712016185227, -0.628184104434308, -0.9561461072575906 ], [ -1.2356683926640606, 0.6038077690307703, -1.6921337054586447 ], ... ],
    	[ [ -2.8116597891368715, -0.7281848640886593, 1.0158452185496993 ], [ -0.18367922215516297, -1.0961768788847794, 0.47185058912929967 ], [ 0.9483290810435614, 0.25181858085549236, -2.23212872353941 ], [ 0.6443212725034846, -0.6481825321264648, -2.7881300838506906 ], [ -0.4756699296391436, 1.3278123269568787, -2.4121415316651023 ], [ -0.771676625197264, -1.5321734869397687, -0.9121486512163488 ], [ -0.8836594534756466, 0.471808687682544, -2.72013202715252 ], [ -2.363664841721627, 0.5878114436378654, 0.9558539634079302 ], ... ],
    	[ [ 0.5883232864708348, 1.815802893109817, 0.6998566663641108 ], [ 1.000322526816483, -0.2561892100182044, 0.5238572846874201 ], [ -1.907656008531495, 0.5318182805270278, -2.648130507843817 ], [ 0.4603294520375476, -0.09218092448586024, 1.17184527154884 ], [ -1.183666961687259, 1.4238003844838194, -1.0361332814655184 ], [ 0.3803214138345268, -1.3401875317120795, 0.3678506067956797 ], [ -2.3996690993192713, 1.0078081223583757, -1.3801368324079517 ], [ -0.10767045963055155, 1.6918145175880315, -0.23614388129367736 ], ... ],
    	[ [ 1.160321926159554, 0.1918173795416343, 0.37985461706400026 ], [ -1.175669293649454, -1.420184687424857, 0.2798539104087898 ], [ -0.5356743285678296, 1.3158156305699882, -1.8361368324079517 ], [ 0.5643208308439778, 1.1438139169311023, 0.6038488048248931 ], [ -0.007670795291776611, 0.5518106486507535, -1.9961441639557613 ], [ 0.8243202478534293, -1.5521750062484716, -2.068130278180873 ], [ -0.03966803933645513, -0.9801742289277401, -1.928140895675413 ], [ -0.511676890192968, 0.22381736187525406, 0.8398478155075983 ], ... ],
    	[ [ -2.1516653717130354, 1.7238087583480652, -1.8961316208257737 ], [ -1.219660743121406, 2.251795826557711, 0.8238557300459568 ], [ 1.128328233057309, 1.4478075040350662, -0.5641484745525461 ], [ -0.699662015100785, 1.1918051190737304, -0.728151530836332 ], [ 0.9003253534373254, -0.2121777975365533, 0.9118513487836515 ], [ -0.8636625274258126, 0.11581681421746587, 0.49185182577591835 ], [ -2.5476689049890884, 1.2438078926954321, 0.8798516491121156 ], [ -0.13566722668296316, 1.179801956791663, -2.2841296951903245 ], ... ],
    	[ [ 0.04032750873571789, 1.739810630984373, 0.027848204167963964 ], [ -0.055678126839586395, -1.3761752889105558, 0.15184762117741535 ], [ -2.09965567287027, -1.2841819138031552, -2.4481322568154624 ], [ -1.7676716962771701, 2.171798953507018, -2.0721329458042934 ], [ 0.6043303530229407, -0.9921887683586977, 0.895857355352941 ], [ 0.7323322786583895, -0.12019385627621404, -2.436141266669398 ], [ -2.0276647887224866, -1.232180359161692, -1.6201466902481396 ], [ -1.435662138765447, 1.1918124329551603, 0.6878486811602313 ], ... ],
    	[ [ 0.6603246114493546, -0.6481792815124959, -1.2561352777664883 ], [ 0.4083265370848035, 0.7118155599044671, 0.17185986397893882 ], [ 0.6003324199894315, -0.1761876553767412, -0.24813706207089503 ], [ -0.09167678419468639, -0.14018929835010574, -0.2921534211390203 ], [ -2.2196544892227923, 0.03181078998179565, -2.276141672996144 ], [ 0.9003203185189501, 1.807806285054828, 0.5638447768901929 ], [ -0.11568019380607752, 2.0558124859543008, 0.027847727175696946 ], [ -2.031662085766306, 0.9438004021501997, -1.8841445702825077 ], ... ],
    	[ [ -2.4236597891368716, 0.09182027682799776, 0.5838432222487293 ], [ -1.4036683749976804, 1.7038067090479543, -1.7801302251817326 ], [ -2.4156536765693, -0.8601826027919856, 0.6438539810743105 ], [ -0.23967475256095594, -1.1881867013922065, 1.135840006967521 ], [ -1.9996623684283905, -0.12817975850476282, -1.4681390053727243 ], [ -1.9476550368805803, 1.8318135106043565, -0.8321404893486665 ], [ 0.8683293283728852, -1.3361817901384936, -1.1001494462034607 ], [ -2.01165768683762, 2.30380308744, 0.25585883932888365 ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges fastest on the quadratic objectives that arise from purely linear models.
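
The QuadraticSearch configured below samples the loss along the step direction, brackets the minimum, and (roughly speaking) steps toward the vertex of an interpolating parabola. A one-dimensional sketch of that vertex step, as an illustration of the idea rather than the library's exact algorithm:

    import java.util.function.DoubleUnaryOperator;

    // Sketch: one step of parabolic (quadratic) interpolation along a line.
    // Given three trial step sizes a < b < c and their loss values, the vertex of
    // the parabola through the three points estimates the minimizing step size.
    class QuadraticStepSketch {
      static double parabolicVertex(double a, double fa, double b, double fb,
                                    double c, double fc) {
        double num = (b - a) * (b - a) * (fb - fc) - (b - c) * (b - c) * (fb - fa);
        double den = (b - a) * (fb - fc) - (b - c) * (fb - fa);
        return b - 0.5 * num / den;
      }

      public static void main(String[] args) {
        // Toy loss along the line: f(t) = (t - 7.5)^2 + 2.66, mimicking the first
        // iteration below, whose bracket converges near t = 7.5.
        DoubleUnaryOperator f = t -> (t - 7.5) * (t - 7.5) + 2.66;
        double t = parabolicVertex(0, f.applyAsDouble(0),
                                   5, f.applyAsDouble(5),
                                   10, f.applyAsDouble(10));
        System.out.println(t); // 7.5 exactly, because the toy loss is itself quadratic
      }
    }

Note that the orientation in the configuration below is still plain GradientDescent; relative to the previous run, only the line search (QuadraticSearch in place of ArmijoWolfeSearch) changes.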

TrainingTester.java:452 executed in 38.38 seconds (2.109 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 5240036823476
Reset training subject: 5240559195402
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=3.299861601031111}, derivative=-0.16895518751408262}
New Minimum: 3.299861601031111 > 3.2998616010142157
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=3.2998616010142157}, derivative=-0.16895518751182992}, evalInputDelta = -1.6895373988745632E-11
New Minimum: 3.2998616010142157 > 3.2998616009128425
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=3.2998616009128425}, derivative=-0.16895518749831354}, evalInputDelta = -1.1826850609963913E-10
New Minimum: 3.2998616009128425 > 3.2998616002032306
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=3.2998616002032306}, derivative=-0.16895518740369883}, evalInputDelta = -8.278804308758936E-10
New Minimum: 3.2998616002032306 > 3.2998615952359485
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=3.2998615952359485}, derivative=-0.16895518674139598}, evalInputDelta = -5.795162572042045E-9
New Minimum: 3.2998615952359485 > 3.2998615604649713
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=3.2998615604649713}, derivative=-0.16895518210527596}, evalInputDelta = -4.0566139780651156E-8
New Minimum: 3.2998615604649713 > 3.299861317068159
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=3.299861317068159}, derivative=-0.16895514965243583}, evalInputDelta = -2.839629518192055E-7
New Minimum: 3.299861317068159 > 3.2998596132917846
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=3.2998596132917846}, derivative=-0.16895492248255503}, evalInputDelta = -1.987739326470006E-6
New Minimum: 3.2998596132917846 > 3.299847686921305
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=3.299847686921305}, derivative=-0.16895333229338946}, evalInputDelta = -1.3914109806112407E-5
New Minimum: 3.299847686921305 > 3.29976420547096
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=3.29976420547096}, derivative=-0.16894220096923043}, evalInputDelta = -9.73955601510923E-5
New Minimum: 3.29976420547096 > 3.2991799893262304
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=3.2991799893262304}, derivative=-0.16886428170011716}, evalInputDelta = -6.816117048806802E-4
New Minimum: 3.2991799893262304 > 3.2950980226896753
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=3.2950980226896753}, derivative=-0.1683188468163243}, evalInputDelta = -0.004763578341435704
New Minimum: 3.2950980226896753 > 3.266894028684862
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=3.266894028684862}, derivative=-0.16450080262977423}, evalInputDelta = -0.03296757234624925
New Minimum: 3.266894028684862 > 3.0875849207536157
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=3.0875849207536157}, derivative=-0.13777449332392383}, evalInputDelta = -0.21227668027749536
New Minimum: 3.0875849207536157 > 2.7202448202549765
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=2.7202448202549765}, derivative=0.04930967181702916}, evalInputDelta = -0.5796167807761345
2.7202448202549765 <= 3.299861601031111
New Minimum: 2.7202448202549765 > 2.6662782362529436
F(7.500016709760479) = LineSearchPoint{point=PointSample{avg=2.6662782362529436}, derivative=-1.4315108271503892E-17}, evalInputDelta = -0.6335833647781675
Left bracket at 7.500016709760479
Converged to left
Fitness changed from 3.299861601031111 to 2.6662782362529436
Iteration 1 complete. Error: 2.6662782362529436 Total: 22.7385; Orientation: 0.7108; Line Search: 20.4588
F(0.0) = LineSearchPoint{point=PointSample{avg=2.6662782362529436}, derivative=-4.937566718929517E-7}
New Minimum: 2.6662782362529436 > 2.6662745330763507
F(7.500016709760479) = LineSearchPoint{point=PointSample{avg=2.6662745330763507}, derivative=-4.937548860441723E-7}, evalInputDelta = -3.7031765929285143E-6
New Minimum: 2.6662745330763507 > 2.666252314298066
F(52.500116968323354) = LineSearchPoint{point=PointSample{avg=2.666252314298066}, derivative=-4.937441709514935E-7}, evalInputDelta = -2.5921954877716757E-5
New Minimum: 2.666252314298066 > 2.6660967963511175
F(367.50081877826346) = LineSearchPoint{point=PointSample{avg=2.6660967963511175}, derivative=-4.936691653027495E-7}, evalInputDelta = -1.8143990182606373E-4
New Minimum: 2.6660967963511175 > 2.6650088322737755
F(2572.5057314478445) = LineSearchPoint{point=PointSample{avg=2.6650088322737755}, derivative=-4.931441257615452E-7}, evalInputDelta = -0.0012694039791680822
New Minimum: 2.6650088322737755 > 2.6574254997458757
F(18007.540120134912) = LineSearchPoint{point=PointSample{avg=2.6574254997458757}, derivative=-4.894688489731E-7}, evalInputDelta = -0.00885273650706786
New Minimum: 2.6574254997458757 > 2.60593055671187
F(126052.78084094438) = LineSearchPoint{point=PointSample{avg=2.60593055671187}, derivative=-4.6374191145399345E-7}, evalInputDelta = -0.060347679541073784
New Minimum: 2.60593055671187 > 2.323296803877141
F(882369.4658866107) = LineSearchPoint{point=PointSample{avg=2.323296803877141}, derivative=-2.8365334882025013E-7}, evalInputDelta = -0.3429814323758027
F(6176586.261206275) = LineSearchPoint{point=PointSample{avg=4.158572105796229}, derivative=9.76966589615964E-7}, evalInputDelta = 1.4922938695432855
F(475122.02009279036) = LineSearchPoint{point=PointSample{avg=2.4585594537537014}, derivative=-3.8062411331534303E-7}, evalInputDelta = -0.20771878249924214
F(3325854.1406495324) = LineSearchPoint{point=PointSample{avg=2.3410339239210947}, derivative=2.981712381503108E-7}, evalInputDelta = -0.3252443123318489
2.3410339239210947 <= 2.6662782362529436
New Minimum: 2.323296803877141 > 2.154344785495122
F(2073626.4638012259) = LineSearchPoint{point=PointSample{avg=2.154344785495122}, derivative=-1.376428539288238E-21}, evalInputDelta = -0.5119334507578217
Left bracket at 2073626.4638012259
Converged to left
Fitness changed from 2.6662782362529436 to 2.154344785495122
Iteration 2 complete. Error: 2.154344785495122 Total: 15.6393; Orientation: 0.7035; Line Search: 14.4154
Final threshold in iteration 2: 2.154344785495122 (> 0.0) after 38.379s (< 30.000s)

Returns

    2.154344785495122

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.6174447459905392, 1.0716801884670653, 0.28785886231820484 ], [ 1.5761003484774727, -1.20699204351463, 0.8678256697790053 ], [ -0.6785779064294728, 0.2576688622570594, -0.2571611384594291 ], [ 1.6004187222876551, -0.06524324615559512, 0.8257811510297999 ], [ -1.67205657983065, 0.2576990433336339, 0.7955359713140849 ], [ 1.4386850486920697, -0.30671137943758464, 1.436469946055933 ], [ -0.19586947044595276, -0.8652085478850832, -0.29636294533700414 ], [ -0.4017555551557233, 0.013505911023686895, -0.4932184891499531 ], ... ],
    	[ [ -1.6037336888265297, -0.9982330658566545, 1.2985431401504006 ], [ 0.17945306073792022, -1.0190916211322287, 0.9880188020889524 ], [ 1.6724187222876552, 0.13152928308419284, -0.8166390922201121 ], [ 1.0289573980217344, -0.8168554758043888, -1.4317760197506004 ], [ 0.2914273968552832, 0.9356530188531159, -1.5534478254617248 ], [ -0.2956491685220563, -1.3076333083289327, -0.36295667993947606 ], [ 0.3388585400446297, -0.07855746259208712, -1.4482573447941556 ], [ -1.375385133939773, 0.15725241656059086, 1.618709102846398 ], ... ],
    	[ [ 1.0605107712486912, 1.0135345863689489, 1.4802149458615244 ], [ 1.4394862532771198, -0.7151640291358776, 1.331095367466292 ], [ -0.535379111014523, 0.3984730783047344, -1.3102083088510126 ], [ 1.2005469752505158, -0.1909663796319932, 1.456847176287952 ], [ -0.2875465794418327, 0.5124768758581779, 0.1812137999504588 ], [ 0.7711014943885383, -1.72620288478008, 0.8847868141348029 ], [ -1.5964760369897435, 0.4328661519406969, -0.31715662126549155 ], [ 0.6363870354797682, 1.3948865125385779, 0.5204065724401589 ], ... ],
    	[ [ 1.5733738437182028, 0.01930446396635882, 1.0711255485428663 ], [ -0.38092416949409874, -1.6825529453981498, 0.9404050667088464 ], [ 0.04019239743850872, 1.0672712714271595, -0.7731566212654917 ], [ 0.9297570968754717, 0.8207741029796608, 1.042449585458052 ], [ 0.7217948066086087, 0.08669187449731838, -1.251881620293449 ], [ 1.1644126993624053, -1.3936823442720758, -0.7202241522549561 ], [ 0.8096046857612867, -0.7878898142546535, -1.0417993918111068 ], [ -0.04716934920981375, 0.05053645192050832, 1.235440910890424 ], ... ],
    	[ [ -1.1864254953152877, 1.176514585591315, -0.6065930677395941 ], [ -0.05320633930245672, 1.142329768028749, 1.5635103074314478 ], [ 1.8155541440868312, 0.8459857303359293, -0.007276559480971112 ], [ 0.41149679339630724, 0.4863041041461119, -0.30414264341310765 ], [ 1.4623681806131996, -0.17502824751645468, 1.461043320060524 ], [ 0.22522444406664274, -0.08127192150085713, 1.0617796452984876 ], [ -1.736027904485388, 0.6588819953446404, 1.4420995248399824 ], [ 0.7489332398704097, 0.3368299479388724, -0.9108797547418896 ], ... ],
    	[ [ 0.6960656502069608, 1.273923862451468, 0.4403371758991348 ], [ 0.3550698075806514, -1.229970537005684, 0.5389927783860684 ], [ -0.7127868821433633, -1.425975054199621, -1.186241501390212 ], [ -1.0773738077297668, 1.198267900144287, -0.8401939711783817 ], [ 1.383715589588891, -1.4319637279896151, 1.706167415649694 ], [ 1.5954289025865958, -0.7811511971945592, -1.565927644773967 ], [ -1.0370810978022214, -1.3063899941647772, -0.9857073428500706 ], [ -0.3298792909246463, 0.8042610911282189, 1.1210735011370985 ], ... ],
    	[ [ 1.1901116746874787, -0.6755412593678969, -0.12557156123064767 ], [ 1.0218249876851833, 0.4601992232437574, 1.0912251261604649 ], [ 1.4695729989533997, -0.5675789691010337, 0.804859222138452 ], [ 0.37743872306528925, -0.6030040893651301, 0.049680067680888895 ], [ -0.7813300750713799, -0.4271640291358776, -1.4235919218285285 ], [ 1.2434847475458073, 1.152992899172245, 0.8273428390041381 ], [ 0.2052123982161429, 1.6705651272657704, 0.4196008506611714 ], [ -0.9235752547870948, 0.03324488790402841, -1.1575458973480108 ], ... ],
    	[ [ -1.2157336888265298, 0.04525843948584091, 0.7797577789692939 ], [ -0.5689875431098727, 1.0674251882726569, -0.42992011611740455 ], [ -0.9420015209622569, -1.0319275239877908, 1.3074771148922484 ], [ 0.3177601083380968, -1.5381063186251065, 1.191979586624503 ], [ -0.9038634475207028, -0.17627758460586002, -0.4996221029051031 ], [ -0.5331384484927455, 1.4911098259250992, 0.0718648852434549 ], [ 1.6031708909295623, -1.4725989698786677, -0.5855172220027485 ], [ -0.7123402553703202, 1.5099827188733044, 1.130680427501136 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.19187174702474752, 1.5116439164443847, -0.5387883726255835 ], [ 0.766783855462186, -0.7670283155373105, 0.04117843483521688 ], [ -1.4878943994447595, 0.6976325902343788, -1.0838083734032176 ], [ 0.7911022292723684, 0.37472048182172435, -8.66083913988569E-4 ], [ -2.481373072845937, 0.6976627713109533, -0.03111126362970351 ], [ 0.629368555676783, 0.13325234853973483, 0.6098227111121445 ], [ -1.0051859634612395, -0.4252448199077637, -1.1230101802807926 ], [ -1.21107204817101, 0.45346963900100634, -1.3198657240937415 ], ... ],
    	[ [ -2.4130501818418164, -0.5582693378793351, 0.4718959052066122 ], [ -0.6298634322773665, -0.5791278931549093, 0.16137156714516399 ], [ 0.8631022292723685, 0.5714930110615123, -1.6432863271639007 ], [ 0.21964090500644762, -0.3768917478270693, -2.2584232546943888 ], [ -0.5178890961600036, 1.3756167468304354, -2.3800950604055133 ], [ -1.104965661537343, -0.8676695803516132, -1.1896039148832644 ], [ -0.47045795297065707, 0.36140626538523235, -2.274904579737944 ], [ -2.18470162695506, 0.5972161445379103, 0.7920618679026096 ], ... ],
    	[ [ 0.25119427823340446, 1.4534983143462683, 0.6535677109177359 ], [ 0.630169760261833, -0.27520030115855815, 0.5044481325225035 ], [ -1.3446956040298097, 0.8384368062820539, -2.136855543794801 ], [ 0.39123048223522905, 0.24899734834532627, 0.6301999413441636 ], [ -1.0968630724571193, 0.9524406038354973, -0.6454334349933296 ], [ -0.038214998626748486, -1.2862391568027607, 0.05813957919101442 ], [ -2.40579253000503, 0.8728298799180163, -1.14380385620928 ], [ -0.17292945753551858, 1.8348502405158973, -0.3062406625036296 ], ... ],
    	[ [ 0.764057350702916, 0.4592681919436783, 0.2444783135990779 ], [ -1.1902406625093855, -1.2425892174208304, 0.11375783176505794 ], [ -0.769124095576778, 1.507234999404479, -1.59980385620928 ], [ 0.12044060386018496, 1.2607378309569803, 0.21580235051426355 ], [ -0.08752168640667801, 0.5266556024746378, -2.0785288552372374 ], [ 0.3550962063471186, -0.9537186162947564, -1.5468713871987445 ], [ 2.8819274600000444E-4, -0.34792608627733407, -1.8684466267548951 ], [ -0.8564858422251005, 0.4905001798978278, 0.4087936759466356 ], ... ],
    	[ [ -1.9957419883305745, 1.6164783135686345, -1.4332403026833824 ], [ -0.8625228323177434, 1.5822934960060684, 0.7368630724876594 ], [ 1.0062376510715445, 1.2859494583132487, -0.8339237944247595 ], [ -0.3978196996189795, 0.9262678321234314, -1.1307898783568961 ], [ 0.6530516875979129, 0.2649354804608648, 0.6343960851167355 ], [ -0.584092048948644, 0.35869180647646237, 0.23513241035469912 ], [ -2.5453443975006746, 1.09884572332196, 0.6154522898961939 ], [ -0.060383253144877025, 0.7767936759161919, -1.7375269896856782 ], ... ],
    	[ [ -0.11325084280832598, 1.7138875904287874, -0.38631005904465365 ], [ -0.45424668543463537, -0.7900068090283645, -0.28765445655772004 ], [ -1.52210337515865, -0.9860113262223016, -2.0128887363340002 ], [ -1.8866903007450535, 1.6382316281216065, -1.6668412061221702 ], [ 0.5743990965736043, -0.9920000000122957, 0.8795201807059055 ], [ 0.786112409571309, -0.3411874692172397, -2.3925748797177553 ], [ -1.846397590817508, -0.8664262661874578, -1.812354577793859 ], [ -1.139195783939933, 1.2442248191055383, 0.29442626619331 ], ... ],
    	[ [ 0.38079518167219195, -0.2355775313905774, -0.9522187961744362 ], [ 0.21250849466989652, 0.9001629512210769, 0.2645778912166764 ], [ 0.6602565059381129, -0.1276152411237142, -0.021788012805336487 ], [ -0.4318777699499975, -0.16304036138781058, -0.7769671672628995 ], [ -1.5906465680866666, 0.012799698841441887, -2.250239156772317 ], [ 0.43416825453052055, 1.5929566271495643, 6.956040603496305E-4 ], [ -0.6041040947991438, 2.11052885524309, -0.407046384282617 ], [ -1.7328917478023815, 0.4732086158813479, -1.9841931322917992 ], ... ],
    	[ [ -2.0250501818418165, 0.4852221674631604, -0.046889455974494565 ], [ -1.3783040361251593, 1.5073889162499763, -1.256567351061193 ], [ -1.7513180139775435, -0.5919637960104713, 0.48082987994845994 ], [ -0.49155638467718993, -1.098142590647787, 0.36533235168071465 ], [ -1.7131799405359895, 0.26368614337145946, -1.3262693378488914 ], [ -1.3424549415080322, 1.9310735539024186, -0.7547823497003335 ], [ 0.7938543979142756, -1.0326352419013483, -1.4121644569465368 ], [ -1.521656748385607, 1.9499464468506238, 0.3040331925573475 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.
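
L-BFGS keeps a short history of (step, gradient-change) pairs and turns the current gradient into a search direction with the standard two-loop recursion; when that direction fails a descent check, the orientation is rejected and a history element is popped, which is what the "Orientation rejected" lines in the log below report. A compact sketch of the textbook two-loop recursion, for illustration only:

    import java.util.List;

    // Textbook L-BFGS two-loop recursion: given the gradient and a history of
    // pairs (s_i = x_{i+1} - x_i, y_i = g_{i+1} - g_i), ordered oldest to newest,
    // return a direction approximating -H^{-1} * grad.
    class LbfgsTwoLoopSketch {
      static double[] direction(double[] grad, List<double[]> s, List<double[]> y) {
        int m = s.size();
        double[] q = grad.clone();
        double[] alpha = new double[m];
        for (int i = m - 1; i >= 0; i--) {            // first loop: newest to oldest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          alpha[i] = rho * dot(s.get(i), q);
          axpy(-alpha[i], y.get(i), q);
        }
        if (m > 0) {                                  // initial Hessian scaling
          double gamma = dot(s.get(m - 1), y.get(m - 1)) / dot(y.get(m - 1), y.get(m - 1));
          for (int j = 0; j < q.length; j++) q[j] *= gamma;
        }
        for (int i = 0; i < m; i++) {                 // second loop: oldest to newest
          double rho = 1.0 / dot(y.get(i), s.get(i));
          double beta = rho * dot(y.get(i), q);
          axpy(alpha[i] - beta, s.get(i), q);
        }
        for (int j = 0; j < q.length; j++) q[j] = -q[j]; // flip to a descent direction
        return q;
      }

      static double dot(double[] a, double[] b) {
        double sum = 0;
        for (int i = 0; i < a.length; i++) sum += a[i] * b[i];
        return sum;
      }

      static void axpy(double a, double[] x, double[] acc) {
        for (int i = 0; i < x.length; i++) acc[i] += a * x[i];
      }
    }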

TrainingTester.java:509 executed in 43.57 seconds (1.967 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 5278736823683
Reset training subject: 5279261867506
Adding measurement 18a63455 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 3.299861601031111 < 3.299861601031111. Total: 1
th(0)=3.299861601031111;dx=-0.16895518751408264
Adding measurement 5d6f205 to history. Total: 1
New Minimum: 3.299861601031111 > 2.9881399349587268
END: th(2.154434690031884)=2.9881399349587268; dx=-0.12042157337326745 evalInputDelta=0.3117216660723843
Fitness changed from 3.299861601031111 to 2.9881399349587268
Iteration 1 complete. Error: 2.9881399349587268 Total: 5.1841; Orientation: 0.8366; Line Search: 2.7784
Non-optimal measurement 2.9881399349587268 < 2.9881399349587268. Total: 2
LBFGS Accumulation History: 2 points
Non-optimal measurement 2.9881399349587268 < 2.9881399349587268. Total: 2
th(0)=2.9881399349587268;dx=-0.08582963583914967
Adding measurement 5489a5c3 to history. Total: 2
New Minimum: 2.9881399349587268 > 2.7130296620099856
END: th(4.641588833612779)=2.7130296620099856; dx=-0.03271178728583193 evalInputDelta=0.27511027294874113
Fitness changed from 2.9881399349587268 to 2.7130296620099856
Iteration 2 complete. Error: 2.7130296620099856 Total: 3.9728; Orientation: 0.6782; Line Search: 2.7749
Non-optimal measurement 2.7130296620099856 < 2.7130296620099856. Total: 3
LBFGS Accumulation History: 3 points
Non-optimal measurement 2.7130296620099856 < 2.7130296620099856. Total: 3
th(0)=2.7130296620099856;dx=-0.012467456589280763
Adding measurement 6d11205a to history. Total: 3
New Minimum: 2.7130296620099856 > 2.671468239396801
WOLF (strong): th(10.000000000000002)=2.671468239396801; dx=0.0041551720666438054 evalInputDelta=0.04156142261318463
Non-optimal measurement 2.6714706648834876 < 2.671468239396801. Total: 4
END: th(5.000000000000001)=2.6714706648834876; dx=-0.004156142261318479 evalInputDelta=0.04155899712649802
Fitness changed from 2.7130296620099856 to 2.671468239396801
Iteration 3 complete. Error: 2.671468239396801 Total: 6.0544; Orientation: 1.4418; Line Search: 4.0744
Non-optimal measurement 2.671468239396801 < 2.671468239396801. Total: 4
Rejected: LBFGS Orientation magnitude: 2.792e-01, gradient 3.723e-02, dot -1.000; [eceac7f5-f78c-4364-8cfc-a6c3551bd505 = 1.000/1.000e+00, f4215eca-1433-45c9-ac80-8d06a35e123e = 1.000/1.000e+00, da86bf51-a61c-4d60-a14a-d2d9d874a2fa = 1.000/1.000e+00, 18640a0a-9100-4734-8b19-c9ab9c85aefb = 1.000/1.000e+00, a7015cf6-f791-491b-be02-2eb9c0f6e807 = 1.000/1.000e+00, 61f09855-6adf-4c80-9ab1-5e4fb48f7feb = 1.000/1.000e+00, bd123e6e-a460-45da-b65b-1a36318f88d7 = 1.000/1.000e+00, 8d8a6830-6143-490c-86c6-420041d45af5 = 1.000/1.000e+00, 4b8b4969-25b4-4a0a-86ac-704a54c56c93 = 1.000/1.000e+00, aef090fc-d94c-49b7-b5a6-b321ccc43bac = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.671468239396801, 2.7130296620099856, 2.9881399349587268, 3.299861601031111
LBFGS Accumulation History: 3 points
Removed measurement 6d11205a to history. Total: 3
Adding measurement 4b2a7bf0 to history. Total: 3
th(0)=2.671468239396801;dx=-0.0013857195427109793
Adding measurement 39a8c7ab to history. Total: 4
New Minimum: 2.671468239396801 > 2.667257116492409
WOLF (strong): th(10.772173450159421)=2.667257116492409; dx=6.038675007110795E-4 evalInputDelta=0.004211122904392184
Adding measurement 4cbac0a7 to history. Total: 5
New Minimum: 2.667257116492409 > 2.6666836558538636
END: th(5.386086725079711)=2.6666836558538636; dx=-3.909260209999499E-4 evalInputDelta=0.004784583542937426
Fitness changed from 2.671468239396801 to 2.6666836558538636
Iteration 4 complete. Error: 2.6666836558538636 Total: 28.3558; Orientation: 23.1889; Line Search: 4.6468
Final threshold in iteration 4: 2.6666836558538636 (> 0.0) after 43.568s (< 30.000s)

Returns

    2.6666836558538636

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.5960002096794594, 1.331997215168254, 0.5719969803350794 ], [ 1.5799999385649695, -1.951992030623634, 1.2279961669916089 ], [ -1.2959934150222254, 0.22000040281741157, -0.22800029208883724 ], [ 1.7079988294602364, -0.09599967112290395, 0.9639985412972958 ], [ -1.8639979664594246, 0.3359991622632292, 1.2639950085933325 ], [ 1.907994959917058, -0.5839970339183173, 1.5239990835262762 ], [ -0.07600130198921318, -1.0679978308306068, -0.1520015244274291 ], [ -0.4039999957103058, 0.1639983899977115, -0.8879957570828191 ], ... ],
    	[ [ -1.9799959947176775, -1.167998184101003, 1.8199944417175802 ], [ 0.6479949681326487, -1.5359944706540463, 1.2759969392571262 ], [ 1.7799988294602365, -0.18799658206083364, -1.4279934402862662 ], [ 1.475995198169186, -1.0879970996430426, -1.9839940728867433 ], [ 0.3559992895333109, 0.8880005096200895, -1.6079993965894606 ], [ 0.05999617582446878, -1.9719928932606485, -0.1080027074724773 ], [ -0.05199583862145585, 0.031998817208423354, -1.9159949766017108 ], [ -1.5319983443765925, 0.14800009884055892, 1.7599985084349332 ], ... ],
    	[ [ 1.4199961347465158, 1.3759961224947024, 1.503999765420297 ], [ 1.8319957814761192, -0.6960002051362937, 1.3280000529659683 ], [ -1.0759942365812865, 0.09200327827412595, -1.8439942700609184 ], [ 1.2919990019876397, -0.5319963520242965, 1.9759944663643518 ], [ -0.35199933024746616, 0.9839949558808354, -0.2319955599086444 ], [ 1.2119952638939109, -1.7799994246551856, 1.1719969474727168 ], [ -1.5680003243339304, 0.5679985543095237, -0.5759972112423575 ], [ 0.7239990430655926, 1.2520015283533255, 0.5679995107369881 ], ... ],
    	[ [ 1.9919955021460385, -0.2479971407209953, 1.1839988124117862 ], [ -0.34400041470542697, -1.859998101945097, 1.0839984837881615 ], [ 0.2959972438512484, 0.8760020459355341, -1.0319972112423574 ], [ 1.3959949927794206, 0.7040012490232447, 1.4079961094824744 ], [ 0.8239988869693708, 0.11199972913898135, -1.1920006207124616 ], [ 1.6559947216649304, -1.9919935998014413, -1.2639941632582403 ], [ 0.7920001686015066, -1.4199932383154545, -1.1239991008281986 ], [ 0.3199960525906096, -0.2159971489365859, 1.6439956494094001 ], ... ],
    	[ [ -1.319998590844311, 1.283998850070786, -1.091994787643127 ], [ -0.38799643835957054, 1.8119928362584574, 1.6279993299939943 ], [ 1.9599984351118873, 1.007998266763852, 0.23999737468342883 ], [ 0.13200297011790535, 0.7519971576591196, 0.07599595338625287 ], [ 1.7319970959706172, -0.6519948978647581, 1.7159972925275229 ], [ -0.03199726813422242, -0.3239974036198949, 1.2959975143484692 ], [ -1.7160002339624336, 0.8039984475068458, 1.683997432192563 ], [ 0.6960005465186746, 0.7399956870683999, -1.4799938921437499 ], ... ],
    	[ [ 0.8719980982726722, 1.2999997209233907, 0.8319958301523936 ], [ 0.7759954774992668, -1.8159937312508911, 0.9559955590379035 ], [ -1.267994080485065, -1.7239968120973708, -1.6439950834043886 ], [ -0.9360015320257503, 1.731994290417996, -1.2679954038124228 ], [ 1.4359994209827607, -1.4319999997465283, 1.7000000858283308 ], [ 1.5640003164821374, -0.5600023658366249, -1.6319992733556012 ], [ -1.1959983197298207, -1.671996089125397, -0.8160017955419193 ], [ -0.6039970873912289, 0.7520005589136333, 1.4919960519733402 ], ... ],
    	[ [ 1.4919967509158116, -1.0879955879743697, -0.4519964882703836 ], [ 1.2399976464151885, 0.2720020130731717, 0.9760012524421979 ], [ 1.4320003822068623, -0.6159994821643199, 0.5560026819549646 ], [ 0.7399961018841532, -0.5800002462142467, 0.5119950743180572 ], [ -1.387993530040494, -0.4080002051362937, -1.4719994623141852 ], [ 1.7319947545272927, 1.3679976998881003, 1.367994236327815 ], [ 0.7159945162751651, 1.616000583560405, 0.8319956083314471 ], [ -1.1999970627444572, 0.503994964096426, -1.080000809671046 ], ... ],
    	[ [ -1.5919959947176776, -0.3479957933641348, 1.387993513355841 ], [ -0.5719999874947153, 1.263997897062275, -0.9759941386114684 ], [ -1.5839931521233255, -1.2999971325054047, 1.4479985166505238 ], [ 0.5919970466770738, -1.6279990385224266, 1.9399920181183494 ], [ -1.167997194193907, -0.567995809795316, -0.6639982217600029 ], [ -1.1159937847238026, 1.3920010600646606, -0.027998911869614312 ], [ 1.699998944478505, -1.7759967545882367, -0.29600307717405483 ], [ -1.1799950170623945, 1.863996212866199, 1.0600007759379424 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ -0.23639609523781213, 1.7718034801136815, -0.2314129796930765 ], [ 0.7476036336476979, -1.5121857656782065, 0.42458620696345295 ], [ -2.1283897199394968, 0.659806667762839, -1.0314102521169932 ], [ 0.8756025245429648, 0.34380659382252354, 0.16058858126913989 ], [ -2.696394271376696, 0.7758054272086566, 0.46058504856517657 ], [ 1.0755986549997865, -0.1441907689728898, 0.7205891234981203 ], [ -0.9083976069064847, -0.6281915658851793, -0.955411484455585 ], [ -1.2363963006275773, 0.603804654943139, -1.691405717110975 ], ... ],
    	[ [ -2.812392299634949, -0.7281919191555755, 1.0165844816894243 ], [ -0.1844013367846229, -1.0961882057086187, 0.4725869792289703 ], [ 0.9476025245429649, 0.2518096828845938, -2.231403400314422 ], [ 0.6435988932519143, -0.6481908346976151, -2.7874040329148992 ], [ -0.4763970153839607, 1.3278067745655169, -2.4114093566176162 ], [ -0.7724001290928028, -1.532186628315221, -0.9114126675006332 ], [ -0.8843921435387274, 0.4718050821538508, -2.719404936629867 ], [ -2.364394649293864, 0.5878063637859864, 0.9565885484067773 ], ... ],
    	[ [ 0.5875998298292442, 1.81580238744013, 0.7005898053921411 ], [ 0.9995994765588476, -0.2561939401908662, 0.5245900929378124 ], [ -1.908390541498558, 0.5318095432195534, -2.6474042300890743 ], [ 0.45960269707036816, -0.09219008707886905, 1.172584506336196 ], [ -1.1843956351647378, 1.423801220826263, -1.0354055199368002 ], [ 0.3795989589766393, -1.340193159709758, 0.36858698744456087 ], [ -2.400396629251202, 1.0078048192549511, -1.3794071712705134 ], [ -0.10839726185167897, 1.691807793298753, -0.23541044929116783 ], ... ],
    	[ [ 1.159599197228767, 0.19180912422443216, 0.38058885238363027 ], [ -1.1763967196226985, -1.4201918369996696, 0.28058852376000565 ], [ -0.5363990610660232, 1.3158083108809615, -1.8354071712705133 ], [ 0.5635986878621491, 1.143807513968672, 0.6045861494543185 ], [ -0.00839741794790072, 0.5518059940844088, -1.9954105807406175 ], [ 0.8235984167476589, -1.5521873348560138, -2.067404123286396 ], [ -0.040396136315765, -0.980186973370027, -1.9274090608563546 ], [ -0.512400252326662, 0.22380911600884157, 0.8405856893812442 ], ... ],
    	[ [ -2.1523948957615824, 1.7238051150162135, -1.895404747671283 ], [ -1.2203927432768422, 2.251799101203885, 0.8245893699658384 ], [ 1.1276021301946157, 1.4478045317092796, -0.563412585344727 ], [ -0.7003933347993663, 1.191803422604547, -0.727414006641903 ], [ 0.8996007910533457, -0.2121886329193306, 0.912587332499367 ], [ -0.864393573051494, 0.11580886132553259, 0.4925875543203133 ], [ -2.548396538879705, 1.2438047124522733, 0.8805874721644071 ], [ -0.13639575839859697, 1.1798019520138274, -2.283403852171906 ], ... ],
    	[ [ 0.039601793355400594, 1.7398059858688182, 0.028585870124237744 ], [ -0.056400827418004784, -1.3761874663054636, 0.15258559900974755 ], [ -2.1003903854023367, -1.2841905471519433, -2.4474050434325445 ], [ -1.7683978369430218, 2.1718005553634234, -2.0714053638405785 ], [ 0.6036031160654891, -0.9921937348011007, 0.8965901258001749 ], [ 0.7316040115648659, -0.12019610089119742, -2.435409233383757 ], [ -2.0283946246470923, -1.2321898241799696, -1.6194117555700753 ], [ -1.4363933923085006, 1.1918068238590607, 0.6885860919451843 ], ... ],
    	[ [ 0.6596004459985401, -0.6481893230289422, -1.2554064482985394 ], [ 0.40760134149791694, 0.7118082780185991, 0.17259129241404203 ], [ 0.5996040772895908, -0.17619321721889242, -0.2474072780731913 ], [ -0.09240020303311836, -0.14019398126881927, -0.29141488571009866 ], [ -2.2203898349577655, 0.03180605980913376, -2.2754094223423413 ], [ 0.8995984496100211, 1.8078039648335278, 0.5645842762996591 ], [ -0.11640178864210649, 2.0558068485058323, 0.028585648303291245 ], [ -2.0323933676617285, 0.9438012290418534, -1.883410769699202 ], ... ],
    	[ [ -2.424392299634949, 0.09181047158129269, 0.5845835533276851 ], [ -1.4043962924119868, 1.7038041620077025, -1.7794040986396245 ], [ -2.416389457040597, -0.8601908675599772, 0.6445885566223679 ], [ -0.2403992582401978, -1.188192773576999, 1.1365820580901935 ], [ -2.0003934991111785, -0.1281895448498886, -1.4674081817881588 ], [ -1.9483900896410742, 1.8318073250100881, -0.8314088718977702 ], [ 0.8676026395612335, -1.3361904896428092, -1.0994130372022108 ], [ -2.012391321979666, 2.3038024778116264, 0.2565908159097865 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.15 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, 0.3333152098510613], [7.0, 0.4754009316833127]; valueStats=DoubleSummaryStatistics{count=13, sum=34.898080, min=2.154345, average=2.684468, max=2.988140}
Plotting 7 points for GD
Plotting 2 points for CjGD
Plotting 4 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, 0.3333152098510613], [38.383, 0.4754009316833127]; valueStats=DoubleSummaryStatistics{count=13, sum=34.898080, min=2.154345, average=2.684468, max=2.988140}
Plotting 7 points for GD
Plotting 2 points for CjGD
Plotting 4 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 2.6666836558538636 }, "CjGD": { "type": "NonConverged", "value": 2.154344785495122 }, "GD": { "type": "NonConverged", "value": 2.6662587593945153 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details: {"input":{ "LBFGS": { "type": "NonConverged", "value": 2.6666836558538636 }, "CjGD": { "type": "NonConverged", "value": 2.154344785495122 }, "GD": { "type": "NonConverged", "value": 2.6662587593945153 } }, "model":null, "complete":null}
result: OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "119.645",
      "gc_time": "6.683"
    },
    "created_on": 1586739835280,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.GateBiasLayerTest.Double",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/GateBiasLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 2.6666836558538636
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 2.154344785495122
        },
        "GD": {
          "type": "NonConverged",
          "value": 2.6662587593945153
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/GateBiasLayer/Double/trainingTest/202004130355",
    "id": "dcc26a1b-d2c4-49fe-a4c8-f8394b46a9d4",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "GateBiasLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.GateBiasLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/GateBiasLayer.java",
      "javaDoc": ""
    }
  }