1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 7878959241017491456

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output; a minimal standalone sketch of this input-learning setup follows the data listing below:

TrainingTester.java:332 executed in 0.04 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.188, -1.868, -0.172 ], [ 0.58, -0.208, 1.784 ], [ -0.8, 1.78, -1.848 ], [ 1.868, 0.796, -1.456 ], [ -1.688, -0.632, 0.36 ], [ -1.924, 1.856, 0.876 ], [ -0.836, -0.4, -0.496 ], [ -0.46, -1.632, -0.88 ], ... ],
    	[ [ 0.54, -0.692, 0.956 ], [ 0.568, 1.428, 1.912 ], [ 0.192, 1.272, -1.24 ], [ 0.204, 0.86, -1.844 ], [ -0.704, -1.456, -0.288 ], [ -1.78, -1.4, -0.912 ], [ -1.064, -1.816, 1.124 ], [ 1.788, -1.432, 1.556 ], ... ],
    	[ [ -0.272, 0.06, -1.116 ], [ 1.188, 1.736, 1.664 ], [ 0.36, -0.812, -0.888 ], [ 1.536, 0.004, 1.864 ], [ -1.448, 1.772, -1.712 ], [ 0.852, -1.76, 1.86 ], [ -1.308, -1.776, -0.46 ], [ -0.012, 0.124, -0.58 ], ... ],
    	[ [ 0.06, 1.54, 1.104 ], [ -1.412, -0.296, -0.968 ], [ -1.108, -0.568, 1.82 ], [ 0.336, -1.728, 0.228 ], [ 0.384, -0.352, 1.176 ], [ 1.748, 1.22, -1.24 ], [ -0.8, -0.236, -0.236 ], [ 0.424, 0.668, 0.552 ], ... ],
    	[ [ -1.588, -1.78, -1.3 ], [ 0.652, 0.02, -1.288 ], [ 0.776, 1.948, 0.072 ], [ 1.936, 0.852, 1.924 ], [ -0.044, -0.628, -1.44 ], [ -1.912, 0.204, -0.596 ], [ 0.928, -0.756, 1.268 ], [ -1.748, 1.804, 0.1 ], ... ],
    	[ [ -1.92, 1.924, -0.828 ], [ 0.884, -0.656, 1.276 ], [ 0.98, 1.388, -0.68 ], [ -0.112, 1.952, -0.492 ], [ -0.212, -1.556, -0.064 ], [ 0.404, -0.588, -0.804 ], [ -1.848, -1.556, 0.54 ], [ -0.22, -1.568, -1.832 ], ... ],
    	[ [ -1.652, 0.312, -0.34 ], [ -0.74, -0.168, -1.684 ], [ -1.504, 1.072, -0.976 ], [ -0.704, -0.42, -1.344 ], [ -0.384, 0.208, -0.228 ], [ 1.768, -0.208, 1.664 ], [ -0.552, 0.152, -1.332 ], [ -1.308, -1.576, -1.88 ], ... ],
    	[ [ -1.68, -1.86, 0.776 ], [ 0.88, 0.152, 1.424 ], [ -1.748, -1.104, 1.932 ], [ -1.884, 0.404, -0.32 ], [ 0.712, -0.484, 0.22 ], [ 0.252, -1.324, 0.504 ], [ -0.848, 1.916, 1.16 ], [ -0.872, 1.792, -1.592 ], ... ],
    	...
    ]
    [
    	[ [ 1.596, -0.388, -1.94 ], [ -1.488, -1.908, 1.328 ], [ -0.4, 0.5, -1.468 ], [ -0.56, -0.32, -1.808 ], [ -1.328, 1.144, -1.34 ], [ 1.508, -0.832, -0.692 ], [ 1.276, 0.092, 1.452 ], [ 0.3, -0.968, -0.68 ], ... ],
    	[ [ 0.652, -0.436, 1.552 ], [ 0.404, 0.088, 1.068 ], [ -1.428, -0.532, 0.876 ], [ -1.944, -1.976, 0.168 ], [ -1.716, -0.22, -1.632 ], [ -1.46, -0.016, -0.696 ], [ 1.112, 0.092, -0.344 ], [ -1.3, 0.86, -1.5 ], ... ],
    	[ [ -1.452, 0.224, -1.84 ], [ 1.94, -1.512, 1.308 ], [ 0.956, -1.26, -0.28 ], [ -0.752, 0.968, 1.392 ], [ 1.048, -1.94, 0.028 ], [ 1.164, -0.252, -1.66 ], [ -0.972, 1.052, 0.26 ], [ -1.568, 1.428, -1.132 ], ... ],
    	[ [ 0.26, 1.848, 1.404 ], [ -0.592, 0.148, -1.684 ], [ -0.344, 0.252, -1.012 ], [ -1.228, 0.144, 0.096 ], [ 0.34, -1.444, 1.968 ], [ 1.652, -1.548, -0.808 ], [ 1.992, -1.66, 1.232 ], [ -1.196, -0.248, -0.16 ], ... ],
    	[ [ 1.712, 1.716, 0.148 ], [ -1.692, 0.612, 0.136 ], [ -1.78, 0.948, -0.348 ], [ -0.048, 1.82, -1.516 ], [ -0.864, -0.372, 1.576 ], [ -1.312, 1.592, -1.892 ], [ 1.932, 1.128, 1.388 ], [ -1.296, -1.556, -1.6 ], ... ],
    	[ [ 1.352, -0.728, 0.524 ], [ 1.912, -1.64, -1.668 ], [ 1.968, 0.232, -0.696 ], [ -0.048, 1.768, 1.76 ], [ 0.444, -1.24, 1.036 ], [ -1.552, 0.476, -0.848 ], [ -0.844, 1.708, -0.284 ], [ -1.4, -1.008, -1.272 ], ... ],
    	[ [ -0.092, -0.128, 1.444 ], [ 0.22, 1.656, -1.48 ], [ -1.06, -1.352, 1.324 ], [ -0.608, 1.488, -0.644 ], [ -0.74, 0.564, 0.796 ], [ 0.86, 1.18, 1.76 ], [ -1.396, -0.116, -0.744 ], [ -1.924, -1.664, 1.296 ], ... ],
    	[ [ 1.828, -1.072, -1.7 ], [ 0.704, 0.012, -1.236 ], [ -1.84, 0.58, -1.636 ], [ 1.764, -1.396, 0.04 ], [ 0.528, -1.272, 1.7 ], [ -0.272, -1.924, -1.712 ], [ 0.764, -0.924, 1.36 ], [ 1.756, 1.492, -1.136 ], ... ],
    	...
    ]
    [
    	[ [ 1.528, -0.6, 1.58 ], [ -1.744, -0.552, -1.408 ], [ 0.268, -0.46, -1.32 ], [ -1.716, -1.168, -0.74 ], [ 1.624, -1.708, 0.428 ], [ -1.004, 0.284, 0.128 ], [ -0.98, 0.284, -0.124 ], [ 0.4, -1.832, 1.86 ], ... ],
    	[ [ 0.444, -1.824, -1.688 ], [ 0.28, 1.388, 0.788 ], [ 0.756, -1.364, -0.084 ], [ 1.828, 0.28, 1.168 ], [ 1.228, 1.944, -0.904 ], [ 1.276, 0.504, 0.732 ], [ 1.416, 0.304, 0.852 ], [ 0.392, 1.128, 0.26 ], ... ],
    	[ [ 0.304, -1.6, -0.32 ], [ 0.312, -0.176, -1.7 ], [ -0.732, 1.464, -1.788 ], [ 0.508, 1.168, -1.352 ], [ 1.76, -1.572, 0.048 ], [ -0.068, 0.152, 0.992 ], [ 0.496, -0.92, -0.784 ], [ 1.04, 0.484, -1.26 ], ... ],
    	[ [ -1.108, -1.22, -0.72 ], [ 1.9, -1.808, 1.224 ], [ 1.684, -0.004, -0.708 ], [ -1.704, 1.572, 0.952 ], [ -0.264, 0.18, -0.66 ], [ -1.508, 0.104, -0.892 ], [ 0.0, 1.72, -0.124 ], [ 1.84, -1.516, 0.228 ], ... ],
    	[ [ -1.668, 0.08, -0.316 ], [ -1.188, -1.064, -1.404 ], [ -1.584, -0.16, 0.544 ], [ -0.988, 1.216, 0.832 ], [ 1.176, -1.22, -1.228 ], [ 1.708, 1.432, -1.28 ], [ -0.276, 1.704, -1.492 ], [ 0.54, -0.272, 0.08 ], ... ],
    	[ [ 0.9, -0.028, -0.236 ], [ 0.572, -1.044, 0.16 ], [ -0.644, 0.592, 0.184 ], [ 1.808, 0.268, -0.576 ], [ 0.8, 1.716, -1.936 ], [ 1.716, 1.036, 1.792 ], [ 0.068, -0.1, 1.432 ], [ 0.832, -1.436, -1.408 ], ... ],
    	[ [ -1.964, -0.004, 1.112 ], [ -1.768, 0.332, -1.216 ], [ -1.088, 0.336, -0.324 ], [ -0.26, -1.6, -1.156 ], [ 1.496, -1.356, -1.328 ], [ 0.224, -0.164, -1.896 ], [ 1.376, 0.516, 0.452 ], [ -1.992, 1.268, -0.56 ], ... ],
    	[ [ 0.684, 0.532, 0.708 ], [ -0.264, -0.46, -0.744 ], [ 0.74, 1.712, -0.152 ], [ -1.04, -1.92, 1.428 ], [ 0.872, -0.42, -0.432 ], [ 0.604, -1.376, 1.628 ], [ -0.904, -1.996, -0.864 ], [ 0.732, 1.772, -0.208 ], ... ],
    	...
    ]
    [
    	[ [ -0.776, -1.976, 1.16 ], [ -0.516, -0.216, -1.0 ], [ 1.424, -0.488, 0.34 ], [ 1.076, 0.736, 1.276 ], [ 0.584, 0.092, 0.06 ], [ -1.004, -1.428, 0.152 ], [ 1.364, -1.268, 0.828 ], [ -0.12, 1.22, -0.688 ], ... ],
    	[ [ -0.252, -0.64, 1.508 ], [ 0.684, 0.936, -0.92 ], [ 1.944, 0.364, -0.764 ], [ 0.32, -0.8, 1.796 ], [ 1.236, 0.948, -1.228 ], [ -1.116, 0.004, 0.452 ], [ 1.968, 0.036, -0.792 ], [ -1.5, -1.344, 1.496 ], ... ],
    	[ [ -0.736, -0.3, -1.852 ], [ -1.22, -1.132, -0.104 ], [ 0.284, 1.068, -0.348 ], [ 0.552, 1.372, -1.644 ], [ 1.168, 0.14, -0.348 ], [ 0.584, -0.724, -1.396 ], [ 0.656, 1.124, 1.812 ], [ -1.476, -0.008, -1.184 ], ... ],
    	[ [ -1.152, -1.6, 0.004 ], [ 0.776, -1.624, 0.236 ], [ -1.656, -0.828, 1.284 ], [ -1.216, 0.572, 1.664 ], [ -0.492, -0.196, -0.28 ], [ -1.988, -0.216, -0.008 ], [ 1.26, 1.472, -0.28 ], [ -0.624, 1.112, 1.908 ], ... ],
    	[ [ 1.428, 0.064, 1.176 ], [ 1.032, -1.312, -0.488 ], [ 0.988, 1.148, 1.736 ], [ -1.984, -0.72, 1.952 ], [ -1.388, 1.972, 1.284 ], [ 0.312, -0.14, 1.772 ], [ 1.884, 0.34, 0.656 ], [ 0.552, 1.736, -1.988 ], ... ],
    	[ [ -1.132, -1.8, -1.4 ], [ -1.932, 0.972, -1.56 ], [ -0.612, 1.316, -1.668 ], [ 1.124, -1.484, -0.132 ], [ 1.52, 0.488, -1.452 ], [ -1.152, 1.072, 1.476 ], [ 0.224, -0.336, 0.916 ], [ 0.464, -1.928, 0.06 ], ... ],
    	[ [ -1.46, 1.604, 0.608 ], [ -0.676, 1.008, 1.268 ], [ -1.688, 1.36, 1.688 ], [ 0.34, 1.5, 0.948 ], [ 0.228, 0.956, 0.856 ], [ 1.568, 0.416, 1.944 ], [ 1.584, -1.004, 0.728 ], [ 1.8, -0.108, -1.316 ], ... ],
    	[ [ -0.54, -1.68, 0.38 ], [ -1.888, -0.6, 1.132 ], [ 0.528, -1.468, -0.524 ], [ -0.164, -1.636, 0.052 ], [ 0.624, 0.576, -0.428 ], [ -1.44, 1.708, 1.928 ], [ 1.728, -1.064, -1.908 ], [ -1.104, -0.976, 0.644 ], ... ],
    	...
    ]
    [
    	[ [ 0.0, -0.444, -1.876 ], [ -1.1, -1.728, -1.848 ], [ -0.672, 0.42, -1.732 ], [ 1.068, -1.216, -0.696 ], [ 1.364, -0.7, 0.616 ], [ -1.852, 1.096, 0.044 ], [ 0.464, -0.932, -0.488 ], [ -1.12, -0.028, 1.548 ], ... ],
    	[ [ -0.74, 1.4, 0.22 ], [ -1.516, 1.08, -1.792 ], [ -0.772, 1.276, -0.848 ], [ -1.924, 1.448, 1.592 ], [ -0.22, -0.756, 0.756 ], [ -0.22, 0.388, 0.284 ], [ 1.884, -1.832, -1.044 ], [ 1.568, -1.092, -0.656 ], ... ],
    	[ [ -0.764, -0.124, -1.384 ], [ 0.08, 0.476, 0.38 ], [ -1.2, -0.16, 0.252 ], [ 1.728, -0.312, 0.248 ], [ -1.092, -1.388, -0.66 ], [ -0.672, 0.52, 0.108 ], [ 0.784, 1.932, 1.964 ], [ 0.036, -1.056, 1.488 ], ... ],
    	[ [ -1.484, -0.644, -0.36 ], [ 0.352, -0.628, 1.332 ], [ -0.86, 0.644, 0.428 ], [ -0.524, -1.476, 0.2 ], [ 0.536, -0.876, 0.524 ], [ -0.448, 0.952, -1.312 ], [ -0.676, -1.728, 1.884 ], [ -0.428, 1.116, -0.364 ], ... ],
    	[ [ 0.364, 1.48, -0.24 ], [ -0.24, 0.072, 1.86 ], [ 0.824, -1.712, 1.428 ], [ -0.312, 0.828, -0.352 ], [ -0.38, 0.624, 0.44 ], [ 0.964, 1.66, -1.808 ], [ 1.54, -0.456, 0.384 ], [ 0.18, 0.444, 1.732 ], ... ],
    	[ [ -1.344, 1.552, 0.34 ], [ -1.488, -1.176, 1.592 ], [ -0.024, -0.068, 0.56 ], [ -1.752, 0.916, -0.62 ], [ -1.112, 1.108, 0.592 ], [ 1.7, 0.816, 1.36 ], [ -0.128, -1.712, -1.176 ], [ 1.356, 0.548, 0.312 ], ... ],
    	[ [ 0.58, 1.464, -1.864 ], [ 1.988, 0.936, 1.668 ], [ -1.156, -1.364, -0.572 ], [ -1.776, -1.716, 0.52 ], [ -1.848, 0.648, -0.572 ], [ 0.904, 0.368, -1.576 ], [ -0.248, 1.94, -0.816 ], [ 1.992, -1.732, -1.872 ], ... ],
    	[ [ -1.544, 0.016, -1.404 ], [ 1.416, -0.188, 0.636 ], [ 1.244, 1.084, 1.844 ], [ 0.04, -0.8, -1.332 ], [ -1.336, -1.7, -0.948 ], [ -0.428, 0.772, 1.552 ], [ -1.44, -1.384, 1.52 ], [ 0.624, 1.14, 0.376 ], ... ],
    	...
    ]
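
The setup above treats the network as fixed and the input tensors as the trainable variables: the optimizer adjusts the input until the network's output matches the pre-evaluated target. As a rough standalone illustration (not the MindsEye API; the function, dimensions, and learning rate below are invented for the example), input learning on a fixed elementwise map looks like this:

    import java.util.Arrays;

    /** Minimal sketch of "input learning": gradient descent on the input of a fixed function. */
    public class InputLearningSketch {
      public static void main(String[] args) {
        double[] w = {0.5, -1.25, 2.0};         // fixed "network": elementwise scaling, standing in for a frozen layer
        double[] target = {1.0, 2.0, -0.5};     // pre-evaluated target output to reproduce
        double[] x = {-0.188, -1.868, -0.172};  // trainable input, initialized at random
        double lr = 0.1;
        for (int iter = 0; iter < 200; iter++) {
          double loss = 0;
          for (int i = 0; i < x.length; i++) {
            double r = w[i] * x[i] - target[i];     // residual of f(x) against the target
            loss += r * r / x.length;               // mean squared error
            x[i] -= lr * 2 * r * w[i] / x.length;   // step the INPUT, not the weights
          }
          if (iter % 50 == 0) System.out.println("iter " + iter + " loss " + loss);
        }
        System.out.println("regressed input: " + Arrays.toString(x));
      }
    }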

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.
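
The ArmijoWolfeSearch used here accepts a trial step once it satisfies the weak Wolfe conditions (sufficient decrease plus a curvature bound), which is what the WOLFE (weak) lines in the log below report. A minimal sketch of that acceptance test, using the textbook definitions rather than the actual MindsEye implementation (the constants and method name are illustrative):

    /** Textbook weak Wolfe acceptance test for a step of size t along a descent direction. */
    class WolfeCheck {
      /**
       * @param f0 objective value at the current point
       * @param g0 directional derivative at the current point (negative for a descent direction)
       * @param ft objective value at the trial point
       * @param gt directional derivative at the trial point
       * @param t  trial step size
       */
      static boolean weakWolfe(double f0, double g0, double ft, double gt, double t) {
        double c1 = 1e-4, c2 = 0.9;               // typical constants, 0 < c1 < c2 < 1
        boolean armijo = ft <= f0 + c1 * t * g0;  // sufficient decrease
        boolean curvature = gt >= c2 * g0;        // weak curvature condition
        return armijo && curvature;
      }
    }

In the log below, th(t) appears to be the objective value at trial step t, dx the directional derivative there, and evalInputDelta the improvement over th(0).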

TrainingTester.java:480 executed in 31.16 seconds (3.256 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3825783528567
Reset training subject: 3827291382742
Constructing line search parameters: GD
th(0)=2.6674491644859257;dx=-4.939720674973957E-7
New Minimum: 2.6674491644859257 > 2.6674481002554744
WOLFE (weak): th(2.154434690031884)=2.6674481002554744; dx=-4.939719689575284E-7 evalInputDelta=1.0642304513197587E-6
New Minimum: 2.6674481002554744 > 2.6674470360252345
WOLFE (weak): th(4.308869380063768)=2.6674470360252345; dx=-4.939718704176601E-7 evalInputDelta=2.1284606912530535E-6
New Minimum: 2.6674470360252345 > 2.6674427791063984
WOLFE (weak): th(12.926608140191302)=2.6674427791063984; dx=-4.939714762581909E-7 evalInputDelta=6.385379527351631E-6
New Minimum: 2.6674427791063984 > 2.667423623013673
WOLFE (weak): th(51.70643256076521)=2.667423623013673; dx=-4.939697025406018E-7 evalInputDelta=2.5541472252754716E-5
New Minimum: 2.667423623013673 > 2.6673214583474945
WOLFE (weak): th(258.53216280382605)=2.6673214583474945; dx=-4.939602427134131E-7 evalInputDelta=1.2770613843127165E-4
New Minimum: 2.6673214583474945 > 2.666682973511643
WOLFE (weak): th(1551.1929768229563)=2.666682973511643; dx=-4.939011187935237E-7 evalInputDelta=7.661909742826545E-4
New Minimum: 2.666682973511643 > 2.662088138823699
WOLFE (weak): th(10858.350837760694)=2.662088138823699; dx=-4.934754265702667E-7 evalInputDelta=0.005361025662226737
New Minimum: 2.662088138823699 > 2.6247119548280673
WOLFE (weak): th(86866.80670208555)=2.6247119548280673; dx=-4.899989400803986E-7 evalInputDelta=0.04273720965785843
New Minimum: 2.6247119548280673 > 2.2952390616532505
WOLFE (weak): th(781801.26031877)=2.2952390616532505; dx=-4.5821392074445784E-7 evalInputDelta=0.3722101028326752
New Minimum: 2.2952390616532505 > 0.20335752507414168
END: th(7818012.6031877)=0.20335752507414168; dx=-1.3639059996801296E-7 evalInputDelta=2.464091639411784
Fitness changed from 2.6674491644859257 to 0.20335752507414168
Iteration 1 complete. Error: 0.20335752507414168 Total: 20.1543; Orientation: 0.7024; Line Search: 16.6710
th(0)=0.20335752507414168;dx=-3.7658800939655705E-8
New Minimum: 0.20335752507414168 > 0.06367589625352194
WOLF (strong): th(1.684339755941405E7)=0.06367589625352194; dx=2.107288015640517E-8 evalInputDelta=0.13968162882061974
New Minimum: 0.06367589625352194 > 0.00986157890962193
END: th(8421698.779707026)=0.00986157890962193; dx=-8.292960391625336E-9 evalInputDelta=0.19349594616451976
Fitness changed from 0.20335752507414168 to 0.00986157890962193
Iteration 2 complete. Error: 0.00986157890962193 Total: 6.0217; Orientation: 0.7860; Line Search: 4.6061
th(0)=0.00986157890962193;dx=-1.8262183165966653E-9
New Minimum: 0.00986157890962193 > 0.004559994087809182
WOLF (strong): th(1.8144E7)=0.004559994087809182; dx=1.241828455285735E-9 evalInputDelta=0.005301584821812747
New Minimum: 0.004559994087809182 > 2.524564200863212E-4
END: th(9072000.0)=2.524564200863212E-4; dx=-2.921949306554633E-10 evalInputDelta=0.009609122489535608
Fitness changed from 0.00986157890962193 to 2.524564200863212E-4
Iteration 3 complete. Error: 2.524564200863212E-4 Total: 4.9730; Orientation: 0.7797; Line Search: 3.5560
Final threshold in iteration 3: 2.524564200863212E-4 (> 0.0) after 31.150s (< 30.000s)

Returns

    2.524564200863212E-4

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.18500362716935667, -1.8574543241934502, -0.17254479506011694 ], [ 0.5929194257113453, -0.20663801234970758, 1.7673059228007015 ], [ -0.7729159027256136, 1.7594534434470173, -1.8254299189380114 ], [ 1.835779263587368, 0.8024597128556726, -1.429577439584327 ], [ -1.6629005133017538, -0.640288667700351, 0.34194393515040905 ], [ -1.9117031972145027, 1.8289937305913446, 0.8582941605461986 ], [ -0.843510389043041, -0.3800371524399997, -0.48876200848701745 ], [ -0.46568143419836266, -1.612854345030175, -0.8862651431913451 ], ... ],
    	[ [ 0.531322192970994, -0.7031682987323978, 0.9430416603557892 ], [ 0.5807248560470177, 1.4013828699199995, 1.8898579721995317 ], [ 0.18830317637777771, 1.2489629517436254, -1.2305050003808184 ], [ 0.18909596371251436, 0.8708569872694738, -1.8173050420542687 ], [ -0.6791729108318124, -1.4370489146945027, -0.2855095082966081 ], [ -1.7801167417985966, -1.399455204939883, -0.8927376032315787 ], [ -1.056411783091228, -1.814676926282573, 1.0991729108318125 ], [ 1.7689710868287716, -1.4327004507915788, 1.5269312921494733 ], ... ],
    	[ [ -0.26756381165333326, 0.05692579930362568, -1.1124199181763743 ], [ 1.188233483597193, 1.7319918649148538, 1.6514307996844442 ], [ 0.3601167417985965, -0.7992362300201168, -0.8861710451553216 ], [ 1.5301239961373099, 0.003299549208421041, 1.8528706152004677 ], [ -1.429554795821754, 1.7702488730210526, -1.705501373211462 ], [ 0.8563194465480702, -1.7584045287525145, 1.8395312713127483 ], [ -1.3043420903106433, -1.77755655731462, -0.44614663989988285 ], [ -0.0072135862575437824, 0.14155018372233946, -0.5842416186823393 ], ... ],
    	[ [ 0.05455204939883032, 1.515678791959064, 1.0939212913878362 ], [ -1.4168253276753215, -0.28354754148304073, -0.9631746723246782 ], [ -1.0794371732767247, -0.5732922948697077, 1.7921765380011692 ], [ 0.33195295098198824, -1.703133996898947, 0.21383532843695885 ], [ 0.37345432419345015, -0.36573661830152066, 1.1602787711223388 ], [ 1.7134833415483035, 1.2273936472444444, -1.2242787711223388 ], [ -0.7848624801153214, -0.21981180392795296, -0.24864702818128676 ], [ 0.41754028714432734, 0.6555475414830407, 0.5373294473097074 ], ... ],
    	[ [ -1.5576860462977773, -1.78077827865731, -1.284745738316725 ], [ 0.6389249185571928, 0.03657733540070203, -1.279555676568187 ], [ 0.7643258201403507, 1.9353140578858476, 0.08702077808608212 ], [ 1.9145195090582452, 0.8553465982264328, 1.9025195090582452 ], [ -0.039369241989005765, -0.6087376032315787, -1.4112036896795317 ], [ -1.8989249185571926, 0.20606243844187136, -0.5860380331864325 ], [ 0.9296343851803509, -0.7458434635221051, 1.253524016974035 ], [ -1.7121602678308767, 1.7988633608617544, 0.09533032805614028 ], ... ],
    	[ [ -1.882253485120467, 1.8930245094390639, -0.8121231153908769 ], [ 0.8745828282465495, -0.6499683404058478, 1.2456860462977772 ], [ 0.98821083983462, 1.362200062510175, -0.681245245851696 ], [ -0.11277827865730995, 1.935033525270643, -0.4771348776453799 ], [ -0.21441266383766086, -1.5467384839780116, -0.06987600386269016 ], [ 0.39353215205918113, -0.5951212497143861, -0.7815077468037424 ], [ -1.833446189108304, -1.5265421528208183, 0.5486388930961406 ], [ -0.23443706909309964, -1.5678443442685381, -1.8188470906914618 ], ... ],
    	[ [ -1.616588321092397, 0.3005593037375437, -0.33248961095695895 ], [ -0.7331122338828069, -0.17006243844187138, -1.6746606561122805 ], [ -1.494543914313684, 1.0639837298297075, -0.9659991192535671 ], [ -0.6802625009520463, -0.4075864554159062, -1.3428325820140352 ], [ -0.36940727517543837, 0.22239815516023415, -0.23566604477450306 ], [ 1.734689673467134, -0.21224161868233923, 1.637266128121403 ], [ -0.5370181358467834, 0.14258282824654955, -1.3365918440781288 ], [ -1.2904498162776605, -1.5490326445242102, -1.876419918176374 ], ... ],
    	[ [ -1.6657185866383624, -1.8329159027256137, 0.775766516402807 ], [ 0.8529937305913445, 0.1467466190631578, 1.3930245094390639 ], [ -1.7125883210923971, -1.0756706568739178, 1.9208317012676022 ], [ -1.8522462307817538, 0.41649137244982476, -0.3064968652956723 ], [ 0.686044406778713, -0.47271495946900566, 0.22439727441380125 ], [ 0.24908145503508766, -1.299678791959064, 0.5086307580109942 ], [ -0.8207991609270171, 1.9097737707415203, 1.1572760246994152 ], [ -0.8540217630161401, 1.7759285457265495, -1.5862796518687718 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.11 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.2662877869539182, -1.5091186074750873, 1.0471601636472516 ], [ 1.986054303356725, 1.6001167417985966, 0.33382081975953215 ], [ -0.9636706568739177, -0.7031729108318124, -0.8969873569990642 ], [ 0.9705276441433915, -0.5727765171644441, -1.408987356999064 ], [ 0.19569506212935645, -1.8393982593438594, -1.4843502253957894 ], [ -0.5548860046243272, -1.4410181358467833, 0.9436027255861987 ], [ 1.5207294681464325, -1.1468018031663156, -0.0600534226102922 ], [ 1.796946681573333, -1.6608462099450287, -1.5250425411022221 ], ... ],
    	[ [ 1.2061466398998828, -0.9691575214079529, 1.1840470490180117 ], [ -1.676045287525146, 1.9442073168488885, -0.31477915940374285 ], [ 0.6326470281812868, 1.1943574797345027, -0.24249049170339185 ], [ 1.7859602053207015, -0.20263801234970757, 0.5567294681464323 ], [ -0.0855420486371932, 1.65682180468959, -0.5183737499047953 ], [ -0.8951990775801171, 0.9171203689679532, 0.0669022747946198 ], [ 0.11632758163321652, 1.605579201077193, 0.45551852412818733 ], [ 0.6963258201403506, 1.2294154102605845, -1.8328154310973097 ], ... ],
    	[ [ -0.0879854913225733, -0.6788308205211693, 0.7666832998748536 ], [ -0.09884071709918132, 1.505079693542222, -1.561989222675555 ], [ -1.1903719884119295, -0.775133996898947, -0.5879076634568423 ], [ 0.8408769887927481, 1.6482073168488882, 0.7210407796093563 ], [ 0.1794389347695905, 0.6659846105761404, 0.39663075801099423 ], [ 0.3379927456612867, -1.8449385464881864, -0.27089590120233936 ], [ 0.23367967270549683, -0.6395620501604675, -1.3619131563026898 ], [ 1.1057267217235087, 1.7798733616233915, -1.8229946113377775 ], ... ],
    	[ [ 0.6048851238778944, -0.4160633191883043, 0.7938190582666662 ], [ -1.9326045912626895, -0.12164075877263135, 0.6629185449649123 ], [ 1.7530262709319298, 1.328411783091228, -0.13451313546596483 ], [ -1.7223086692236254, -0.48656831956912283, -0.24888051177847975 ], [ -0.8043275816332166, 0.3315864554159062, 1.6706525210271341 ], [ 0.3369728483216375, 1.5972516194439765, -0.5516271308416376 ], [ -1.063510389043041, 1.3572289756814035, -1.1821855538327481 ], [ -1.511688688537076, 1.1050814550350878, -0.1920389139328655 ], ... ],
    	[ [ 1.9614606977857305, 0.03018555383274832, 1.0593221929709942 ], [ -0.4693393438877195, 1.2303574797345027, -0.8295013732114618 ], [ 0.977733095315789, -1.461165656493099, 0.3764199181763742 ], [ -0.5889647132364911, 0.7811203689679532, 1.0669702060823387 ], [ -0.6108407170991813, 1.6540751856264322, 0.09965790968935678 ], [ -1.1403095499700582, -0.903133996898947, 1.7217656356563742 ], [ -0.3201393855611695, 0.3414171717534505, 1.7654398155160234 ], [ 1.1740833207115784, 0.5566905542135668, -0.3397728899950874 ], ... ],
    	[ [ -0.4139764754909941, 1.139164775746666, 1.5614624592785964 ], [ -1.034277890375906, -0.38016202932374243, -0.18464526668842093 ], [ 0.8534543241934501, -0.08362536934877174, -1.3265828282465497 ], [ 1.3605203898046783, -1.057034406017076, 0.19139188575157873 ], [ 0.15166428328163722, -1.8415710659920468, 0.46052038980467813 ], [ 0.7066624176051464, -1.6031258618138007, 0.16120633191883044 ], [ -0.7315864554159062, 1.9856407587726315, 1.0963113114629242 ], [ -0.748522151297544, 1.6782180941733333, 1.8401765380011692 ], ... ],
    	[ [ -0.24476376997988325, -1.4811964353408182, 1.6631828074098245 ], [ -0.0068860046243272854, -0.9572353492736838, -1.0451267425602337 ], [ 0.8309493238126315, 1.5999819683368415, 0.9748308205211693 ], [ -0.5497493654860816, 0.01704891469450262, -0.4050489146945026 ], [ 1.7259747139981283, 1.7008235661824558, -1.8093276858168417 ], [ -1.7249095291333332, 1.671631742941052, 0.6286081142484212 ], [ 0.19360184483976584, 0.48489502045590643, -1.5636416395190644 ], [ 1.322744857570292, -1.8624579513628068, -1.6406226229258478 ], ... ],
    	[ [ 1.1040923365431579, 1.032077827865731, -1.2889484430661986 ], [ 1.228979221913918, 0.5064434426853801, -0.32716829873239783 ], [ 0.942355718241637, 0.650433546107368, 0.28902803242479547 ], [ -1.122076947119298, -1.1477049587073684, 1.3458597336923976 ], [ -1.494067050541286, 1.662978341167485, -0.022709466623158134 ], [ 0.6131122338828069, 0.18198461057614038, 0.6478814967085378 ], [ 0.3317212288776611, 0.10090315554105271, -0.8729954920842105 ], [ 0.11131581937871357, -1.9651186074750873, -0.9756706568739177 ], ... ],
    	...
    ]

Conjugate Gradient Descent

Next, we use a conjugate gradient descent method, which converges the fastest on purely quadratic objectives.
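
The QuadraticSearch used here evaluates the objective at increasing step sizes until it brackets a minimum along the search direction (visible in the log below), presumably refining the bracket with quadratic interpolation, as the name suggests. As a rough illustration of a textbook single-step quadratic fit (not the actual QuadraticSearch code; names below are illustrative):

    /** One quadratic-interpolation step of a line search along a descent direction. */
    class QuadraticStep {
      /**
       * Fits q(t) = fa + ga*t + c*t^2 through f(0)=fa, f'(0)=ga and f(b)=fb,
       * and returns the minimizer of q (assumes ga < 0 and a convex fit).
       */
      static double interpolate(double fa, double ga, double b, double fb) {
        double c = (fb - fa - ga * b) / (b * b);  // curvature of the fitted quadratic
        if (c <= 0) return b;                     // no interior minimum; keep the bracket end
        return -ga / (2 * c);                     // argmin of fa + ga*t + c*t^2
      }
    }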

TrainingTester.java:452 executed in 39.05 seconds (1.620 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3857305661241
Reset training subject: 3857927291705
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=2.6674491644859257}, derivative=-4.939720674973957E-7}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=2.6674491644859257}, derivative=-4.939720674973957E-7}, evalInputDelta = 0.0
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=2.6674491644859257}, derivative=-4.939720674973957E-7}, evalInputDelta = 0.0
New Minimum: 2.6674491644859257 > 2.6674491644859235
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=2.6674491644859235}, derivative=-4.939720674973956E-7}, evalInputDelta = -2.220446049250313E-15
New Minimum: 2.6674491644859235 > 2.6674491644859093
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=2.6674491644859093}, derivative=-4.939720674973945E-7}, evalInputDelta = -1.6431300764452317E-14
New Minimum: 2.6674491644859093 > 2.667449164485807
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=2.667449164485807}, derivative=-4.939720674973855E-7}, evalInputDelta = -1.1857181902996672E-13
New Minimum: 2.667449164485807 > 2.667449164485096
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=2.667449164485096}, derivative=-4.939720674973215E-7}, evalInputDelta = -8.29558643999917E-13
New Minimum: 2.667449164485096 > 2.6674491644801144
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=2.6674491644801144}, derivative=-4.939720674968581E-7}, evalInputDelta = -5.811351400097919E-12
New Minimum: 2.6674491644801144 > 2.667449164445245
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=2.667449164445245}, derivative=-4.939720674936258E-7}, evalInputDelta = -4.0680792068314986E-11
New Minimum: 2.667449164445245 > 2.667449164201161
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=2.667449164201161}, derivative=-4.939720674710284E-7}, evalInputDelta = -2.847646562997852E-10
New Minimum: 2.667449164201161 > 2.667449162492571
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=2.667449162492571}, derivative=-4.939720673128246E-7}, evalInputDelta = -1.9933548145445457E-9
New Minimum: 2.667449162492571 > 2.6674491505324376
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=2.6674491505324376}, derivative=-4.939720662054023E-7}, evalInputDelta = -1.3953488142703918E-8
New Minimum: 2.6674491505324376 > 2.6674490668115087
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=2.6674490668115087}, derivative=-4.939720584534615E-7}, evalInputDelta = -9.767441699892743E-8
New Minimum: 2.6674490668115087 > 2.6674484807650445
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=2.6674484807650445}, derivative=-4.939720041898991E-7}, evalInputDelta = -6.837208812449092E-7
New Minimum: 2.6674484807650445 > 2.6674443784415933
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=2.6674443784415933}, derivative=-4.93971624344952E-7}, evalInputDelta = -4.786044332405481E-6
New Minimum: 2.6674443784415933 > 2.6674156622657685
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=2.6674156622657685}, derivative=-4.939689654302288E-7}, evalInputDelta = -3.3502220157188844E-5
New Minimum: 2.6674156622657685 > 2.6672146533630015
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=2.6672146533630015}, derivative=-4.939503530272532E-7}, evalInputDelta = -2.3451112292427467E-4
New Minimum: 2.6672146533630015 > 2.665807803116097
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=2.665807803116097}, derivative=-4.938200662064464E-7}, evalInputDelta = -0.001641361369828509
New Minimum: 2.665807803116097 > 2.655970242938656
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=2.655970242938656}, derivative=-4.929080584607464E-7}, evalInputDelta = -0.011478921547269927
New Minimum: 2.655970242938656 > 2.5876165076901048
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=2.5876165076901048}, derivative=-4.865240042409054E-7}, evalInputDelta = -0.07983265679582097
New Minimum: 2.5876165076901048 > 2.1340904746335974
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=2.1340904746335974}, derivative=-4.4183562470197887E-7}, evalInputDelta = -0.5333586898523284
New Minimum: 2.1340904746335974 > 0.1819638137222239
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.1819638137222239}, derivative=-1.290169679294858E-7}, evalInputDelta = -2.485485350763702
F(5.58545864083284E7) = LineSearchPoint{point=PointSample{avg=46.42230014106737}, derivative=2.060713629477939E-6}, evalInputDelta = 43.754850976581444
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=0.9672573576727649}, derivative=-2.9745778311467563E-7}, evalInputDelta = -1.700191806813161
F(3.007554652756145E7) = LineSearchPoint{point=PointSample{avg=8.49693002664256}, derivative=8.816279231816451E-7}, evalInputDelta = 5.829480862156633
F(2313503.579043188) = LineSearchPoint{point=PointSample{avg=1.6470451530667163}, derivative=-3.881566835990096E-7}, evalInputDelta = -1.0204040114192094
F(1.6194525053302318E7) = LineSearchPoint{point=PointSample{avg=0.6655107412531702}, derivative=2.467356197913197E-7}, evalInputDelta = -2.0019384232327555
0.6655107412531702 <= 2.6674491644859257
New Minimum: 0.1819638137222239 > 2.524368355386554E-30
F(1.0799999999999989E7) = LineSearchPoint{point=PointSample{avg=2.524368355386554E-30}, derivative=-4.780541094062495E-22}, evalInputDelta = -2.6674491644859257
Left bracket at 1.0799999999999989E7
Converged to left
Fitness changed from 2.6674491644859257 to 2.524368355386554E-30
Iteration 1 complete. Error: 2.524368355386554E-30 Total: 39.0465; Orientation: 0.8433; Line Search: 36.1607
Final threshold in iteration 1: 2.524368355386554E-30 (> 0.0) after 39.047s (< 30.000s)

Returns

    2.524368355386554E-30

Training Converged

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely quadratic (second-order) objectives.
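
L-BFGS builds an approximate Newton direction from a short history of recent steps s_i and gradient differences y_i using the standard two-loop recursion; when the trainer rejects a proposed orientation it pops history elements and retries, as the "Orientation rejected" lines in the log below show. A minimal sketch of the two-loop recursion itself (the textbook algorithm, not the MindsEye LBFGS class; helper names are illustrative):

    import java.util.List;

    /** Standard L-BFGS two-loop recursion: maps a gradient to an approximate Newton step. */
    class TwoLoopRecursion {
      /** @param g current gradient; sList/yList hold the most recent steps and gradient changes (at least one pair). */
      static double[] direction(double[] g, List<double[]> sList, List<double[]> yList) {
        int m = sList.size();
        double[] q = g.clone();
        double[] alpha = new double[m];
        double[] rho = new double[m];
        for (int i = m - 1; i >= 0; i--) {        // first loop: newest to oldest
          rho[i] = 1.0 / dot(yList.get(i), sList.get(i));
          alpha[i] = rho[i] * dot(sList.get(i), q);
          axpy(-alpha[i], yList.get(i), q);
        }
        double gamma = dot(sList.get(m - 1), yList.get(m - 1))
            / dot(yList.get(m - 1), yList.get(m - 1));  // initial Hessian scaling
        double[] z = q.clone();
        for (int i = 0; i < z.length; i++) z[i] *= gamma;
        for (int i = 0; i < m; i++) {             // second loop: oldest to newest
          double beta = rho[i] * dot(yList.get(i), z);
          axpy(alpha[i] - beta, sList.get(i), z);
        }
        for (int i = 0; i < z.length; i++) z[i] = -z[i];  // negate to get a descent direction
        return z;
      }

      static double dot(double[] a, double[] b) {
        double s = 0;
        for (int i = 0; i < a.length; i++) s += a[i] * b[i];
        return s;
      }

      static void axpy(double a, double[] x, double[] y) {  // y += a * x
        for (int i = 0; i < y.length; i++) y[i] += a * x[i];
      }
    }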

TrainingTester.java:509 executed in 361.47 seconds (6.590 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 3896566745541
Reset training subject: 3897189044237
Adding measurement 5c955c2e to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 2.6674491644859257 < 2.6674491644859257. Total: 1
th(0)=2.6674491644859257;dx=-4.939720674973957E-7
Adding measurement 12423a52 to history. Total: 1
New Minimum: 2.6674491644859257 > 2.6674481002554744
WOLFE (weak): th(2.154434690031884)=2.6674481002554744; dx=-4.939719689575284E-7 evalInputDelta=1.0642304513197587E-6
Adding measurement 71d4ea09 to history. Total: 2
New Minimum: 2.6674481002554744 > 2.6674470360252345
WOLFE (weak): th(4.308869380063768)=2.6674470360252345; dx=-4.939718704176601E-7 evalInputDelta=2.1284606912530535E-6
Adding measurement 6f79f075 to history. Total: 3
New Minimum: 2.6674470360252345 > 2.6674427791063984
WOLFE (weak): th(12.926608140191302)=2.6674427791063984; dx=-4.939714762581909E-7 evalInputDelta=6.385379527351631E-6
Adding measurement 5e22f4e1 to history. Total: 4
New Minimum: 2.6674427791063984 > 2.667423623013673
WOLFE (weak): th(51.70643256076521)=2.667423623013673; dx=-4.939697025406018E-7 evalInputDelta=2.5541472252754716E-5
Adding measurement 265d4926 to history. Total: 5
New Minimum: 2.667423623013673 > 2.6673214583474945
WOLFE (weak): th(258.53216280382605)=2.6673214583474945; dx=-4.939602427134131E-7 evalInputDelta=1.2770613843127165E-4
Adding measurement 23361da5 to history. Total: 6
New Minimum: 2.6673214583474945 > 2.666682973511643
WOLFE (weak): th(1551.1929768229563)=2.666682973511643; dx=-4.939011187935237E-7 evalInputDelta=7.661909742826545E-4
Adding measurement 28edea21 to history. Total: 7
New Minimum: 2.666682973511643 > 2.662088138823699
WOLFE (weak): th(10858.350837760694)=2.662088138823699; dx=-4.934754265702667E-7 evalInputDelta=0.005361025662226737
Adding measurement 473a6b27 to history. Total: 8
New Minimum: 2.662088138823699 > 2.6247119548280673
WOLFE (weak): th(86866.80670208555)=2.6247119548280673; dx=-4.899989400803986E-7 evalInputDelta=0.04273720965785843
Adding measurement 7e2d9bda to history. Total: 9
New Minimum: 2.6247119548280673 > 2.2952390616532505
WOLFE (weak): th(781801.26031877)=2.2952390616532505; dx=-4.5821392074445784E-7 evalInputDelta=0.3722101028326752
Adding measurement 29076994 to history. Total: 10
New Minimum: 2.2952390616532505 > 0.20335752507414168
END: th(7818012.6031877)=0.20335752507414168; dx=-1.3639059996801296E-7 evalInputDelta=2.464091639411784
Fitness changed from 2.6674491644859257 to 0.20335752507414168
Iteration 1 complete. Error: 0.20335752507414168 Total: 22.2841; Orientation: 0.9447; Line Search: 19.4814
Non-optimal measurement 0.20335752507414168 < 0.20335752507414168. Total: 11
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945, 2.667423623013673, 2.6674427791063984, 2.6674470360252345, 2.6674481002554744, 2.6674491644859257
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945, 2.667423623013673, 2.6674427791063984, 2.6674470360252345, 2.6674481002554744
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, 6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945, 2.667423623013673, 2.6674427791063984, 2.6674470360252345
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945, 2.667423623013673, 2.6674427791063984
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945, 2.667423623013673
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643, 2.6673214583474945
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, 6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699, 2.666682973511643
Rejected: LBFGS Orientation magnitude: 2.096e+03, gradient 1.941e-04, dot -1.000; [6f8b46a1-b380-42f7-b588-1e4bc6a9aa03 = 1.000/1.000e+00, cbb98e3f-ddfa-4d43-bf3e-df0ab9b5396a = 1.000/1.000e+00, d1b730f5-4c35-4ba4-b226-702442e08b9a = 1.000/1.000e+00, 7758d60e-fdd5-4f30-91c7-4b7e26c71ea4 = 1.000/1.000e+00, d2595249-8c35-4704-b113-1b528485afb0 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.20335752507414168, 2.2952390616532505, 2.6247119548280673, 2.662088138823699
LBFGS Accumulation History: 3 points
Removed measurement 29076994 to history. Total: 10
Removed measurement 7e2d9bda to history. Total: 9
Removed measurement 473a6b27 to history. Total: 8
Removed measurement 28edea21 to history. Total: 7
Removed measurement 23361da5 to history. Total: 6
Removed measurement 265d4926 to history. Total: 5
Removed measurement 5e22f4e1 to history. Total: 4
Removed measurement 6f79f075 to history. Total: 3
Adding measurement f9cf48a to history. Total: 3
th(0)=0.20335752507414168;dx=-3.7658800939655705E-8
Adding measurement 285d7eb9 to history. Total: 4
New Minimum: 0.20335752507414168 > 0.06367589625352194
WOLF (strong): th(1.684339755941405E7)=0.06367589625352194; dx=2.107288015640517E-8 evalInputDelta=0.13968162882061974
Adding measurement 45b1a425 to history. Total: 5
New Minimum: 0.06367589625352194 > 0.00986157890962193
END: th(8421698.779707026)=0.00986157890962193; dx=-8.292960391625336E-9 evalInputDelta=0.19349594616451976
Fitness changed from 0.20335752507414168 to 0.00986157890962193
Iteration 2 complete. Error: 0.00986157890962193 Total: 339.1818; Orientation: 333.7313; Line Search: 4.8312
Final threshold in iteration 2: 0.00986157890962193 (> 0.0) after 361.467s (< 30.000s)

Returns

    0.00986157890962193

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.16927266980847921, -1.8020895262090633, -0.17540496912573103 ], [ 0.6607464106959078, -0.19948757718567237, 1.6796620175043842 ], [ -0.6307243920350849, 1.6515840215438575, -1.7069369933625709 ], [ 1.6666203974210494, 0.8363732053479539, -1.2908589974020441 ], [ -1.531128208135962, -0.6838041731271938, 0.2471495946900566 ], [ -1.8471449825906419, 1.6872108161959036, 0.7653385034137409 ], [ -0.8829399315190065, -0.275232202749998, -0.4507625530438589 ], [ -0.49550896373976666, -1.5123396564385945, -0.9191571449459071 ], ... ],
    	[ [ 0.48576370606871255, -0.7618018670774865, 0.8750103772236828 ], [ 0.647530350293861, 1.261642936999997, 1.7736123262470735 ], [ 0.16889485236111074, 1.1280184483976585, -1.1806562523801158 ], [ 0.11084977320321482, 0.9278561704342116, -1.6771565128391785 ], [ -0.5488306926988278, -1.3375557168406413, -0.27243442685380087 ], [ -1.7807296362412282, -1.396595030874269, -0.7916100201973664 ], [ -1.0165736443201747, -1.8077307892660817, 0.9688306926988279 ], [ 1.6690692926798225, -1.4363778174473685, 1.3743205759342074 ], ... ],
    	[ [ -0.24427382283333288, 0.04078624564766049, -1.093624488602339 ], [ 1.1894592724824562, 1.7109491557178358, 1.5854424980277764 ], [ 0.36072963624122806, -0.7322264376257297, -0.87656903222076 ], [ 1.4992749758581865, -3.7781744736849546E-4, 1.7944413450029228 ], [ -1.3327174738859628, 1.7610554563815788, -1.6713835825716368 ], [ 0.878996540925439, -1.7500283047032161, 1.7320704457046763 ], [ -1.28513806444152, -1.7857284832163745, -0.3734164993742676 ], [ 0.01791508589035136, 0.2336886482646217, -0.6065101167646203 ], ... ],
    	[ [ 0.025950308742689485, 1.3879924497441496, 1.0410080711739755 ], [ -1.4421582979707606, -0.21817213426900456, -0.9378417020292392 ], [ -0.9294823329795292, -0.601076842935673, 1.6461033625073072 ], [ 0.3107059436374265, -1.5725874806184184, 0.13947080273099272 ], [ 0.3180895262090633, -0.4378538643845043, 1.077742319514618 ], [ 1.532270884676897, 1.2662102952777785, -1.1417423195146181 ], [ -0.7053905007207588, -0.1348237745497059, -0.3150439261330422 ], [ 0.3836267946520461, 0.5901721342690045, 0.460309045685671 ], ... ],
    	[ [ -1.3985377893611082, -1.7848642416081872, -1.2046608644795307 ], [ 0.5702807409824548, 0.12360834625438766, -1.2352229785511688 ], [ 0.7030363758771918, 1.8687128617865483, 0.16587986303801328 ], [ 1.8017469316140329, 0.872916238915205, 1.7897469316140326 ], [ -0.015057762431286037, -0.5076100201973663, -1.260023060497073 ], [ -1.8302807409824546, 0.21689024026169612, -0.5337377074152035 ], [ 0.9382149073771932, -0.6925216470131569, 1.1775251060877179 ], [ -1.524001673942979, 1.7718960053859645, 0.07081455035087672 ], ... ],
    	[ [ -1.68408428200292, 1.7304031839941487, -0.7287694711929807 ], [ 0.8251426765409348, -0.6183021275365491, 1.086537789361108 ], [ 1.0313177489663752, 1.226750390688594, -0.6877827865730997 ], [ -0.11686424160818722, 1.8459595329415186, -0.39909298528362425 ], [ -0.22707914898538037, -1.4981155248625722, -0.1007250241418135 ], [ 0.338575950369882, -0.632507810714913, -0.6634234175233895 ], [ -1.757038681926899, -1.3718884551301138, 0.5939930818508781 ], [ -0.31023168183187283, -1.5670271516783627, -1.749794316821636 ], ... ],
    	[ [ -1.4306770068274817, 0.24049564835964793, -0.2930600684809934 ], [ -0.6969514617675432, -0.18089024026169614, -1.6256291007017534 ], [ -1.4448994644605253, 1.0218983114356717, -0.9134944953347943 ], [ -0.5556406309502898, -0.34241534634941395, -1.3367036375877193 ], [ -0.2927954698464897, 0.2979884697514635, -0.2759127798406441 ], [ 1.5598104591695872, -0.2345101167646203, 1.4969133007587692 ], [ -0.4583633490423961, 0.09314267654093475, -1.3606990254883047 ], [ -1.1983113517353785, -1.4074540282763133, -1.8576244886023388 ], ... ],
    	[ [ -1.5907411664897646, -1.6907243920350852, 0.7745407275175439 ], [ 0.7112108161959035, 0.11916636914473629, 1.2304031839941487 ], [ -1.5266770068274818, -0.9269416054619855, 1.8621981329225135 ], [ -1.6855389423859615, 0.4820710778114048, -0.23560540809795183 ], [ 0.5497775423669564, -0.4134684966812854, 0.24748296508625778 ], [ 0.23375909396929795, -1.1719924497441494, 0.532942237568714 ], [ -0.6779947557938568, 1.8770860671345022, 1.1429751543713447 ], [ -0.7596360188508753, 1.691553410790934, -1.556247824179824 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.11 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 1.2572986684619882, -1.3889912967192959, 1.0847510227953223 ], [ 1.975839395979532, 1.6007296362412282, 0.3223801234970758 ], [ -0.8149416054619855, -0.5728306926988278, -0.8391709812441509 ], [ 0.878797775896197, -0.4718532322777758, -1.3511709812441508 ], [ 0.1100941383084778, -1.68923912089912, -1.4861889087236841 ], [ -0.4860375289020453, -1.362363349042396, 0.9205170349137421 ], [ 1.3985591759152023, -1.1615112697894738, 0.0026661086856737404 ], [ 1.6339167598333302, -1.53928881215643, -1.5095158818888885 ], ... ],
    	[ [ 1.1334164993742675, -0.8492345087997052, 1.2052940563625734 ], [ -1.5922830470321618, 1.777295730305552, -0.3713697462733929 ], [ 0.6990439261330423, 1.1647342483406427, -0.2555655731461991 ], [ 1.733251283254385, -0.19548757718567236, 0.4345591759152022 ], [ -0.15663780398245752, 1.4721362793099377, -0.530835936904971 ], [ -0.9329942348757317, 0.9020023060497073, 0.040139217466373736 ], [ 0.1600473852076032, 1.5718700067324556, 0.5159907758011708 ], [ 0.6350363758771917, 1.173846314128654, -1.7320964443581852 ], ... ],
    	[ [ -0.15090932076608315, -0.5676926282573078, 0.6757706242178345 ], [ -0.11375448186988332, 1.3847480846388864, -1.3834326417222187 ], [ -1.0978249275745595, -0.6445874806184185, -0.6504228966052644 ], [ 0.6984811799546755, 1.4812957303055523, 0.6005048725584771 ], [ 0.13449334230994062, 0.6764038161008774, 0.42094223756871396 ], [ 0.3694546603830416, -1.6608659155511658, -0.328099382514621 ], [ 0.15849795440935527, -0.5112628135029214, -1.2879572268918114 ], [ 1.051792010771929, 1.6532085101461964, -1.7337163208611095 ], ... ],
    	[ [ 0.4835320242368397, -0.47939574492690185, 0.6773691141666642 ], [ -1.7887786953918101, -0.04625474232894583, 0.6782409060307021 ], [ 1.6954141933245603, 1.2885736443201747, -0.10570709666228015 ], [ -1.5664291826476577, -0.5000519973070178, -0.3165031986154984 ], [ -0.8480473852076033, 0.2664153463494139, 1.6005782564195892 ], [ 0.34208030201023404, 1.5198226215248523, -0.5916695677602348 ], [ -1.1029399315190067, 1.3216810980087714, -1.109659711454677 ], [ -1.510054303356725, 1.089759093969298, -0.19224321208040937 ], ... ],
    	[ [ 1.8221293611608158, -0.04234028854532301, 1.0137637060687126 ], [ -0.5183708992982465, 1.2007342483406427, -0.7953835825716367 ], [ 0.8398318457236815, -1.3622853530818695, 0.3576244886023388 ], [ -0.5730294577280698, 0.7660023060497073, 0.9145637880146169 ], [ -0.6257544818698833, 1.4969699101652014, 0.11886193555847992 ], [ -1.0369346873128633, -0.7725874806184185, 1.668035222852338 ], [ -0.27887115975730914, 0.3908573234590653, 1.7729988469751463 ], [ 1.0380207544473656, 0.4343159638347929, -0.2545805624692965 ], ... ],
    	[ [ -0.4033529718187132, 0.9877798484166636, 1.5271403704912274 ], [ -0.8992368148494125, -0.2970126832733902, -0.14603291680263084 ], [ 0.7980895262090633, -0.01865855842982328, -1.2771426765409348 ], [ 1.3002524362792387, -1.0204650376067246, 0.125199285947367 ], [ 0.08690177051023262, -1.7868191624502914, 0.4002524362792385 ], [ 0.7626401100321648, -1.4515366363362543, 0.1675395744926902 ], [ -0.6664153463494139, 1.910254742328946, 1.097945696643275 ], [ -0.79326344560965, 1.6898630885833335, 1.6941033625073072 ], ... ],
    	[ [ -0.3117735623742703, -1.3614777208801145, 1.6588925463114035 ], [ 0.06196247109795455, -0.8377209329605239, -0.9460421410014601 ], [ 0.8254332738289473, 1.4528873021052602, 0.8636926282573079 ], [ -0.4539335342880097, -0.08244428315935864, -0.30555571684064126 ], [ 1.6103419624883017, 1.6211472886403493, -1.6272980363552596 ], [ -1.6666845570833322, 1.522698393381576, 0.6948007140526329 ], [ 0.11801153024853644, 0.48959387784941527, -1.5407602469941517 ], [ 1.1901553598143249, -1.7913621960175423, -1.6438913932865498 ], ... ],
    	[ [ 1.0415771033947356, 1.0324864241608187, -1.2309277691637415 ], [ 1.150120136961987, 0.4982715167836256, -0.38580186707748654 ], [ 0.8077232390102312, 0.51620966317105, 0.3364252026549717 ], [ -1.069980919495613, -1.1881559919210534, 1.3346233355774852 ], [ -1.3159190658830373, 1.531614632296781, -0.09993416639473834 ], [ 0.5769514617675431, 0.1924038161008774, 0.5422593544283605 ], [ 0.41425768048538175, 0.12664472213157946, -0.8362218255263151 ], [ 0.1497238711169598, -1.8449912967192958, -0.8269416054619855 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.18 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -29.597847272629387], [3.0, -0.6917397522600905]; valueStats=DoubleSummaryStatistics{count=6, sum=0.426691, min=0.000000, average=0.071115, max=0.203358}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.01 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -29.597847272629387], [339.181, -0.6917397522600905]; valueStats=DoubleSummaryStatistics{count=6, sum=0.426691, min=0.000000, average=0.071115, max=0.203358}
Plotting 3 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.00986157890962193 }, "CjGD": { "type": "Converged", "value": 2.524368355386554E-30 }, "GD": { "type": "NonConverged", "value": 2.524564200863212E-4 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 0.00986157890962193 }, "CjGD": { "type": "Converged", "value": 2.524368355386554E-30 }, "GD": { "type": "NonConverged", "value": 2.524564200863212E-4 } }, "model":null, "complete":null} | OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "436.841",
      "gc_time": "11.783"
    },
    "created_on": 1586738453894,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "OneThird",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.ImgTileCycleLayerTest.OneThird",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.00986157890962193
        },
        "CjGD": {
          "type": "Converged",
          "value": 2.524368355386554E-30
        },
        "GD": {
          "type": "NonConverged",
          "value": 2.524564200863212E-4
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayer/OneThird/trainingTest/202004134053",
    "id": "392d7f14-af45-45cc-806d-ffb1c300e54d",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "ImgTileCycleLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.ImgTileCycleLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/ImgTileCycleLayer.java",
      "javaDoc": ""
    }
  }