1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 6320739872520542208

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.02 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.92, 1.32, 1.712 ], [ -1.704, -0.44, -0.444 ], [ 0.148, -1.776, 1.92 ], [ -1.004, 0.68, 1.784 ], [ 1.18, -1.72, 0.612 ], [ -0.404, 1.22, 1.32 ], [ 1.268, -0.2, 0.504 ], [ 1.46, 1.064, 1.964 ], ... ],
    	[ [ -0.596, 0.644, -0.1 ], [ 1.36, 1.192, -1.148 ], [ -1.276, 0.56, -0.764 ], [ -0.44, -1.756, 0.412 ], [ 0.904, -0.82, -0.572 ], [ -1.072, 0.732, 1.864 ], [ -0.828, -1.508, 1.76 ], [ 1.008, 0.576, 0.02 ], ... ],
    	[ [ -0.7, 1.884, -0.22 ], [ 1.664, -1.22, 1.536 ], [ 0.168, -0.164, 1.472 ], [ -1.32, -0.456, 1.456 ], [ -1.692, 0.376, 1.848 ], [ -0.968, 1.452, 0.78 ], [ -0.1, 1.468, -1.496 ], [ 0.2, 0.42, -0.016 ], ... ],
    	[ [ 1.652, -0.156, 1.104 ], [ 1.168, -1.652, -1.628 ], [ 1.196, -2.0, -1.572 ], [ -0.432, -0.82, -0.304 ], [ 0.712, -1.512, 1.18 ], [ 0.532, 0.396, -0.288 ], [ -0.368, -0.088, 1.368 ], [ -1.68, 0.348, 1.796 ], ... ],
    	[ [ -1.512, -0.28, 1.924 ], [ -0.776, -1.04, 0.376 ], [ -1.176, -0.4, 0.048 ], [ -0.74, 0.456, -1.74 ], [ 1.924, -0.772, 0.304 ], [ 0.856, 0.26, -0.144 ], [ -0.344, -0.756, 0.596 ], [ -0.288, -0.824, -1.164 ], ... ],
    	[ [ -0.276, 1.136, -1.412 ], [ 0.236, 1.44, -1.292 ], [ 0.716, 0.664, -0.844 ], [ 0.4, 0.74, -1.956 ], [ -1.728, 0.584, -1.268 ], [ -1.46, -1.156, 1.904 ], [ 0.68, 1.456, 0.684 ], [ 0.588, 1.844, 0.632 ], ... ],
    	[ [ 0.316, 1.132, 0.604 ], [ -1.648, -1.004, 0.492 ], [ -0.652, -1.632, 0.156 ], [ -1.392, 1.708, -1.656 ], [ -0.272, -0.836, -1.928 ], [ 1.436, -1.344, -1.98 ], [ -0.46, -0.688, -0.608 ], [ 0.924, 1.38, 0.452 ], ... ],
    	[ [ 1.208, 1.224, 1.196 ], [ 1.408, -1.572, 1.672 ], [ 1.764, 1.14, -0.992 ], [ 1.2, 0.392, 1.372 ], [ 1.68, 1.156, 0.444 ], [ -1.656, -1.852, -1.012 ], [ 0.208, -1.664, 0.348 ], [ -1.0, -0.272, -0.512 ], ... ],
    	...
    ]
    [
    	[ [ -1.56, 1.452, 0.624 ], [ 1.2, 0.248, 0.048 ], [ 1.896, 1.468, 1.36 ], [ 1.204, -1.1, -0.948 ], [ 0.98, -1.032, -0.228 ], [ 1.16, 0.5, 0.268 ], [ 0.264, 0.632, -0.156 ], [ 0.18, -1.04, 1.344 ], ... ],
    	[ [ -1.544, -0.192, 0.532 ], [ -0.044, -0.52, 0.664 ], [ -1.212, -1.56, -0.884 ], [ -0.34, 1.916, 0.492 ], [ 0.992, 0.072, -0.384 ], [ -0.224, -0.248, -0.276 ], [ -1.736, 1.044, -1.504 ], [ 1.212, 1.676, 0.604 ], ... ],
    	[ [ 1.232, -1.916, 1.24 ], [ 0.188, 0.944, -1.02 ], [ 1.664, -1.612, -0.408 ], [ -1.112, 1.072, 1.684 ], [ 1.916, -0.916, 1.12 ], [ 0.308, 1.4, 1.884 ], [ 0.124, 1.208, 1.084 ], [ -1.98, 1.068, 1.264 ], ... ],
    	[ [ -0.228, -1.152, 1.148 ], [ -0.024, -0.004, 1.612 ], [ 0.824, 0.796, 1.844 ], [ 1.764, -0.808, 0.34 ], [ -1.904, -1.352, 0.764 ], [ 1.084, -0.792, -0.704 ], [ -0.652, -0.3, -1.22 ], [ 0.644, -1.976, -1.8 ], ... ],
    	[ [ 0.476, -0.808, -1.76 ], [ 1.072, 0.444, 1.876 ], [ -0.508, -1.236, -0.244 ], [ 0.432, 0.248, 1.204 ], [ -0.836, 0.116, -1.58 ], [ -0.852, -1.592, 1.188 ], [ -0.288, -1.912, -0.68 ], [ -0.728, 0.012, 0.288 ], ... ],
    	[ [ 0.456, -0.484, -0.896 ], [ 1.856, -1.344, -1.884 ], [ 0.476, -0.976, 0.74 ], [ 1.728, -0.104, 1.608 ], [ -0.952, -1.82, -1.392 ], [ 0.296, 1.38, 1.728 ], [ 0.856, -0.676, 0.72 ], [ 1.156, -1.82, -0.616 ], ... ],
    	[ [ 0.604, -1.232, -0.412 ], [ -1.456, 1.964, 0.668 ], [ -0.284, -0.26, 0.284 ], [ -0.116, -1.092, 0.384 ], [ 0.88, -0.336, -1.644 ], [ -0.92, 1.216, 1.524 ], [ 1.48, 1.316, -0.128 ], [ -1.412, 1.956, 1.36 ], ... ],
    	[ [ 1.336, 0.468, 0.772 ], [ 0.592, 1.076, -0.36 ], [ 0.904, 0.34, -1.284 ], [ -0.66, -1.344, 1.548 ], [ 1.576, -1.152, -1.108 ], [ -0.5, 1.232, 0.028 ], [ -1.084, 0.18, -1.232 ], [ -1.6, -0.372, 1.992 ], ... ],
    	...
    ]
    [
    	[ [ -0.928, -0.2, 0.728 ], [ 1.092, -0.128, -1.888 ], [ -1.484, -1.132, -1.116 ], [ -1.684, -0.608, -1.372 ], [ 1.948, -0.348, -1.672 ], [ 1.26, -0.516, 0.772 ], [ 0.256, 1.064, 1.52 ], [ 0.264, -1.268, -1.5 ], ... ],
    	[ [ 1.5, -1.976, 1.764 ], [ 1.36, -1.016, 1.84 ], [ 0.868, 0.84, 1.604 ], [ 0.992, 1.592, 0.24 ], [ 1.0, 0.928, -1.236 ], [ -1.776, 1.532, 1.716 ], [ -0.784, -1.724, 1.808 ], [ 1.192, 0.304, 1.22 ], ... ],
    	[ [ -0.44, -1.908, -0.96 ], [ -1.816, 0.376, -1.736 ], [ -0.08, -0.596, -1.58 ], [ -0.016, -1.556, -1.592 ], [ -1.472, -0.824, -1.428 ], [ 1.124, -1.872, 1.268 ], [ -1.216, 0.208, -0.988 ], [ -0.8, 0.74, -1.46 ], ... ],
    	[ [ -0.436, 1.304, -1.236 ], [ -1.408, -0.996, 0.3 ], [ 0.488, 1.444, -1.788 ], [ 0.136, 1.004, -1.272 ], [ -0.156, 1.5, 0.244 ], [ 0.704, -0.38, 1.524 ], [ -1.508, -1.116, 1.036 ], [ 1.164, -1.232, 0.06 ], ... ],
    	[ [ -1.74, 0.632, 0.128 ], [ 0.072, 0.62, 1.468 ], [ -0.02, 0.144, -0.056 ], [ -1.028, 1.756, 1.248 ], [ 1.58, -0.852, -1.644 ], [ -0.08, 0.572, 0.648 ], [ -0.144, 1.336, 0.012 ], [ 0.864, 1.136, 1.976 ], ... ],
    	[ [ 0.372, 0.328, 0.048 ], [ -0.792, -0.408, -0.664 ], [ -1.0, 1.356, -0.044 ], [ -1.38, 0.604, -0.716 ], [ 1.564, 0.196, 1.652 ], [ 0.86, -1.208, 1.08 ], [ 0.444, 0.72, 1.432 ], [ 1.136, -1.512, 1.68 ], ... ],
    	[ [ -1.688, 1.0, 0.26 ], [ -0.144, 0.104, -1.1 ], [ -1.14, 0.108, 1.416 ], [ 0.612, -0.812, -1.344 ], [ 0.388, 1.0, 1.904 ], [ -1.916, -1.484, -1.872 ], [ 0.14, -0.716, -1.38 ], [ -0.08, -1.968, -0.012 ], ... ],
    	[ [ 0.688, 1.096, 1.56 ], [ 1.112, -0.964, -1.332 ], [ -0.844, -1.58, 1.912 ], [ 1.076, 0.436, -1.676 ], [ 1.296, 1.144, 0.78 ], [ -1.336, -0.704, -1.36 ], [ -1.228, 1.072, 0.552 ], [ 0.988, 1.8, 1.732 ], ... ],
    	...
    ]
    [
    	[ [ -0.196, 1.136, -0.04 ], [ 0.3, -0.04, 1.04 ], [ -0.324, -1.828, 1.844 ], [ 1.124, -1.828, 1.712 ], [ -0.148, 1.968, 0.7 ], [ 0.72, 0.296, -1.82 ], [ 1.0, -1.288, 0.16 ], [ 1.384, 1.328, -0.328 ], ... ],
    	[ [ -0.848, 1.64, -1.168 ], [ -1.556, -1.84, -1.496 ], [ -1.356, 1.028, 0.776 ], [ 1.964, 1.676, -0.5 ], [ 1.784, 0.86, 0.412 ], [ 1.588, 0.804, -1.432 ], [ -0.216, -0.596, -1.38 ], [ -1.108, 1.672, -0.892 ], ... ],
    	[ [ -1.1, -1.764, -0.788 ], [ -0.512, 0.316, -1.188 ], [ 1.448, 1.464, 1.28 ], [ -0.86, 0.684, -1.94 ], [ -0.328, 0.6, -1.78 ], [ -1.916, 1.952, -0.24 ], [ 0.136, 1.876, 1.428 ], [ 1.768, -1.424, 0.244 ], ... ],
    	[ [ 0.184, 0.648, -1.92 ], [ -0.932, -1.712, 1.856 ], [ 1.436, 0.74, -1.484 ], [ -0.916, 0.504, -1.6 ], [ -1.432, -0.584, -0.952 ], [ 1.728, -1.732, -1.156 ], [ -1.688, 1.464, 1.044 ], [ -0.828, -1.816, 1.376 ], ... ],
    	[ [ 0.324, 1.356, -0.948 ], [ 0.42, -0.792, 0.596 ], [ -1.344, 1.356, -0.156 ], [ -0.572, 0.584, -0.764 ], [ 1.268, 0.2, 1.772 ], [ 0.772, 1.62, -0.512 ], [ -1.816, 0.384, 1.608 ], [ -1.584, 1.432, 0.184 ], ... ],
    	[ [ 0.468, -1.62, 1.996 ], [ -0.796, -0.04, -0.656 ], [ 1.848, -0.46, 1.416 ], [ 1.272, -1.472, -1.992 ], [ -0.636, 0.216, -0.84 ], [ 0.148, 0.184, 1.332 ], [ -1.068, -1.764, 1.316 ], [ 0.492, 0.1, 0.764 ], ... ],
    	[ [ -0.324, -1.728, 0.708 ], [ -1.428, -0.06, 1.972 ], [ 0.396, -1.164, 0.5 ], [ -0.28, -1.928, 1.908 ], [ -1.212, 0.94, 1.212 ], [ 1.984, -1.736, -1.892 ], [ -0.588, 1.776, -0.432 ], [ -1.924, -0.744, 0.372 ], ... ],
    	[ [ -0.404, 1.12, 0.088 ], [ 0.028, 1.284, 0.18 ], [ 1.704, -1.996, -0.264 ], [ 1.26, -1.448, 1.936 ], [ -0.748, -1.228, 0.744 ], [ -1.696, 0.248, -0.932 ], [ -1.216, 0.04, 0.08 ], [ -0.844, -1.748, -0.776 ], ... ],
    	...
    ]
    [
    	[ [ -1.832, 1.888, -0.992 ], [ -0.684, -1.58, -0.312 ], [ -1.328, 0.56, 1.856 ], [ -1.896, 1.808, 1.108 ], [ 0.204, -0.552, 0.016 ], [ 0.44, 0.58, 0.648 ], [ -1.904, 1.008, 1.764 ], [ 0.44, 0.484, 1.692 ], ... ],
    	[ [ 1.968, -0.7, 0.116 ], [ 1.356, 1.536, 0.104 ], [ -0.62, -0.104, 1.156 ], [ -0.064, 0.92, -1.656 ], [ -1.832, 1.948, -1.448 ], [ 0.888, -1.672, -1.576 ], [ -0.496, -0.212, -1.516 ], [ 1.152, -1.3, 0.672 ], ... ],
    	[ [ -0.736, 0.692, 0.068 ], [ -0.796, 0.408, -1.044 ], [ -1.2, -0.628, 0.32 ], [ -0.788, -0.084, 0.096 ], [ -1.748, -0.188, 1.392 ], [ -0.924, 1.872, -1.08 ], [ -0.688, 0.84, 1.5 ], [ -0.06, -0.636, -1.488 ], ... ],
    	[ [ -1.932, 1.612, 1.144 ], [ 0.452, 1.884, -0.016 ], [ -1.268, 0.348, -0.012 ], [ 0.604, -1.496, -1.968 ], [ 1.408, -1.96, -0.156 ], [ -0.984, 1.176, -1.664 ], [ -1.812, -1.196, 0.596 ], [ -1.468, -1.144, 1.42 ], ... ],
    	[ [ -0.256, 0.344, -0.792 ], [ -1.072, -1.56, -0.036 ], [ 0.34, 0.492, 0.172 ], [ -0.292, 0.372, 0.94 ], [ 1.236, -0.824, -0.228 ], [ 0.864, 0.52, -0.616 ], [ 0.864, -0.388, -0.296 ], [ 0.184, 1.712, 0.9 ], ... ],
    	[ [ -1.196, 1.952, -0.372 ], [ 0.62, 1.784, -0.348 ], [ 1.188, 1.208, -1.988 ], [ -1.088, -1.204, 0.496 ], [ -1.692, 0.384, -0.644 ], [ -1.904, -0.908, -0.716 ], [ 1.648, 1.344, -1.244 ], [ 1.196, -1.864, 0.324 ], ... ],
    	[ [ 0.844, 1.564, 1.6 ], [ -0.164, 0.396, 1.584 ], [ 0.244, -1.636, -0.552 ], [ 0.348, -1.52, -1.888 ], [ 0.7, -0.768, -1.684 ], [ -0.884, -1.196, -0.988 ], [ 1.3, 1.664, -1.828 ], [ 1.756, 1.872, -1.02 ], ... ],
    	[ [ 0.864, 1.168, -0.412 ], [ -1.056, -0.464, 1.456 ], [ -0.828, -1.1, -1.12 ], [ -1.8, -1.556, 0.872 ], [ 1.072, 0.912, -1.384 ], [ 1.628, -1.428, 1.604 ], [ -2.0, 0.912, -1.64 ], [ -0.988, -1.768, -0.948 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method with weak line search conditions.

TrainingTester.java:480 executed in 33.14 seconds (1.991 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 13884368362905
Reset training subject: 13885426031321
Constructing line search parameters: GD
th(0)=2.8447419582209585;dx=-3.612717452597433E-6
New Minimum: 2.8447419582209585 > 2.8447341748695747
WOLFE (weak): th(2.154434690031884)=2.8447341748695747; dx=-3.6127059216894818E-6 evalInputDelta=7.78335138384989E-6
New Minimum: 2.8447341748695747 > 2.844726391543033
WOLFE (weak): th(4.308869380063768)=2.844726391543033; dx=-3.6126943908082917E-6 evalInputDelta=1.556667792534938E-5
New Minimum: 2.844726391543033 > 2.8446952584852925
WOLFE (weak): th(12.926608140191302)=2.8446952584852925; dx=-3.6126482675511883E-6 evalInputDelta=4.6699735666067E-5
New Minimum: 2.8446952584852925 > 2.844555164644176
WOLFE (weak): th(51.70643256076521)=2.844555164644176; dx=-3.612440718193595E-6 evalInputDelta=1.867935767827511E-4
New Minimum: 2.844555164644176 > 2.8438081334145746
WOLFE (weak): th(258.53216280382605)=2.8438081334145746; dx=-3.611333934734635E-6 evalInputDelta=9.338248063839316E-4
New Minimum: 2.8438081334145746 > 2.839144371901265
WOLFE (weak): th(1551.1929768229563)=2.839144371901265; dx=-3.6044221245673617E-6 evalInputDelta=0.005597586319693537
New Minimum: 2.839144371901265 > 2.8058280973564793
WOLFE (weak): th(10858.350837760694)=2.8058280973564793; dx=-3.554940813632334E-6 evalInputDelta=0.03891386086447923
New Minimum: 2.8058280973564793 > 2.550488255098469
END: th(86866.80670208555)=2.550488255098469; dx=-3.169172257298667E-6 evalInputDelta=0.29425370312248944
Fitness changed from 2.8447419582209585 to 2.550488255098469
Iteration 1 complete. Error: 2.550488255098469 Total: 16.7305; Orientation: 0.9463; Line Search: 13.3469
th(0)=2.550488255098469;dx=-2.7907078335382146E-6
New Minimum: 2.550488255098469 > 2.086829529006901
END: th(187148.86177126726)=2.086829529006901; dx=-2.1824743343621503E-6 evalInputDelta=0.4636587260915679
Fitness changed from 2.550488255098469 to 2.086829529006901
Iteration 2 complete. Error: 2.086829529006901 Total: 3.9866; Orientation: 0.9392; Line Search: 2.3995
th(0)=2.086829529006901;dx=-1.7379399190312432E-6
New Minimum: 2.086829529006901 > 1.5050314062090213
END: th(403200.00000000006)=1.5050314062090213; dx=-1.1728881103551775E-6 evalInputDelta=0.5817981227978799
Fitness changed from 2.086829529006901 to 1.5050314062090213
Iteration 3 complete. Error: 1.5050314062090213 Total: 4.3859; Orientation: 0.9880; Line Search: 2.7482
th(0)=1.5050314062090213;dx=-8.548796752086888E-7
New Minimum: 1.5050314062090213 > 0.9154596441362713
END: th(868668.0670208557)=0.9154596441362713; dx=-4.985376365622726E-7 evalInputDelta=0.58957176207275
Fitness changed from 1.5050314062090213 to 0.9154596441362713
Iteration 4 complete. Error: 0.9154596441362713 Total: 4.0608; Orientation: 0.9336; Line Search: 2.3738
th(0)=0.9154596441362713;dx=-3.802767934604366E-7
New Minimum: 0.9154596441362713 > 0.4656649560130813
END: th(1871488.617712673)=0.4656649560130813; dx=-1.3782863728034216E-8 evalInputDelta=0.44979468812319007
Fitness changed from 0.9154596441362713 to 0.4656649560130813
Iteration 5 complete. Error: 0.4656649560130813 Total: 3.9670; Orientation: 0.9421; Line Search: 2.3796
Final threshold in iteration 5: 0.4656649560130813 (> 0.0) after 33.132s (< 30.000s)

Returns

    0.4656649560130813

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.9606773271653838, -1.3044457985451834, 1.8711847238502386 ], [ -1.7037943455259787, -0.7198495469329964, 0.293067345976585 ], [ -0.5176965876829769, 1.781245949936316, -2.2059034806721476 ], [ 1.0241009985034017, 0.11555144921599218, -1.6560352117122827 ], [ -1.1723906636308363, -1.7811010625265242, -0.6023483750536089 ], [ 0.44420140020595444, -1.20331732257322, 1.3148360803392192 ], [ 1.2062783714249747, 0.6439008489515556, -0.7327660992748777 ], [ -1.281168403795748, 0.6179812561274932, 2.002646651678347 ], ... ],
    	[ [ -0.30860741447559553, -0.791016790004577, -0.6022653088118622 ], [ -1.3722491315932837, 1.0664081255831832, 0.04140480399897953 ], [ 1.2573492152217753, 0.02905088903247509, -0.8606459918910692 ], [ -0.09425314339719855, 0.5559468745334866, -0.6382318158811701 ], [ 0.8864584173145094, 0.4475476558535981, 0.7617903696787529 ], [ -1.0745480131175382, 0.7894531315709794, -0.020478240945994576 ], [ -0.9001615050666218, -1.4963928045461627, -1.8740481759597885 ], [ -1.0260892657076275, 0.7689538190338763, -0.5819952954373109 ], ... ],
    	[ [ -0.45940002969369864, 1.8764335682153013, -0.1504730486655226 ], [ -1.784116393852165, 0.1700314033262331, -1.5267719406806912 ], [ -0.5043009745336685, -0.6265806717246025, -1.5126208421670595 ], [ -1.3032537428256565, 0.4649938004899808, -1.4553579351614692 ], [ 0.8901672360780359, -0.7017427687367366, -1.795642083209437 ], [ 0.9961271384243512, 1.434525537675294, 0.8468870449935713 ], [ -0.6555154727364207, -1.469669710164126, 1.4818626175773058 ], [ -0.4494277747167984, -0.44166041879188794, 0.6286829300950341 ], ... ],
    	[ [ 1.6478695884876868, -0.6336011049178578, 0.6840922796563607 ], [ 1.124096161236557, 1.7732015382735722, -0.14902693110114268 ], [ 1.19596411748012, -1.9610571179511673, 1.5621590999979194 ], [ 0.6370062753004493, -0.7435003313849917, -0.683208443435298 ], [ -0.6762700258475954, -1.527102606116577, 1.0537235828457412 ], [ 0.7516863473647466, -0.4682165992735465, 0.6725554039822896 ], [ 0.45685417255226446, 0.3793917680707474, -1.3507902726537389 ], [ 1.2495014415604888, -0.583650646620497, -0.6737898712130106 ], ... ],
    	[ [ 1.5039147401376252, 0.6128392001789033, 1.9448483192149473 ], [ -0.8685470512737257, -0.924826006244076, 0.008735387862100737 ], [ -1.1709691505466688, 0.7038544590062077, -0.6495290302816392 ], [ -0.8469106109115758, -0.7252113699891114, -1.7539459390567933 ], [ -2.0422815717617793, -0.8189531230070571, -0.19872393477537223 ], [ 0.914183601050857, 0.27454849567449696, 0.5662314383232355 ], [ -0.6504293982749348, 0.44606057400826576, -0.776482791523658 ], [ -0.6776663878953845, 0.8519198235867663, -1.1644044369848063 ], ... ],
    	[ [ -0.5307374427730338, -0.5916978808084796, -0.8947666988391977 ], [ -0.3278160122008901, -1.4594404088488546, -1.1184111361814415 ], [ 0.8163640248309016, 0.8092506505614239, 0.9095012467286243 ], [ 0.4718474545599795, 0.6836244207391085, -2.318361833091033 ], [ -1.7482954165776967, -0.7708055334358777, 1.261671252382671 ], [ 1.0206814266554702, 1.156616432739499, -2.1711442992059307 ], [ -0.7956078386152473, 1.442086930473712, -0.8165068478339825 ], [ -0.7642032884196243, -2.0317275157231025, 0.78643521758436 ], ... ],
    	[ [ 0.6695699814401812, -1.1341448215896799, -0.7763167357288207 ], [ -1.6436222659711648, 1.0212167316110448, -0.7349536492015115 ], [ 0.8034795192978605, -1.683766721840566, -0.6524960204115356 ], [ 1.3790835988000205, -1.2717285654932282, 1.4381964437911081 ], [ 0.12862362820023082, -0.9051883741822834, 2.250443515015739 ], [ 1.383450977638557, -0.6479079053100593, 2.0148536289900614 ], [ -0.7179994065804558, -0.821119636990669, 0.7833715785949621 ], [ 0.14319523287133962, -1.3923109878426796, 0.7232931762541739 ], ... ],
    	[ [ 1.1940424302626362, 1.062157446886754, -1.1956003715981687 ], [ 1.3962793154449602, 1.6228177207256562, -1.774530163685714 ], [ 1.9144042446042966, 0.21964385618953375, -0.7361383028386335 ], [ 1.0090313313239263, -0.6995734779563395, 1.3606420492592912 ], [ 1.8082147278836296, -1.157089855022605, -0.6996163186652958 ], [ 0.3831748375456534, 1.9011030450161357, 0.2467038581766812 ], [ -0.208, -1.791790435995693, 0.6066058459281831 ], [ 1.0205647733505423, 0.6663739712107919, -0.5773521163416843 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.9229009269296259, 1.701578841342181, 3.5013322707704937 ], [ 2.902915171846298, 0.5181833702196402, 0.08588846927775938 ], [ 0.2680097568985982, 3.1728371341645287, 4.866010166041495 ], [ 1.0487828551356644, 0.01335213741591602, 2.742452622430945 ], [ 1.3744998681687528, 3.1723209949331137, 0.3628235649297231 ], [ 0.1973148839449305, 1.447972578804783, 1.7287939181618015 ], [ 1.4551075093676893, 0.41460830328053405, 0.5369461562465199 ], [ 1.6413924788845449, 0.3819008329249144, 4.0105936114784955 ], ... ],
    	[ [ 0.09523853626931202, 0.6257075620691451, 0.3627235021982478 ], [ 1.883067679158521, 1.1372262903098382, 0.001714357794193911 ], [ 1.5809270490188143, 8.439541535771814E-4, 0.7407115233581623 ], [ 0.008883655040252873, 0.30907692730355224, 0.40733985080297574 ], [ 0.7858085256277448, 0.2002989042600507, 0.580324567335291 ], [ 1.1546534324948492, 0.6232362469472261, 4.193583522422088E-4 ], [ 0.8102907352038059, 2.2391914254975305, 3.5120565658182104 ], [ 1.0528591812004182, 0.5912899758067833, 0.33871852391116275 ], ... ],
    	[ [ 0.2110483872825712, 3.5210029359252077, 0.022642138374696735 ], [ 3.183071306812053, 0.02891067811708815, 2.331032558849884 ], [ 0.2543194729156078, 0.3926033381788541, 2.2880218121581843 ], [ 1.6984703181890826, 0.21621923449411606, 2.118066719437455 ], [ 0.7923977081868098, 0.49244291347430097, 3.224330490992727 ], [ 0.9922692759054865, 2.057863518242591, 0.7172176669779432 ], [ 0.4297005349968532, 2.1599290569739065, 2.1959168173730643 ], [ 0.20198532468689329, 0.19506392552742585, 0.3952422265928775 ], ... ],
    	[ [ 2.715474180662578, 0.4014503601531303, 0.46798224708543634 ], [ 1.2635921797067635, 3.1442436953357626, 0.022209026193424727 ], [ 1.4303301703000024, 3.8457450198669387, 2.4403410537063093 ], [ 0.4057769947721518, 0.5527927427695924, 0.4667737771812827 ], [ 0.45734114785990737, 2.3320423696080415, 1.1103333890452656 ], [ 0.5650323648145544, 0.2192267838352848, 0.4523307714257808 ], [ 0.20871573497841422, 0.1439381136798478, 1.824634360695962 ], [ 1.5612538524617396, 0.3406480773005243, 0.4539927905492454 ], ... ],
    	[ [ 2.2617595456032205, 0.37557188527591784, 3.7824349847532055 ], [ 0.7543739802762839, 0.8553031418253676, 7.630700110133689E-5 ], [ 1.3711687515319873, 0.4954110994629213, 0.4218879611786066 ], [ 0.7172575828746186, 0.5259315311614838, 3.0763263571338166 ], [ 4.170914018357764, 0.670684217683012, 0.0394912022526064 ], [ 0.8357316564303126, 0.07537687647712928, 0.3206180417456001 ], [ 0.42305840214029383, 0.19897003568458355, 0.6029255255323724 ], [ 0.4592317332831777, 0.725767385820107, 1.3558376928699039 ], ... ],
    	[ [ 0.28168223316125934, 0.35010638215324574, 0.8006074453515954 ], [ 0.1074633378552941, 2.129966306980912, 1.2508434695346629 ], [ 0.666450221038109, 0.6548866154340878, 0.827192517800922 ], [ 0.22264002037473193, 0.4673423486308816, 5.374801589133215 ], [ 3.056536863626582, 0.594141170375368, 1.5918143490888574 ], [ 1.041790574719446, 1.337761572483044, 4.713867567974412 ], [ 0.6329918328660255, 2.079614715043093, 0.6666834325597863 ], [ 0.5840066660313674, 4.12791669814637, 0.6184803514569596 ], ... ],
    	[ [ 0.4483239600458046, 1.286284476338687, 0.6026676741726515 ], [ 2.7014941531961862, 1.0428836129223447, 0.5401568664746185 ], [ 0.6455793379311211, 2.835070373577726, 0.42575105665289115 ], [ 1.901871572479216, 1.617293544291464, 2.06840901093339 ], [ 0.016544037731391213, 0.8193659927547654, 5.064496014276395 ], [ 1.9139366075290793, 0.41978465376326873, 4.05963514625442 ], [ 0.5155231478498867, 0.6742374582516881, 0.6136710301503628 ], [ 0.02050487471707718, 1.9385298868674583, 0.5231530188158514 ], ... ],
    	[ [ 1.4257373252675025, 1.1281784419769878, 1.429460248565679 ], [ 1.9495959267394467, 2.6335373547012138, 3.148957301830447 ], [ 3.6649436117589476, 0.04824342356180858, 0.5418996009061436 ], [ 1.0181442275933352, 0.48940305105992904, 1.8513467862125235 ], [ 3.2696405021352684, 1.338856932596233, 0.48946299334278076 ], [ 0.14682295612813787, 3.6141927877696234, 0.06086279363926003 ], [ 0.043264, 3.210512966525636, 0.36797065231424664 ], [ 1.0415524566040437, 0.4440542695072413, 0.3333354662442217 ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely linear functions.

TrainingTester.java:452 executed in 50.67 seconds (2.476 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 13917819657638
Reset training subject: 13918472104751
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=2.8447419582209585}, derivative=-3.612717452597433E-6}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=2.8447419582209585}, derivative=-3.612717452597432E-6}, evalInputDelta = 0.0
New Minimum: 2.8447419582209585 > 2.8447419582209563
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=2.8447419582209563}, derivative=-3.612717452597429E-6}, evalInputDelta = -2.220446049250313E-15
New Minimum: 2.8447419582209563 > 2.844741958220941
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=2.844741958220941}, derivative=-3.612717452597406E-6}, evalInputDelta = -1.7763568394002505E-14
New Minimum: 2.844741958220941 > 2.8447419582208346
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=2.8447419582208346}, derivative=-3.612717452597251E-6}, evalInputDelta = -1.2390088954816747E-13
New Minimum: 2.8447419582208346 > 2.844741958220091
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=2.844741958220091}, derivative=-3.612717452596149E-6}, evalInputDelta = -8.677503160470224E-13
New Minimum: 2.844741958220091 > 2.844741958214887
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=2.844741958214887}, derivative=-3.612717452588438E-6}, evalInputDelta = -6.071587677070056E-12
New Minimum: 2.844741958214887 > 2.844741958178455
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=2.844741958178455}, derivative=-3.612717452534464E-6}, evalInputDelta = -4.250333418553964E-11
New Minimum: 2.844741958178455 > 2.844741957923436
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=2.844741957923436}, derivative=-3.61271745215665E-6}, evalInputDelta = -2.975224511203578E-10
New Minimum: 2.844741957923436 > 2.844741956138299
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=2.844741956138299}, derivative=-3.612717449512005E-6}, evalInputDelta = -2.082659378288554E-9
New Minimum: 2.844741956138299 > 2.8447419436423407
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=2.8447419436423407}, derivative=-3.612717430999454E-6}, evalInputDelta = -1.4578617868465926E-8
New Minimum: 2.8447419436423407 > 2.844741856170635
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=2.844741856170635}, derivative=-3.6127173014116107E-6}, evalInputDelta = -1.0205032374699385E-7
New Minimum: 2.844741856170635 > 2.84474124386878
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=2.84474124386878}, derivative=-3.612716394296764E-6}, evalInputDelta = -7.143521787433826E-7
New Minimum: 2.84474124386878 > 2.8447369577601016
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=2.8447369577601016}, derivative=-3.6127100444974952E-6}, evalInputDelta = -5.000460856940947E-6
New Minimum: 2.8447369577601016 > 2.844706955210289
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=2.844706955210289}, derivative=-3.6126655961298703E-6}, evalInputDelta = -3.5003010669498735E-5
New Minimum: 2.844706955210289 > 2.8444969476971083
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=2.8444969476971083}, derivative=-3.612354468691896E-6}, evalInputDelta = -2.4501052385028643E-4
New Minimum: 2.8444969476971083 > 2.8430274014586847
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=2.8430274014586847}, derivative=-3.6101771222093615E-6}, evalInputDelta = -0.0017145567622738334
New Minimum: 2.8430274014586847 > 2.8327653596136604
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=2.8327653596136604}, derivative=-3.5949624128652364E-6}, evalInputDelta = -0.011976598607298161
New Minimum: 2.8327653596136604 > 2.762135287623682
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=2.762135287623682}, derivative=-3.4897625141072464E-6}, evalInputDelta = -0.08260667059727655
New Minimum: 2.762135287623682 > 2.323355283548814
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=2.323355283548814}, derivative=-2.8151489731195333E-6}, evalInputDelta = -0.5213866746721445
New Minimum: 2.323355283548814 > 1.0214232227404807
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=1.0214232227404807}, derivative=-4.122271092352191E-7}, evalInputDelta = -1.8233187354804778
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=234.89423832526367}, derivative=1.4564714570830466E-4}, evalInputDelta = 232.0494963670427
F(613786.6638277846) = LineSearchPoint{point=PointSample{avg=1.4335126688563051}, derivative=-1.2817539645807934E-6}, evalInputDelta = -1.4112292893646534
F(4296506.646794492) = LineSearchPoint{point=PointSample{avg=9.150206962025472}, derivative=1.14538211380568E-5}, evalInputDelta = 6.305465003804513
F(330500.511291884) = LineSearchPoint{point=PointSample{avg=1.910056493567096}, derivative=-2.138132554310862E-6}, evalInputDelta = -0.9346854646538625
New Minimum: 1.0214232227404807 > 0.9995449122967839
F(2313503.579043188) = LineSearchPoint{point=PointSample{avg=0.9995449122967839}, derivative=4.094058675957634E-7}, evalInputDelta = -1.8451970459241747
0.9995449122967839 <= 2.8447419582209585
New Minimum: 0.9995449122967839 > 0.9316298917888919
F(2078015.538383464) = LineSearchPoint{point=PointSample{avg=0.9316298917888919}, derivative=1.8363857554332472E-7}, evalInputDelta = -1.9131120664320667
Right bracket at 2078015.538383464
New Minimum: 0.9316298917888919 > 0.9168343172590887
F(1977497.091062673) = LineSearchPoint{point=PointSample{avg=0.9168343172590887}, derivative=1.1273664049019766E-7}, evalInputDelta = -1.9279076409618698
Right bracket at 1977497.091062673
New Minimum: 0.9168343172590887 > 0.911215551651309
F(1917655.6937309518) = LineSearchPoint{point=PointSample{avg=0.911215551651309}, derivative=7.559324387200769E-8}, evalInputDelta = -1.9335264065696496
Right bracket at 1917655.6937309518
New Minimum: 0.911215551651309 > 0.9086959431041637
F(1878352.6559859186) = LineSearchPoint{point=PointSample{avg=0.9086959431041637}, derivative=5.281096919582334E-8}, evalInputDelta = -1.9360460151167949
Right bracket at 1878352.6559859186
New Minimum: 0.9086959431041637 > 0.9074717394081165
F(1851290.35750137) = LineSearchPoint{point=PointSample{avg=0.9074717394081165}, derivative=3.773812628663079E-8}, evalInputDelta = -1.937270218812842
Right bracket at 1851290.35750137
New Minimum: 0.9074717394081165 > 0.906849330588981
F(1832151.8615534296) = LineSearchPoint{point=PointSample{avg=0.906849330588981}, derivative=2.733770380261568E-8}, evalInputDelta = -1.9378926276319777
Right bracket at 1832151.8615534296
Converged to right
Fitness changed from 2.8447419582209585 to 0.906849330588981
Iteration 1 complete. Error: 0.906849330588981 Total: 50.6695; Orientation: 0.9350; Line Search: 47.5966
Final threshold in iteration 1: 0.906849330588981 (> 0.0) after 50.670s (< 30.000s)

Returns

    0.906849330588981

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.4594563822171931, -1.1036695167986368, 1.4843810933134574 ], [ -1.4029699935983626, 0.016575543850643415, 0.2833652261419807 ], [ -0.5604964270922083, 1.4652591330787978, -1.743579897006438 ], [ 1.0035430659897817, 0.10157479585841599, -0.9012589316152985 ], [ -0.9881742342324297, -1.8460198800907361, -0.5999084169228411 ], [ 0.44908114313900127, -0.3951143091128224, 1.291587679071059 ], [ 1.0407740532754755, 0.6563467196969462, -0.6740434466099045 ], [ -0.8941532038537059, 0.4824483275066578, 2.2436395702503016 ], ... ],
    	[ [ -0.2853719646946784, -0.760478650393408, -0.6589224311772175 ], [ -1.3593737041960867, 0.9011324239343933, 0.02894024965260774 ], [ 0.7504045846634262, 0.02654892224138178, -0.7120718967637598 ], [ -0.08931653012895859, 0.261202684360707, -0.687280289433955 ], [ 0.8740332847463568, 0.3863764162734056, 0.6752367274277552 ], [ -0.23854675724651964, 0.8148340330965543, -0.008715388888837158 ], [ -0.5427233519443462, -1.3106094229513225, -1.8173045426761032 ], [ -1.0222632892145636, 0.30160298930822105, -0.6412175757338889 ], ... ],
    	[ [ -0.42047468029239043, 2.058357207594635, -0.1493304842531686 ], [ -1.5774643502808303, 0.11423070674821634, -1.3846559548727013 ], [ -0.5425921512250973, -0.6690374326211492, -1.34588764646001 ], [ -1.1625378758960039, 0.4662393753471216, -1.4523966972593816 ], [ 0.453112663837279, 0.3255595353612577, -0.9444015342737482 ], [ 0.9628638383988324, 0.9703500093760499, 0.8734322314310349 ], [ 0.1634682084133383, -1.4766519236717173, 1.1756452761890808 ], [ -0.47341022405850786, -0.4443057738441202, 0.6445115970786917 ], ... ],
    	[ [ 1.4240464297836435, -0.6607795428708498, 0.5286310694401514 ], [ 1.036520981144315, 1.5076754939418076, -0.07594795952928104 ], [ 1.1956874160667803, -2.3899351261694965, 1.1947497462809613 ], [ 0.6833205456393641, -0.7098767950720528, -0.1810882146216366 ], [ -0.6641475470184216, -1.1957991134648165, 0.8945653402308128 ], [ 0.12762628738381276, -0.47776960320524003, 0.5737940809900689 ], [ 0.46777029395073566, 0.39596640188294346, -1.0911459267665435 ], [ 0.6815993019254134, -0.6270061436491423, -0.30887290035051795 ], ... ],
    	[ [ 1.141805998066299, 0.668151467184576, 0.9719515775616903 ], [ 0.16204334043949986, -0.8299850269515403, 0.0083835630279405 ], [ -1.0652147127794795, 0.509267101419299, -0.35607766372050054 ], [ -0.6981122866727667, 0.20187602737200172, -1.8946684982900712 ], [ -2.0728459478227705, -0.841834050126344, -0.1956188337853517 ], [ 0.8915227908839166, 0.2751969149102496, 0.6213253726277087 ], [ -0.6994479164981204, 0.3968951742477165, -0.49668156775789907 ], [ -0.40711670816327383, 0.8677395374431067, -1.1551303709904337 ], ... ],
    	[ [ -0.5675990041734411, -0.4383042824573803, -0.5753134175103864 ], [ -0.3330896117520502, -1.464315509752644, -0.8713199789307013 ], [ 0.8397399780523285, -0.2904686577812221, -0.11032855531031016 ], [ 0.48152466485859324, 0.6637733241552085, -1.4922886938326243 ], [ -1.8838298146397356, -0.5331525707856628, 1.2231529525629266 ], [ 0.6487931537106921, 1.141789007128837, -1.3017321447700603 ], [ -0.8217197061790016, 1.2459461531606495, -0.6774815704301513 ], [ -0.738566689909538, -1.8144037168062432, 0.7419277784130318 ], ... ],
    	[ [ 0.6640326038523315, -1.1092885054478623, -0.6808573347022431 ], [ -1.37775166487993, 0.27751150396632984, -0.46338180639404225 ], [ -0.09351380749836591, -1.7149867570273158, -0.5347035165241758 ], [ 1.16811085964938, -0.6781482282818575, 0.8481098128022189 ], [ 0.1264657344039701, -0.6418414418450145, 1.6416657591313624 ], [ 1.0733746350108315, -0.41955409021035395, 2.2683741388996195 ], [ -0.6453020267208643, 0.11200821382829185, -0.3712550166521309 ], [ 0.11326022243121701, -1.39106660805219, 0.14340437819706997 ], ... ],
    	[ [ 0.7408503155556407, 0.8631876941541792, -1.1928035097518366 ], [ 1.2901852890234866, 1.2482765597151704, -1.6708221430095556 ], [ 1.7368198886430992, 0.15519388679923357, -0.624529326305496 ], [ 0.8153598997944614, -0.5707100473021551, 1.2682478118503455 ], [ 1.360556134611205, -1.1532138376977037, -0.7182377054733636 ], [ 0.19270471275536022, 1.0394506512186295, 0.18771607450846778 ], [ -0.208, -1.5157696505233793, 0.6559869086842304 ], [ 0.7873296487518259, 0.6105420136164958, -0.5914816715006566 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.21110016716011143, 1.2180864023105364, 2.203387230186455 ], [ 1.9683248029373897, 2.7474865394460274E-4, 0.08029585138649585 ], [ 0.31415624478313114, 2.14698432707083, 3.040070857244981 ], [ 1.0070986852961714, 0.010317439153678882, 0.8122676618163492 ], [ 0.9764883172008488, 3.407789397690216, 0.35989010869486937 ], [ 0.20167387312303214, 0.156115317265703, 1.6681987327281649 ], [ 1.0832106299714621, 0.43079101645694173, 0.4543345679177591 ], [ 0.7995099519618469, 0.23275638871397134, 5.033918521192958 ], ... ],
    	[ [ 0.08143715823370078, 0.5783277777041793, 0.43417877030849494 ], [ 1.8478968676597898, 0.8120396454658751, 8.375380499552625E-4 ], [ 0.5631070406838892, 7.048452721789362E-4, 0.5070463861607386 ], [ 0.007977442554277167, 0.06822684231723912, 0.472354196244421 ], [ 0.763934182844506, 0.14928673505228, 0.4559446380673446 ], [ 0.056904555392829965, 0.6639545014923967, 7.595800348366619E-5 ], [ 0.29454863674570664, 1.7176970595287986, 3.3025958008312006 ], [ 1.0450222324757783, 0.09096436315965489, 0.41115997943004556 ], ... ],
    	[ [ 0.17679895676698795, 4.236834394056783, 0.02229959352728584 ], [ 2.488393776406922, 0.013048654364196997, 1.9172721133644324 ], [ 0.29440624257107884, 0.4476110862482988, 1.811413556893665 ], [ 1.3514943128927925, 0.21737915512407416, 2.10945616620996 ], [ 0.205311086129715, 0.105989011064638, 0.8918942579386095 ], [ 0.9271067712961328, 0.9415791406961, 0.7628838629025969 ], [ 0.02672185516186661, 2.180500903683383, 1.3821418154257 ], [ 0.2241172402431266, 0.19740762067122247, 0.4153951987689258 ], ... ],
    	[ [ 2.0279082341795416, 0.4366296042766093, 0.2794508075774381 ], [ 1.0743757443523734, 2.2730853950326733, 0.0057680925566613115 ], [ 1.4296683969404538, 5.711789907298807, 1.4274269562384214 ], [ 0.4669269680928782, 0.5039250641817693, 0.03279294147485192 ], [ 0.44109196421058655, 1.429935519763241, 0.8002471479422698 ], [ 0.016288469231375564, 0.2282637937468925, 0.3292396473792377 ], [ 0.21880904790275765, 0.15678939142012469, 1.1905994334992192 ], [ 0.46457760838521084, 0.3931367041737689, 0.095402468570941 ], ... ],
    	[ [ 1.303720937220177, 0.4464263831009015, 0.9446898691246584 ], [ 0.02625804418079165, 0.6888751449637491, 7.02841290434509E-5 ], [ 1.1346823843218692, 0.25935298058801454, 0.12679130260064986 ], [ 0.4873607648034792, 0.04075393042750119, 3.5897687184127536 ], [ 4.29669032340528, 0.708684567952124, 0.038266728131541056 ], [ 0.7948128866654477, 0.07573334197611917, 0.3860452186709611 ], [ 0.4892273878935617, 0.15752577934112522, 0.24669257975044448 ], [ 0.16574401406570027, 0.7529719048419768, 1.334326173984497 ], ... ],
    	[ [ 0.322168629538682, 0.192110644020479, 0.33098552836748024 ], [ 0.11094868945713154, 2.1442199121021455, 0.7591985056837979 ], [ 0.7051632307393251, 0.08437204115322472, 0.012172390116860169 ], [ 0.23186600286718054, 0.4405950258600555, 2.22692554574068 ], [ 3.548814770525581, 0.2842516637353612, 1.4961031453634048 ], [ 0.42093255630186577, 1.3036821368002554, 1.6945065767276613 ], [ 0.6752232755229047, 1.5523818165758207, 0.4589812782725041 ], [ 0.5454807554439317, 3.29206084756031, 0.5504568283808968 ], ... ],
    	[ [ 0.4409392989789075, 1.230520988318752, 0.46356671021784224 ], [ 1.8981996500794187, 0.07701263483365431, 0.21472269849700565 ], [ 0.008744832192841437, 2.94117957677907, 0.2859078505833195 ], [ 1.3644829804308138, 0.4598850195218223, 0.7192902545714147 ], [ 0.015993581978335506, 0.41196043646968705, 2.695066464704352 ], [ 1.152133107084636, 0.17602563461223783, 5.14552123402859 ], [ 0.4164147056900551, 0.012545839965004349, 0.137830287389374 ], [ 0.012827877985168754, 1.9350663080378254, 0.020564815686088277 ], ... ],
    	[ [ 0.5488591900588925, 0.7450929953392088, 1.4227802128762996 ], [ 1.6645780800126178, 1.5581943695343412, 2.7916466335710437 ], [ 3.0165433255862277, 0.024085142499853326, 0.39003687941559667 ], [ 0.6648117661928342, 0.32570995809162817, 1.6084525122631894 ], [ 1.8511129954281833, 1.3299021554574657, 0.5158654015636422 ], [ 0.037135106318125895, 1.0804576563188328, 0.035237324628868624 ], [ 0.043264, 2.2975576334477674, 0.4303188243650928 ], [ 0.6198879758036735, 0.3727615503908854, 0.3498505677212106 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for purely second-order or quadratic functions.

TrainingTester.java:509 executed in 269.54 seconds (4.830 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 13968806027187
Reset training subject: 13969443053656
Adding measurement 312f78f7 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 2.8447419582209585 < 2.8447419582209585. Total: 1
th(0)=2.8447419582209585;dx=-3.612717452597433E-6
Adding measurement 60b73341 to history. Total: 1
New Minimum: 2.8447419582209585 > 2.8447341748695747
WOLFE (weak): th(2.154434690031884)=2.8447341748695747; dx=-3.6127059216894818E-6 evalInputDelta=7.78335138384989E-6
Adding measurement 5950c5bd to history. Total: 2
New Minimum: 2.8447341748695747 > 2.844726391543033
WOLFE (weak): th(4.308869380063768)=2.844726391543033; dx=-3.6126943908082917E-6 evalInputDelta=1.556667792534938E-5
Adding measurement 229b11d0 to history. Total: 3
New Minimum: 2.844726391543033 > 2.8446952584852925
WOLFE (weak): th(12.926608140191302)=2.8446952584852925; dx=-3.6126482675511883E-6 evalInputDelta=4.6699735666067E-5
Adding measurement 18893432 to history. Total: 4
New Minimum: 2.8446952584852925 > 2.844555164644176
WOLFE (weak): th(51.70643256076521)=2.844555164644176; dx=-3.612440718193595E-6 evalInputDelta=1.867935767827511E-4
Adding measurement 4812172d to history. Total: 5
New Minimum: 2.844555164644176 > 2.8438081334145746
WOLFE (weak): th(258.53216280382605)=2.8438081334145746; dx=-3.611333934734635E-6 evalInputDelta=9.338248063839316E-4
Adding measurement 68d5f15 to history. Total: 6
New Minimum: 2.8438081334145746 > 2.839144371901265
WOLFE (weak): th(1551.1929768229563)=2.839144371901265; dx=-3.6044221245673617E-6 evalInputDelta=0.005597586319693537
Adding measurement 697d5806 to history. Total: 7
New Minimum: 2.839144371901265 > 2.8058280973564793
WOLFE (weak): th(10858.350837760694)=2.8058280973564793; dx=-3.554940813632334E-6 evalInputDelta=0.03891386086447923
Adding measurement 434504e0 to history. Total: 8
New Minimum: 2.8058280973564793 > 2.550488255098469
END: th(86866.80670208555)=2.550488255098469; dx=-3.169172257298667E-6 evalInputDelta=0.29425370312248944
Fitness changed from 2.8447419582209585 to 2.550488255098469
Iteration 1 complete. Error: 2.550488255098469 Total: 19.3406; Orientation: 1.0265; Line Search: 16.3937
Non-optimal measurement 2.550488255098469 < 2.550488255098469. Total: 9
Rejected: LBFGS Orientation magnitude: 1.424e+03, gradient 1.671e-03, dot -0.961; [7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00, 7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00, 23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746, 2.844555164644176, 2.8446952584852925, 2.844726391543033, 2.8447341748695747, 2.8447419582209585
Rejected: LBFGS Orientation magnitude: 1.424e+03, gradient 1.671e-03, dot -0.961; [7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00, 7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00, 23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746, 2.844555164644176, 2.8446952584852925, 2.844726391543033, 2.8447341748695747
Rejected: LBFGS Orientation magnitude: 1.424e+03, gradient 1.671e-03, dot -0.961; [7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00, 23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00, 7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746, 2.844555164644176, 2.8446952584852925, 2.844726391543033
Rejected: LBFGS Orientation magnitude: 1.424e+03, gradient 1.671e-03, dot -0.961; [23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00, 7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00, 7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746, 2.844555164644176, 2.8446952584852925
Rejected: LBFGS Orientation magnitude: 1.424e+03, gradient 1.671e-03, dot -0.961; [7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00, 7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746, 2.844555164644176
Rejected: LBFGS Orientation magnitude: 1.425e+03, gradient 1.671e-03, dot -0.961; [7d4f6e9b-b7b8-4d60-a04a-b056f36a1a69 = 1.000/1.000e+00, 23843d12-2c73-466c-ab87-9598d193d6b5 = 1.000/1.000e+00, 82381586-07f7-48f5-9a81-7ba826047140 = 1.000/1.000e+00, 7477ab1b-15f7-4ca7-91ac-a3c86e7fe464 = 1.000/1.000e+00, 3bdb7e46-edb6-42b0-bf29-8669d073129b = 1.000/1.000e+00]
Orientation rejected. Popping history element from 2.550488255098469, 2.8058280973564793, 2.839144371901265, 2.8438081334145746
LBFGS Accumulation History: 3 points
Removed measurement 434504e0 to history. Total: 8
Removed measurement 697d5806 to history. Total: 7
Removed measurement 68d5f15 to history. Total: 6
Removed measurement 4812172d to history. Total: 5
Removed measurement 18893432 to history. Total: 4
Removed measurement 229b11d0 to history. Total: 3
Adding measurement 197652d3 to history. Total: 3
th(0)=2.550488255098469;dx=-2.7907078335382146E-6
Adding measurement 5bda3855 to history. Total: 4
New Minimum: 2.550488255098469 > 2.086829529006901
END: th(187148.86177126726)=2.086829529006901; dx=-2.1824743343621503E-6 evalInputDelta=0.4636587260915679
Fitness changed from 2.550488255098469 to 2.086829529006901
Iteration 2 complete. Error: 2.086829529006901 Total: 250.1953; Orientation: 246.3091; Line Search: 3.2506
Final threshold in iteration 2: 2.086829529006901 (> 0.0) after 269.536s (< 30.000s)

Returns

    2.086829529006901

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.5616164409455706, -1.5722058208932608, 0.9439045833517459 ], [ -1.8529753475164907, -1.5374725184559288, 0.27369930558299527 ], [ -0.6350898319141968, 1.910173450680468, -1.007571681076763 ], [ 1.2472875800921024, 0.09004003155062379, -0.5176892679796026 ], [ -1.4972872746678338, -1.3458054234517023, -0.5965801002130444 ], [ 0.4549704507524693, -1.7506563884665132, 1.394825111017225 ], [ 0.8155002340981498, 1.0603288627800023, -1.2008289417074418 ], [ -0.6038414979444405, 0.37846159393776774, 1.4221651946780762 ], ... ],
    	[ [ -0.2638009328526067, -1.1853342069817656, -0.8820238075484685 ], [ -1.1924984563503314, 0.713076869571477, 0.021356441648674254 ], [ 1.6882728683935306, 0.024382521660269732, -1.3376971905419996 ], [ -0.08479664326450795, 0.14858533868674556, -0.8176433612402167 ], [ 0.8484264522765135, 0.33336532786080725, 1.2424999623131432 ], [ -1.7054130229143896, 0.8841092358641542, -0.004732151933879916 ], [ -1.4781998985304985, -1.6700893304288016, -1.2200210810733978 ], [ -1.2018383724975772, 1.470307292604376, -0.8177981657234387 ], ... ],
    	[ [ -0.38265624956209127, 1.7744279072616176, -0.14819894861567373 ], [ -1.0657835721373623, 0.0818101510072238, -1.6629956411364935 ], [ -0.6051542947343642, -0.9727011108670277, -0.9969547400699317 ], [ -0.9151781937322225, 0.4677379809270882, -1.4623467451150571 ], [ 0.2660482953441683, -1.600449965908044, -0.5278845287414253 ], [ 1.2621711690153816, 1.7562899208150269, 1.0192288170792667 ], [ -1.532484031968882, -1.4520902073600492, 1.721121632893568 ], [ -0.5062756867112259, -0.44745007624541616, 1.0483213671130238 ], ... ],
    	[ [ 1.7862722948815961, -1.023092849629261, 0.4092081878274676 ], [ 0.8793434067271998, 1.0080008090478496, -0.04553261782254217 ], [ 1.189114294696647, -1.6726223029995981, 1.789969301628151 ], [ 0.7965479976794193, -0.6640025744978945, -1.44720879293383 ], [ -0.6469887874690978, -0.8221546651702956, 0.7120847593663231 ], [ 1.5175645078180224, -0.489884999865475, 1.2180075634170975 ], [ 0.48158779406109514, 0.4164260841874087, -1.6295402484668942 ], [ 0.4059941311743627, -0.7131374375999971, -0.1728944559709426 ], ... ],
    	[ [ 0.7767910255901505, 0.8353967489503857, 0.5256290008133142 ], [ -1.667813630301622, -0.7091529949914044, 0.008057454467585481 ], [ -1.431820449151096, 1.3062897885287357, -1.341145811859854 ], [ -1.332297465751318, -1.5878797468238801, -1.5599616086669115 ], [ -1.2823415272827356, -0.9085578181093206, -0.192541122207278 ], [ 1.2141830735759649, 0.2758801033379685, 0.7498986360908027 ], [ -0.9324368282176277, 0.3519295687726565, -1.3881527113241419 ], [ -1.3410968101226601, 0.9123431098885865, -1.2689135561781282 ], ... ],
    	[ [ -0.6293080878978266, -0.331096053506148, -0.38598968944744894 ], [ -0.3389697876703402, -1.265782740150164, -0.644222568086097 ], [ 1.0623914457315133, 1.6584952870864744, 1.681594234367231 ], [ 0.4938488014418034, 0.6367178812266555, -0.8214568786912275 ], [ -1.5175516182536724, -1.361759120293876, 1.3919834152908064 ], [ 0.4277177716073011, 1.2841699572693346, -0.7246601614604228 ], [ -1.0495088082480193, 1.643845770626614, -1.3121003340974 ], [ -1.1684493196203771, -1.1221805145606008, 1.203432789957961 ], ... ],
    	[ [ 1.1027651840437953, -1.3001549028916415, -1.2571282089758926 ], [ -1.800819275855831, 1.6609100492754836, -1.3661853615425414 ], [ 1.6125611911089568, -1.3128439244059429, -1.2264549837587249 ], [ 0.8686030487621087, -0.39875330019467053, 0.515992112919513 ], [ 0.12436888662672735, -1.432336486290136, 0.9315940422397944 ], [ 0.7561819780391267, -0.2876713435749858, 1.422549489029617 ], [ -1.2106466617766725, -1.62794822020589, 1.6589132875956802 ], [ 0.09181236591386994, -1.238035690027213, 1.4914559289839788 ], ... ],
    	[ [ 1.6414022844912775, 0.663288124029113, -1.250316748513405 ], [ 1.5530715257019752, 0.8335760320279312, -1.160827367676858 ], [ 1.1270721550788823, 0.11515630430985418, -0.5186706163732537 ], [ 0.6289355439184776, -1.2557102660681259, 1.5189323529902465 ], [ 0.8643389735609395, -1.241197876467565, -1.0622248335012934 ], [ 0.11427783648384504, 0.5830208778876032, 0.14721200507326967 ], [ -0.208, -1.005871509221396, 0.7720480423080252 ], [ 1.4626900378876637, 1.171211191161076, -0.6123682665861654 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.07 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 2.4386459086315107, 2.471831143250652, 0.890955862472433 ], [ 3.4335176385038597, 2.3638217450072165, 0.07491130987661382 ], [ 0.4033390946008027, 3.648762611684526, 1.0152006925078543 ], [ 1.5557263074520127, 0.008107207281637328, 0.26800217818125677 ], [ 2.2418691828822292, 1.8111922377920158, 0.3559078159702061 ], [ 0.20699811105790508, 3.064797790478615, 1.945537090324214 ], [ 0.6650406318141371, 1.1242972972443328, 1.4419901472422147 ], [ 0.36462455463978577, 0.1432331780859158, 2.0225538409537305 ], ... ],
    	[ [ 0.06959093217390551, 1.405017182241091, 0.7779659970822979 ], [ 1.422052568397923, 0.5084786219178572, 4.560975998932283E-4 ], [ 2.8502652781537194, 5.945073625135227E-4, 1.789433773583959 ], [ 0.007190470708928221, 0.022077602872654888, 0.6685406661801996 ], [ 0.719827444922511, 0.11113244181974351, 1.5438061563481622 ], [ 2.908433578725996, 0.7816491409402987, 2.2393261925323425E-5 ], [ 2.1850749400155762, 2.7891983716121227, 1.4884514382635023 ], [ 1.4444154736076251, 2.1618035346856104, 0.6687938398606208 ], ... ],
    	[ [ 0.14642580532892546, 3.148594398068844, 0.021962928370791102 ], [ 1.135894622637876, 0.006692900807824762, 2.765554502438977 ], [ 0.36621172043544575, 0.9461474510819498, 0.9939187537479051 ], [ 0.8375511262829735, 0.2187788188017491, 2.1384580029486018 ], [ 0.07078169545553781, 2.5614400933750594, 0.2786620756845567 ], [ 1.593076059893655, 3.0845542859564534, 1.0388273815648013 ], [ 2.348507308239601, 2.1085659703109507, 2.9622596752142214 ], [ 0.2563150709549234, 0.20021157073202875, 1.0989776887457192 ], ... ],
    	[ [ 3.1907687114615637, 1.0467189789625218, 0.16745134098504 ], [ 0.7732448269545975, 1.0160656310411194, 0.002073219285773685 ], [ 1.4139928058519045, 2.7976653684916797, 3.2039901007711706 ], [ 0.6344887126070922, 0.44089941893983187, 2.0944132903449932 ], [ 0.41859449111073344, 0.6759382934612808, 0.5070647045217943 ], [ 2.303002035388957, 0.23998731309319646, 1.4835424245412547 ], [ 0.23192680338863178, 0.1734106835916588, 2.655401421373547 ], [ 0.16483123454802565, 0.5085650049066899, 0.029892492905488212 ], ... ],
    	[ [ 0.6034042974373978, 0.6978877281568738, 0.2762858464960031 ], [ 2.7816023054198755, 0.5028979703052788, 6.492257249721322E-5 ], [ 2.050109798607246, 1.706393011614449, 1.7986720886692271 ], [ 1.7750165372473845, 2.5213620903734695, 2.4334802205146584 ], [ 1.644399792593819, 0.8254773088475692, 0.037072083740837966 ], [ 1.474240536158377, 0.07610983141776817, 0.5623479644108461 ], [ 0.8694384386165498, 0.12385442137650797, 1.9269679499565664 ], [ 1.7985406541211744, 0.8323699501611773, 1.6101416130526236 ], ... ],
    	[ [ 0.3960286694936186, 0.10962459664734603, 0.14898804035973806 ], [ 0.11490051695327552, 1.6022059452620576, 0.4150227172314459 ], [ 1.128675583963495, 2.7506066172880472, 2.827759169057114 ], [ 0.24388663868550572, 0.4054096602737614, 0.6747914035491341 ], [ 2.3029629140643397, 1.8543879017035507, 1.9376178284446575 ], [ 0.1829424921487154, 1.6490924791531245, 0.525132349607846 ], [ 1.1014687385901778, 2.7022289176070067, 1.7216072867385088 ], [ 1.3652738125213222, 1.2592891072594947, 1.448250479946002 ], ... ],
    	[ [ 1.2160910511391458, 1.6904027715131738, 1.5803713338029355 ], [ 3.2429500642939195, 2.7586221917842892, 1.8664624420931246 ], [ 2.6003535950707373, 1.7235591698495971, 1.5041918271866141 ], [ 0.7544712563188302, 0.15900419441614103, 0.26624786059514344 ], [ 0.015467619960771761, 2.0515878099579727, 0.8678674595366798 ], [ 0.5718111839111663, 0.08275480191423752, 2.023647048738425 ], [ 1.4656653396710007, 2.6502154076715247, 2.7519932957615083 ], [ 0.008429510534702348, 1.5327323697811572, 2.224440788101463 ], ... ],
    	[ [ 2.6942014595331845, 0.43995113547806, 1.5632919716131333 ], [ 2.412031163946261, 0.6948490011714306, 1.3475201775475831 ], [ 1.270291642754156, 0.013260974422303742, 0.26901920828901094 ], [ 0.39555991840403126, 1.5768082723088834, 2.307155492960487 ], [ 0.7470818612163784, 1.5405721685475928, 1.1283215969068505 ], [ 0.013059423911428425, 0.33991334405283147, 0.021671374437692376 ], [ 0.043264, 1.0117774930633288, 0.5960581796316542 ], [ 2.139462146935815, 1.3717356543009467, 0.3749948939217449 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -0.33192644401445087], [5.0, 0.40662332796221856]; valueStats=DoubleSummaryStatistics{count=8, sum=13.067641, min=0.465665, average=1.633455, max=2.550488}
Plotting 5 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -0.33192644401445087], [250.196, 0.40662332796221856]; valueStats=DoubleSummaryStatistics{count=8, sum=13.067641, min=0.465665, average=1.633455, max=2.550488}
Plotting 5 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 2.086829529006901 }, "CjGD": { "type": "NonConverged", "value": 0.906849330588981 }, "GD": { "type": "NonConverged", "value": 0.4656649560130813 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 2.086829529006901 }, "CjGD": { "type": "NonConverged", "value": 0.906849330588981 }, "GD": { "type": "NonConverged", "value": 0.4656649560130813 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "358.529",
      "gc_time": "9.628"
    },
    "created_on": 1586748512446,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Double",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayerTest.Double",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/SquareActivationLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 2.086829529006901
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.906849330588981
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.4656649560130813
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/SquareActivationLayer/Double/trainingTest/202004132832",
    "id": "ac3d46dc-3892-4bde-b2e3-b6a235a1d12f",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "SquareActivationLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/SquareActivationLayer.java",
      "javaDoc": ""
    }
  }