1. Test Modules
  2. Training Characteristics
    1. Input Learning
      1. Gradient Descent
      2. Conjugate Gradient Descent
      3. Limited-Memory BFGS
    2. Results
  3. Results

Subreport: Logs for com.simiacryptus.ref.lang.ReferenceCountingBase

Test Modules

Using Seed 4905999002729319424

Training Characteristics

Input Learning

In this test, we use a network to learn this target input, given its pre-evaluated output:

TrainingTester.java:332 executed in 0.03 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(input_target)).flatMap(RefArrays::stream).map(x -> {
      try {
        return x.prettyPrint();
      } finally {
        x.freeRef();
      }
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -1.98, 1.264, -0.912 ], [ -0.624, 1.416, -0.536 ], [ 0.044, -0.824, -1.004 ], [ 0.992, 0.856, -0.02 ], [ -1.18, -0.984, -1.74 ], [ -0.02, -0.184, -0.148 ], [ -1.76, 0.9, 0.62 ], [ -0.444, -0.336, 0.06 ], ... ],
    	[ [ 1.824, -1.244, 0.308 ], [ 1.116, -1.908, 0.58 ], [ -1.044, -0.276, -0.984 ], [ -1.628, 0.668, 0.976 ], [ -1.32, -0.896, 0.052 ], [ 1.476, -0.248, 1.12 ], [ -0.648, -0.864, -1.836 ], [ -1.408, -1.176, 1.616 ], ... ],
    	[ [ -1.504, 0.104, 1.184 ], [ -0.436, 1.724, -0.52 ], [ -1.62, -0.676, -0.044 ], [ -0.052, 0.768, -0.568 ], [ -1.408, -1.304, 1.052 ], [ -1.028, -1.912, -0.296 ], [ 0.652, -0.428, 0.38 ], [ -0.488, 0.768, -0.72 ], ... ],
    	[ [ 1.92, -0.272, 0.596 ], [ 1.196, -0.616, -0.8 ], [ -0.592, -0.808, 1.892 ], [ -0.048, 1.54, -0.056 ], [ 0.884, 0.528, -1.344 ], [ 1.92, 0.664, 1.6 ], [ -0.44, -1.944, 0.24 ], [ -0.808, -1.428, 0.316 ], ... ],
    	[ [ -0.364, 0.12, -0.904 ], [ -0.408, 1.092, 1.784 ], [ 0.092, -0.436, 0.888 ], [ 0.404, -1.944, -1.432 ], [ 1.868, 1.336, -1.872 ], [ 1.916, 1.992, 1.98 ], [ 0.928, -1.6, -0.312 ], [ 0.04, -1.416, -0.992 ], ... ],
    	[ [ -0.32, 0.612, -0.716 ], [ -1.824, -1.716, 1.94 ], [ -1.416, -1.404, -1.928 ], [ -1.216, 0.764, -1.792 ], [ -1.652, -1.692, 1.672 ], [ 1.148, 0.672, 1.108 ], [ 1.912, -0.964, 1.088 ], [ 0.972, 1.688, 1.78 ], ... ],
    	[ [ -0.832, -1.716, -0.652 ], [ -1.5, 1.92, -0.28 ], [ -1.388, -1.596, 1.084 ], [ -0.528, 1.7, 0.528 ], [ -0.316, 0.244, 0.984 ], [ -0.352, -0.576, -1.412 ], [ -0.532, 0.268, -1.556 ], [ 0.432, -1.708, 1.232 ], ... ],
    	[ [ -1.376, 1.116, 0.328 ], [ -0.988, -0.008, -1.664 ], [ -1.436, 1.716, -0.376 ], [ -1.256, -1.24, 0.588 ], [ -0.3, 1.82, 0.836 ], [ 0.056, 0.288, 1.576 ], [ -1.34, 0.788, -0.932 ], [ 1.276, 0.492, -0.576 ], ... ],
    	...
    ]
    [
    	[ [ 1.304, -1.916, -0.128 ], [ 0.424, 1.792, -0.408 ], [ -0.712, 1.848, 0.884 ], [ 0.804, 0.996, -1.12 ], [ -0.432, 0.996, -1.308 ], [ 1.264, 1.576, 1.94 ], [ 0.08, 1.9, -1.196 ], [ -1.088, -1.376, 1.2 ], ... ],
    	[ [ 0.648, 0.036, -1.888 ], [ 0.352, -0.532, 0.972 ], [ 0.416, -0.256, 1.588 ], [ 0.46, -0.792, 1.636 ], [ 1.928, 1.82, -0.384 ], [ -0.216, -1.288, 0.648 ], [ -0.408, 1.824, 1.36 ], [ -0.86, -1.108, 1.208 ], ... ],
    	[ [ 1.472, 0.908, 0.12 ], [ 0.824, 1.996, 1.616 ], [ 0.84, 1.848, 0.064 ], [ -1.832, 1.968, -1.096 ], [ -1.496, 0.124, 0.196 ], [ -1.852, -1.06, 0.46 ], [ -1.128, -0.648, 0.756 ], [ 0.524, -1.488, -1.388 ], ... ],
    	[ [ 1.168, 1.92, -1.364 ], [ -0.944, 0.828, 1.808 ], [ 1.08, 1.784, 1.444 ], [ 0.476, -0.364, 0.56 ], [ 1.824, 1.74, 0.56 ], [ -0.648, 1.744, 1.332 ], [ -1.38, -1.1, -1.112 ], [ -0.392, -1.48, 0.208 ], ... ],
    	[ [ -1.076, -1.276, 1.728 ], [ -0.176, -1.636, -1.712 ], [ -0.352, -1.624, -0.652 ], [ -0.212, -1.772, -0.124 ], [ 1.192, 0.54, 0.064 ], [ 0.888, -1.296, 0.084 ], [ 0.744, -0.972, -0.92 ], [ 0.12, -0.788, -1.096 ], ... ],
    	[ [ 1.196, 0.156, 1.204 ], [ 1.26, 0.896, -0.948 ], [ -1.664, 0.168, 1.184 ], [ -0.872, 1.58, 0.7 ], [ -1.34, 0.356, 1.976 ], [ -1.08, 0.584, -1.208 ], [ 0.632, -0.788, 0.652 ], [ -0.5, -1.528, 0.844 ], ... ],
    	[ [ 1.924, 0.244, -0.2 ], [ -1.748, -1.892, 1.648 ], [ -0.604, 0.288, 1.332 ], [ -1.368, -1.068, -1.352 ], [ -0.708, -0.196, -1.836 ], [ 1.468, 2.0, 0.532 ], [ -1.004, -1.644, -0.152 ], [ 1.324, 0.384, -1.304 ], ... ],
    	[ [ 1.74, -0.856, 0.844 ], [ 0.84, 0.336, -0.696 ], [ -1.604, 0.692, 0.44 ], [ -1.064, 1.532, -0.572 ], [ -0.784, -0.06, 0.44 ], [ -1.124, 0.828, 1.416 ], [ -1.06, -0.728, 0.156 ], [ 0.176, -1.464, 0.796 ], ... ],
    	...
    ]
    [
    	[ [ 1.768, 1.232, -0.692 ], [ 1.092, 1.916, -0.892 ], [ 1.54, -1.06, -0.352 ], [ -1.868, 1.2, 1.352 ], [ -1.128, 1.004, -1.204 ], [ -0.584, 1.912, -0.532 ], [ -1.52, -1.62, -1.34 ], [ 0.984, -1.384, 0.664 ], ... ],
    	[ [ 1.66, -0.788, 0.664 ], [ 0.68, 0.004, -1.856 ], [ 1.488, -1.844, -0.832 ], [ -0.844, -0.804, -0.152 ], [ 0.264, 0.216, -0.408 ], [ -0.116, -1.264, 0.688 ], [ -0.92, 0.896, 1.656 ], [ 1.092, -0.612, 0.096 ], ... ],
    	[ [ -1.992, 0.476, 1.496 ], [ -1.092, -0.56, 0.888 ], [ 1.388, 1.872, 1.904 ], [ 1.928, 1.044, -0.212 ], [ -1.012, 1.508, -1.284 ], [ -0.224, 0.348, -0.908 ], [ -1.468, -0.72, 0.26 ], [ 1.156, 1.348, 0.052 ], ... ],
    	[ [ -0.164, -0.312, 1.7 ], [ 0.792, 0.504, 1.284 ], [ -1.908, -1.996, -0.74 ], [ 1.516, -1.012, 0.672 ], [ -0.1, -0.528, -1.376 ], [ -1.08, 0.904, -1.94 ], [ -1.984, 1.308, -1.228 ], [ 0.116, -0.348, -0.232 ], ... ],
    	[ [ 1.308, -1.812, -0.704 ], [ 1.992, 0.556, -0.948 ], [ -0.732, -0.572, 0.896 ], [ -1.976, 0.276, 0.28 ], [ 0.804, 1.28, -1.564 ], [ 0.04, 0.5, -0.564 ], [ 0.448, -0.5, -1.044 ], [ -1.644, -0.536, -1.704 ], ... ],
    	[ [ 1.192, -1.232, -1.104 ], [ -1.808, 0.88, 1.068 ], [ -1.124, -1.56, -0.676 ], [ 0.416, 0.644, 1.724 ], [ -1.652, 1.28, 1.768 ], [ 1.324, 1.088, -0.232 ], [ 0.528, 0.372, 1.128 ], [ -0.484, -0.404, 1.728 ], ... ],
    	[ [ -1.156, 0.348, -0.928 ], [ 1.104, -1.916, 1.816 ], [ 1.848, -1.72, -1.316 ], [ 1.82, 0.2, -0.856 ], [ -1.128, 1.812, -0.08 ], [ 1.52, -0.676, -1.484 ], [ -0.668, -0.776, -1.64 ], [ 1.74, 0.148, 0.036 ], ... ],
    	[ [ -0.072, -0.128, -0.848 ], [ -0.136, 1.188, 0.488 ], [ 0.58, -0.112, -0.04 ], [ 0.088, 0.908, -0.872 ], [ -1.08, -0.192, -0.416 ], [ -0.784, 1.88, 0.744 ], [ 0.996, 1.712, 1.428 ], [ 0.4, 1.612, -0.864 ], ... ],
    	...
    ]
    [
    	[ [ 0.472, -1.8, 0.864 ], [ 0.344, -1.916, 1.988 ], [ -0.128, 1.012, -1.86 ], [ -0.056, -0.736, 0.944 ], [ 1.396, 1.012, 1.928 ], [ 1.192, -0.104, -1.972 ], [ 1.004, 0.336, 0.256 ], [ 0.2, 1.676, 1.264 ], ... ],
    	[ [ 1.748, -1.86, -0.128 ], [ -0.284, -1.94, 0.92 ], [ 0.184, 0.472, -0.008 ], [ -1.88, -1.408, 0.332 ], [ 1.596, -1.34, -1.012 ], [ -0.86, -0.208, 0.364 ], [ 0.576, -1.296, 1.868 ], [ -1.2, -0.556, 0.944 ], ... ],
    	[ [ -1.208, 1.624, 1.432 ], [ -0.148, -1.912, -1.164 ], [ 0.368, 1.64, -1.22 ], [ 0.072, -0.996, 1.052 ], [ -0.5, -1.288, -0.504 ], [ 1.072, -1.644, -1.552 ], [ -1.408, 1.856, -0.092 ], [ -0.504, -0.02, -0.74 ], ... ],
    	[ [ 0.228, 1.592, -0.74 ], [ 0.436, -1.988, -0.352 ], [ -1.96, -1.556, -1.248 ], [ 1.148, -1.596, 0.908 ], [ -0.816, -0.284, -0.548 ], [ 0.808, -0.02, 1.164 ], [ -1.552, 0.2, -0.192 ], [ 1.7, 0.972, -0.136 ], ... ],
    	[ [ -1.224, -1.908, 1.528 ], [ -1.92, -1.368, -1.892 ], [ -1.912, -0.252, 1.004 ], [ -0.48, 1.196, -0.696 ], [ 1.392, -1.428, -0.924 ], [ 1.412, -0.756, -1.34 ], [ 1.52, 0.36, -0.288 ], [ -1.384, 0.168, -1.292 ], ... ],
    	[ [ 0.788, 0.812, 1.408 ], [ -1.54, 1.072, -1.972 ], [ -1.876, -0.268, 0.912 ], [ -1.42, 1.592, 0.328 ], [ 0.856, -1.48, -0.528 ], [ 0.62, 0.752, 0.168 ], [ -1.996, 1.204, -0.792 ], [ -0.564, -0.96, -0.804 ], ... ],
    	[ [ -0.164, -1.092, 1.712 ], [ 1.664, 1.144, 0.168 ], [ -1.268, -0.12, 0.852 ], [ 0.364, 0.836, 1.444 ], [ -0.784, 0.404, -1.1 ], [ 0.352, -0.916, 1.256 ], [ -1.872, -1.328, -1.236 ], [ 0.304, -0.996, -1.72 ], ... ],
    	[ [ -1.236, -0.356, 0.152 ], [ -0.644, -0.916, -1.992 ], [ -1.468, 1.988, -0.368 ], [ -1.712, -0.92, 0.476 ], [ -0.916, 1.784, -1.776 ], [ -0.232, 1.58, -1.816 ], [ -0.696, 1.788, -1.38 ], [ 1.64, 0.448, -0.244 ], ... ],
    	...
    ]
    [
    	[ [ -0.74, -0.028, -1.584 ], [ 0.436, -0.392, -1.024 ], [ 1.012, -0.536, 1.732 ], [ -0.692, 1.856, -0.236 ], [ 1.504, -0.868, -1.156 ], [ 0.624, -1.412, 1.728 ], [ -1.608, 1.412, 1.6 ], [ -1.156, -1.504, -1.032 ], ... ],
    	[ [ -1.96, 1.564, 0.836 ], [ 1.852, 1.108, -1.128 ], [ -1.288, -1.436, 0.992 ], [ -1.0, -1.668, 0.528 ], [ -0.212, 0.208, 1.712 ], [ 0.636, 1.764, 1.444 ], [ -1.436, -1.84, 1.452 ], [ 0.304, -0.296, -0.892 ], ... ],
    	[ [ -1.752, -1.52, -1.924 ], [ -1.356, -1.296, -1.744 ], [ -0.448, -1.248, 0.156 ], [ -0.944, -1.368, -0.12 ], [ -0.912, -0.124, 0.416 ], [ -1.684, 0.924, 1.864 ], [ -0.228, 1.288, -1.936 ], [ -0.784, 0.124, 1.412 ], ... ],
    	[ [ 1.168, -1.244, 1.492 ], [ 1.308, 1.62, 1.74 ], [ -0.3, 0.5, 1.484 ], [ -0.604, 0.568, 1.888 ], [ 0.568, -0.512, 0.544 ], [ 0.208, 1.564, -1.552 ], [ -1.884, -0.752, -0.436 ], [ -0.7, 1.628, -0.376 ], ... ],
    	[ [ -1.588, 0.88, -1.736 ], [ -0.656, -0.944, 1.432 ], [ -0.38, 1.48, 0.856 ], [ -1.88, 1.888, 0.888 ], [ 1.088, -1.3, 1.164 ], [ -1.516, -0.02, -0.616 ], [ 1.62, -1.84, 0.628 ], [ 0.604, -1.42, -1.988 ], ... ],
    	[ [ 1.744, 1.092, -1.304 ], [ -1.06, -0.2, 0.452 ], [ -0.532, 1.656, -0.392 ], [ -0.968, 1.284, -1.672 ], [ 0.356, -1.996, -0.988 ], [ -0.492, 0.272, 0.188 ], [ -0.024, -0.316, 0.24 ], [ 1.064, -0.136, 1.6 ], ... ],
    	[ [ -1.952, 1.428, 0.684 ], [ -0.956, -1.2, -0.948 ], [ 0.58, 1.044, -0.704 ], [ -1.84, 0.432, -0.108 ], [ 0.468, 1.452, -1.404 ], [ -1.152, -1.696, -0.308 ], [ 0.848, -0.864, -0.484 ], [ -0.72, 0.708, -1.692 ], ... ],
    	[ [ -1.204, -0.82, -0.9 ], [ -1.424, 0.888, 0.964 ], [ -0.332, 0.872, 1.632 ], [ -0.656, 0.28, 1.136 ], [ 1.332, -1.724, -0.244 ], [ 1.684, -0.336, 0.272 ], [ 0.564, 1.316, -1.252 ], [ 0.888, 0.232, -0.148 ], ... ],
    	...
    ]

Gradient Descent

First, we train using the basic gradient descent method, applying weak line search conditions.

TrainingTester.java:480 executed in 31.57 seconds (2.820 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 7980598524492
Reset training subject: 7981439101334
Constructing line search parameters: GD
th(0)=0.12462857083227141;dx=-1.5338153326849526E-9
New Minimum: 0.12462857083227141 > 0.12462856752776645
WOLFE (weak): th(2.154434690031884)=0.12462856752776645; dx=-1.533815321639375E-9 evalInputDelta=3.3045049613322774E-9
New Minimum: 0.12462856752776645 > 0.12462856422326155
WOLFE (weak): th(4.308869380063768)=0.12462856422326155; dx=-1.5338153105937936E-9 evalInputDelta=6.609009867153404E-9
New Minimum: 0.12462856422326155 > 0.12462855100524206
WOLFE (weak): th(12.926608140191302)=0.12462855100524206; dx=-1.5338152664114417E-9 evalInputDelta=1.982702935166003E-8
New Minimum: 0.12462855100524206 > 0.12462849152415918
WOLFE (weak): th(51.70643256076521)=0.12462849152415918; dx=-1.5338150675902981E-9 evalInputDelta=7.930811223022527E-8
New Minimum: 0.12462849152415918 > 0.12462817429184744
WOLFE (weak): th(258.53216280382605)=0.12462817429184744; dx=-1.533814007195415E-9 evalInputDelta=3.9654042396919387E-7
New Minimum: 0.12462817429184744 > 0.12462619159486817
WOLFE (weak): th(1551.1929768229563)=0.12462619159486817; dx=-1.5338073791378437E-9 evalInputDelta=2.379237403246637E-6
New Minimum: 0.12462619159486817 > 0.12461191642963917
WOLFE (weak): th(10858.350837760694)=0.12461191642963917; dx=-1.5337596271168604E-9 evalInputDelta=1.665440263223794E-5
New Minimum: 0.12461191642963917 > 0.12449535260210769
WOLFE (weak): th(86866.80670208555)=0.12449535260210769; dx=-1.5333676798118431E-9 evalInputDelta=1.332182301637258E-4
New Minimum: 0.12449535260210769 > 0.12343104734665233
WOLFE (weak): th(781801.26031877)=0.12343104734665233; dx=-1.529621149063688E-9 evalInputDelta=0.0011975234856190786
New Minimum: 0.12343104734665233 > 0.11284216570850833
WOLFE (weak): th(7818012.6031877)=0.11284216570850833; dx=-1.4752563644641226E-9 evalInputDelta=0.011786405123763083
New Minimum: 0.11284216570850833 > 0.047459686247836516
END: th(8.599813863506469E7)=0.047459686247836516; dx=-2.3712508076797127E-10 evalInputDelta=0.0771688845844349
Fitness changed from 0.12462857083227141 to 0.047459686247836516
Iteration 1 complete. Error: 0.047459686247836516 Total: 19.7782; Orientation: 0.7815; Line Search: 17.0078
th(0)=0.047459686247836516;dx=-4.753473270617498E-10
New Minimum: 0.047459686247836516 > 0.020050210125649056
END: th(1.0E8)=0.020050210125649056; dx=-6.286043644420532E-11 evalInputDelta=0.02740947612218746
Fitness changed from 0.047459686247836516 to 0.020050210125649056
Iteration 2 complete. Error: 0.020050210125649056 Total: 3.5251; Orientation: 0.7101; Line Search: 2.2472
th(0)=0.020050210125649056;dx=-1.7630388683540952E-10
New Minimum: 0.020050210125649056 > 0.009682921598361397
END: th(1.0E8)=0.009682921598361397; dx=-2.8194196764846402E-11 evalInputDelta=0.010367288527287659
Fitness changed from 0.020050210125649056 to 0.009682921598361397
Iteration 3 complete. Error: 0.009682921598361397 Total: 3.6284; Orientation: 0.7092; Line Search: 2.3508
Low gradient: 8.672009384169624E-6
th(0)=0.009682921598361397;dx=-7.520374675912601E-11
New Minimum: 0.009682921598361397 > 0.005246880155172717
END: th(1.0E8)=0.005246880155172717; dx=-1.1430949858207367E-11 evalInputDelta=0.00443604144318868
Fitness changed from 0.009682921598361397 to 0.005246880155172717
Iteration 4 complete. Error: 0.005246880155172717 Total: 4.6300; Orientation: 0.7712; Line Search: 3.2329
Final threshold in iteration 4: 0.005246880155172717 (> 0.0) after 31.563s (< 30.000s)

Returns

    0.005246880155172717

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.49187803906476535, 2.152244787314055, -0.0643667482492896 ], [ -1.7538852145673394, 0.22962004169499192, -1.7557348271276523 ], [ -0.14057773882663874, -0.996579147446591, -1.2188431137267703 ], [ -0.8217807545032895, -0.9352929038070179, -1.7829263416896926 ], [ 0.22433169408486256, 0.4140931825972858, -0.5384248766821487 ], [ 1.44321964143489, 1.487455138205099, -0.6986747796399887 ], [ -2.5719558842684256, -0.7898310538713331, -0.9822130618602412 ], [ 0.4820759184251867, 1.0853631622538038, -0.675439080678991 ], ... ],
    	[ [ 0.5894332592286249, -1.7443331239361253, -1.2011001352925 ], [ 1.306063429161898, -0.43872000520141274, 0.7566565760395147 ], [ -1.5126876785837833, -0.745665190604478, -1.4496471308117387 ], [ -0.27272537527217033, 0.9979115120012314, 1.3548138632709392 ], [ 0.5722732879325729, -1.7668385202231764, 1.5545652322906036 ], [ 0.09317854884898971, -1.5522178096494383, -0.44496073919955115 ], [ 0.8033241563207546, 0.3199795386723692, -1.427303694993124 ], [ -0.11415936612003, -0.8789538482964518, 2.313113214416482 ], ... ],
    	[ [ -1.6087492532073404, -0.8557356068422195, 0.35648486004955965 ], [ -1.8666997533034912, 0.45601468847740395, -1.625314935173913 ], [ -1.3565313965891215, -0.7039998955360641, -0.015468707874814337 ], [ 0.23236425085750373, 0.9882708210726224, -0.5606350719301262 ], [ -1.4117972904424199, 0.3598660786850238, 3.0279312117573958 ], [ 0.1999273094532974, -0.5116317630305816, 0.959704453577284 ], [ 1.0179989670235758, -0.015637702096784167, 0.6976387350732081 ], [ -1.2158762002888248, 0.0961077081683583, -1.320231507879534 ], ... ],
    	[ [ -0.12128348850176475, -2.0921434489207225, -1.5185730625775133 ], [ 1.6261218282373306, -0.34078402339816233, -0.2213378048391682 ], [ -0.9684130548944825, -1.9559137390379358, 1.2443267939324183 ], [ -1.42014315454799, 0.49231193509027504, -0.956168780542285 ], [ 2.001349620457177, -0.7540931271011341, 0.9287435066439576 ], [ 2.0354368924280672, 0.8644466356277399, 1.7161164719441926 ], [ -0.633506056712087, -1.7562717203519764, 0.06577777706406389 ], [ -0.13274887521536122, -0.3622840443045429, 1.239032919519905 ], ... ],
    	[ [ -0.6318861564632162, -0.10561707507267727, -1.090496768464107 ], [ 0.1719123367386465, 0.7165391075791714, 1.595548555682182 ], [ -1.2292653873457844, -1.679654953231187, -0.4030796594230291 ], [ 2.1459560820585297, -0.4453347250388377, 0.36737864298030853 ], [ 1.586932044651887, 1.0081351213090466, -0.499067165960934 ], [ -0.2827614243619364, 0.06792179453161401, 0.2588396298303223 ], [ 1.272531768057428, -0.4914830545283665, -0.4530487135290615 ], [ 1.2538058602366169, 0.10693637297009041, -0.2927422332067071 ], ... ],
    	[ [ 0.5116482094828388, 1.4322542466799202, 0.16809754383724088 ], [ -1.9705058596267135, -1.7952869382463355, 1.105792797873049 ], [ -1.0019286756755441, -1.0062379338707488, -1.503833390453707 ], [ 0.2380107941945776, 2.244685398706297, -0.1586961929008744 ], [ -0.16692105414597713, -0.02386208129711779, 3.0387831354430954 ], [ 0.8787850115345525, 0.5201446206853562, 0.9130703677800915 ], [ 0.45840834554113247, -1.3518038646803896, -0.49060448086074254 ], [ -1.4380943208412431, -0.792263229345286, -0.709642449813471 ], ... ],
    	[ [ -0.3329782902309565, -1.1436389018338058, -0.17138280793523777 ], [ -1.343449684435047, 1.5906790984076664, -1.4472294139726194 ], [ -0.4104321952550915, -0.41254980848787415, 2.190982003742966 ], [ -1.0301348757615068, 0.922949277792221, -0.7128144020307151 ], [ 0.5611521680478575, 1.0249967558149589, 1.7978510761371838 ], [ -0.8389935484461062, -1.0963005452986798, -1.848705906255214 ], [ -0.2558270032947987, 0.8474612711576929, -0.44363426786289406 ], [ -0.5989973216731774, -1.7654408177211, 0.2724381393942778 ], ... ],
    	[ [ -0.5460143838863029, 1.1827597088459378, 0.31925467504036587 ], [ -0.7184400876908839, 0.2891790977741033, -1.2787390100832192 ], [ -0.8564635467493931, 2.488817581805985, -0.012354035056591647 ], [ 0.2648108209924013, 0.48130811000462703, 2.305881069002971 ], [ -0.9412973855373244, 0.6523011484936344, -0.5230037629563098 ], [ -1.7869032080230147, -1.6251584342920662, -0.307938357684919 ], [ -1.0204895661391522, 0.8251633508535605, -1.2126737847144087 ], [ 0.5951370073579898, -0.2540106077584847, -0.981126399599505 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.06022451687916873, 0.8474250111457989, 0.09235047197503216 ], [ 0.10791762126271721, 0.7843641787794043, 0.1077181999578785 ], [ 0.5665585539597349, 0.24070666330251142, 0.19273478273775368 ], [ 0.4395318746859781, 0.392367181971623, 0.16810094334239895 ], [ 0.37378423052410986, 0.451890636409455, 0.17432513306643518 ], [ 0.4623933970448393, 0.4833067447054448, 0.054299858249715886 ], [ 0.08442410893342403, 0.5016877138125952, 0.41388817725398064 ], [ 0.3182296687092308, 0.58176151433693, 0.10000881695383917 ], ... ],
    	[ [ 0.7912637386970519, 0.07669718094728804, 0.13203908035566003 ], [ 0.5707844190205494, 0.09970622916729696, 0.32950935181215363 ], [ 0.2370558546969582, 0.5104630603159371, 0.25248108498710464 ], [ 0.1035791582694251, 0.3690650891534501, 0.527355752577125 ], [ 0.26546441270351356, 0.025594282450424916, 0.7089413048460615 ], [ 0.5628191544078538, 0.108588119027146, 0.32859272656500016 ], [ 0.5799864556673178, 0.3576876549332188, 0.06232588939946329 ], [ 0.07816546632853782, 0.03638051979008689, 0.8854540138813752 ], ... ],
    	[ [ 0.09746614635176158, 0.2069585945999254, 0.6955752590483131 ], [ 0.08015168596036201, 0.817814350674603, 0.10203396336503515 ], [ 0.1482909406212496, 0.284777353775867, 0.5669317056028833 ], [ 0.27917155457863624, 0.5945073349271035, 0.12632111049426017 ], [ 0.010913151934406538, 0.06417617465301068, 0.9249106734125827 ], [ 0.27558245487973637, 0.1352774430291642, 0.5891401020910993 ], [ 0.48040003770636924, 0.17088357891456246, 0.3487163833790683 ], [ 0.17811218976052454, 0.6614256226113433, 0.16046218762813225 ], ... ],
    	[ [ 0.7211868367647412, 0.10048799613194012, 0.17832516710331864 ], [ 0.7706973833057594, 0.1078121052667479, 0.1214905114274928 ], [ 0.09511822033563881, 0.035432159152658446, 0.8694496205117027 ], [ 0.10683679848107436, 0.7232518573292787, 0.1699113441896471 ], [ 0.7113910513111781, 0.04523085708965144, 0.24337809159917043 ], [ 0.49098974139427365, 0.15223615667318957, 0.3567741019325369 ], [ 0.29960843844405366, 0.09748619256988415, 0.6029053689860621 ], [ 0.17429886704209288, 0.13855069442283555, 0.6871504385350716 ], ... ],
    	[ [ 0.30077296608726045, 0.5090899887176806, 0.1901370451950589 ], [ 0.14543008933465437, 0.2507163589957963, 0.6038535516695491 ], [ 0.25497404243991206, 0.1625153046405578, 0.5825106529195302 ], [ 0.8039868213115653, 0.06023731859969712, 0.13577586008873765 ], [ 0.593557991966539, 0.3327322118203573, 0.0737097962131038 ], [ 0.24161628364261262, 0.3431041625149927, 0.4152795538423948 ], [ 0.7410562800057742, 0.1269840834261008, 0.13195963656812504 ], [ 0.6533336692301418, 0.2075180845679934, 0.13914824620186483 ], ... ],
    	[ [ 0.23696350491495627, 0.5949707107919611, 0.16806578429308283 ], [ 0.0418944274816856, 0.04991750504770873, 0.9081880674706057 ], [ 0.3844562042699855, 0.3828030477184651, 0.2327407480115494 ], [ 0.1097565790415044, 0.8164287128382564, 0.0738147081202392 ], [ 0.03727634270813532, 0.043009363789414036, 0.9197142935024507 ], [ 0.3658306273989593, 0.25557861900085793, 0.3785907536001828 ], [ 0.6448524251930795, 0.10551037425430854, 0.24963720055261196 ], [ 0.2008257013791761, 0.3830894320765404, 0.4160848665442835 ], ... ],
    	[ [ 0.38168682403336013, 0.16968432997112817, 0.4486288459955116 ], [ 0.04829392884594364, 0.908172772722476, 0.04353329843158042 ], [ 0.06459666007562985, 0.0644600140662223, 0.8709433258581479 ], [ 0.10611384185997794, 0.7481448815820158, 0.14574127655800623 ], [ 0.16571653661741997, 0.26351842924807295, 0.5707650341345071 ], [ 0.46784619688380846, 0.3617063178532284, 0.17044748526296327 ], [ 0.20649060976231728, 0.6223752536498047, 0.171134136587878 ], [ 0.2701379494603039, 0.08414060397910801, 0.645721446560588 ], ... ],
    	[ [ 0.11099532553392356, 0.6253191634110126, 0.2636855110550638 ], [ 0.23201262036614537, 0.6354992775336342, 0.1324881021002203 ], [ 0.03155125985384222, 0.8950634850010704, 0.07338525514508741 ], [ 0.10059791190586234, 0.12491441091000022, 0.7744876771841375 ], [ 0.13439426398857685, 0.6614115626992646, 0.20419417331215864 ], [ 0.15234703913699826, 0.1790931111040116, 0.6685598497589902 ], [ 0.12258826424172813, 0.7762574355006908, 0.10115430025758099 ], [ 0.6117983130339154, 0.26171472095496034, 0.1264869660111243 ], ... ],
    	...
    ]

Conjugate Gradient Descent

First, we use a conjugate gradient descent method, which converges the fastest for purely quadratic functions.

TrainingTester.java:452 executed in 54.68 seconds (1.508 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new QuadraticSearch());
      iterativeTrainer.setOrientation(new GradientDescent());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 8012498521580
Reset training subject: 8013049157906
Constructing line search parameters: GD
F(0.0) = LineSearchPoint{point=PointSample{avg=0.12462857083227141}, derivative=-1.5338153326849526E-9}
F(1.0E-10) = LineSearchPoint{point=PointSample{avg=0.12462857083227141}, derivative=-1.5338153326849526E-9}, evalInputDelta = 0.0
F(7.000000000000001E-10) = LineSearchPoint{point=PointSample{avg=0.12462857083227141}, derivative=-1.5338153326849526E-9}, evalInputDelta = 0.0
F(4.900000000000001E-9) = LineSearchPoint{point=PointSample{avg=0.12462857083227141}, derivative=-1.5338153326849526E-9}, evalInputDelta = 0.0
New Minimum: 0.12462857083227141 > 0.12462857083227137
F(3.430000000000001E-8) = LineSearchPoint{point=PointSample{avg=0.12462857083227137}, derivative=-1.5338153326849524E-9}, evalInputDelta = -4.163336342344337E-17
New Minimum: 0.12462857083227137 > 0.12462857083227104
F(2.4010000000000004E-7) = LineSearchPoint{point=PointSample{avg=0.12462857083227104}, derivative=-1.5338153326849516E-9}, evalInputDelta = -3.7470027081099033E-16
New Minimum: 0.12462857083227104 > 0.12462857083226882
F(1.6807000000000003E-6) = LineSearchPoint{point=PointSample{avg=0.12462857083226882}, derivative=-1.533815332684944E-9}, evalInputDelta = -2.5951463200613034E-15
New Minimum: 0.12462857083226882 > 0.12462857083225336
F(1.1764900000000001E-5) = LineSearchPoint{point=PointSample{avg=0.12462857083225336}, derivative=-1.5338153326848923E-9}, evalInputDelta = -1.8055001937966608E-14
New Minimum: 0.12462857083225336 > 0.1246285708321451
F(8.235430000000001E-5) = LineSearchPoint{point=PointSample{avg=0.1246285708321451}, derivative=-1.5338153326845306E-9}, evalInputDelta = -1.2631562462672719E-13
New Minimum: 0.1246285708321451 > 0.1246285708313872
F(5.764801000000001E-4) = LineSearchPoint{point=PointSample{avg=0.1246285708313872}, derivative=-1.5338153326819971E-9}, evalInputDelta = -8.842093723870903E-13
New Minimum: 0.1246285708313872 > 0.12462857082608189
F(0.004035360700000001) = LineSearchPoint{point=PointSample{avg=0.12462857082608189}, derivative=-1.533815332664264E-9}, evalInputDelta = -6.189521117860863E-12
New Minimum: 0.12462857082608189 > 0.12462857078894492
F(0.028247524900000005) = LineSearchPoint{point=PointSample{avg=0.12462857078894492}, derivative=-1.5338153325401303E-9}, evalInputDelta = -4.332649516936016E-11
New Minimum: 0.12462857078894492 > 0.12462857052898599
F(0.19773267430000002) = LineSearchPoint{point=PointSample{avg=0.12462857052898599}, derivative=-1.5338153316711971E-9}, evalInputDelta = -3.032854245521577E-10
New Minimum: 0.12462857052898599 > 0.12462856870927355
F(1.3841287201) = LineSearchPoint{point=PointSample{avg=0.12462856870927355}, derivative=-1.533815325588659E-9}, evalInputDelta = -2.1229978608428013E-9
New Minimum: 0.12462856870927355 > 0.12462855597128668
F(9.688901040700001) = LineSearchPoint{point=PointSample{avg=0.12462855597128668}, derivative=-1.5338152830108725E-9}, evalInputDelta = -1.4860984734466065E-8
New Minimum: 0.12462855597128668 > 0.12462846680538839
F(67.8223072849) = LineSearchPoint{point=PointSample{avg=0.12462846680538839}, derivative=-1.5338149849651927E-9}, evalInputDelta = -1.0402688302435514E-7
New Minimum: 0.12462846680538839 > 0.12462784264458553
F(474.7561509943) = LineSearchPoint{point=PointSample{avg=0.12462784264458553}, derivative=-1.533812898587871E-9}, evalInputDelta = -7.281876858861169E-7
New Minimum: 0.12462784264458553 > 0.12462347354274081
F(3323.2930569601003) = LineSearchPoint{point=PointSample{avg=0.12462347354274081}, derivative=-1.5337982911260727E-9}, evalInputDelta = -5.097289530603399E-6
New Minimum: 0.12462347354274081 > 0.12459289099588919
F(23263.0513987207) = LineSearchPoint{point=PointSample{avg=0.12459289099588919}, derivative=-1.5336959006850759E-9}, evalInputDelta = -3.567983638222416E-5
New Minimum: 0.12459289099588919 > 0.12437887067147853
F(162841.3597910449) = LineSearchPoint{point=PointSample{avg=0.12437887067147853}, derivative=-1.532972394819232E-9}, evalInputDelta = -2.497001607928845E-4
New Minimum: 0.12437887067147853 > 0.12288367184063556
F(1139889.5185373144) = LineSearchPoint{point=PointSample{avg=0.12288367184063556}, derivative=-1.5275758648831433E-9}, evalInputDelta = -0.0017448989916358515
New Minimum: 0.12288367184063556 > 0.1126044616263274
F(7979226.6297612) = LineSearchPoint{point=PointSample{avg=0.1126044616263274}, derivative=-1.4736667000843992E-9}, evalInputDelta = -0.012024109205944017
New Minimum: 0.1126044616263274 > 0.060324536552334475
F(5.58545864083284E7) = LineSearchPoint{point=PointSample{avg=0.060324536552334475}, derivative=-6.533652137694317E-10}, evalInputDelta = -0.06430403427993694
F(3.909821048582988E8) = LineSearchPoint{point=PointSample{avg=0.07257720067960391}, derivative=8.600871418248831E-11}, evalInputDelta = -0.0520513701526675
0.07257720067960391 <= 0.12462857083227141
F(3.702219067539073E8) = LineSearchPoint{point=PointSample{avg=0.07073746689582054}, derivative=9.128608523416537E-11}, evalInputDelta = -0.05389110393645087
Right bracket at 3.702219067539073E8
F(3.4942559942019826E8) = LineSearchPoint{point=PointSample{avg=0.06878110423341663}, derivative=9.69140386582667E-11}, evalInputDelta = -0.055847466598854784
Right bracket at 3.4942559942019826E8
F(3.2865928059042E8) = LineSearchPoint{point=PointSample{avg=0.0667075231796676}, derivative=1.028374474496384E-10}, evalInputDelta = -0.05792104765260381
Right bracket at 3.2865928059042E8
F(3.0800830201586014E8) = LineSearchPoint{point=PointSample{avg=0.06452103489815997}, derivative=1.0894429124505085E-10}, evalInputDelta = -0.060107535934111445
Right bracket at 3.0800830201586014E8
F(2.875818527216941E8) = LineSearchPoint{point=PointSample{avg=0.06223336350262685}, derivative=1.1503617675262655E-10}, evalInputDelta = -0.06239520732964456
Right bracket at 2.875818527216941E8
New Minimum: 0.060324536552334475 > 0.05986686383835306
F(2.6751799818343738E8) = LineSearchPoint{point=PointSample{avg=0.05986686383835306}, derivative=1.2079050261911054E-10}, evalInputDelta = -0.06476170699391835
Right bracket at 2.6751799818343738E8
New Minimum: 0.05986686383835306 > 0.057458176201941644
F(2.4798849286515263E8) = LineSearchPoint{point=PointSample{avg=0.057458176201941644}, derivative=1.2571950812277176E-10}, evalInputDelta = -0.06717039463032977
Right bracket at 2.4798849286515263E8
New Minimum: 0.057458176201941644 > 0.055061443651359496
F(2.2920190847025067E8) = LineSearchPoint{point=PointSample{avg=0.055061443651359496}, derivative=1.291422437353526E-10}, evalInputDelta = -0.06956712718091192
Right bracket at 2.2920190847025067E8
New Minimum: 0.055061443651359496 > 0.0527492586371456
F(2.1140250748253006E8) = LineSearchPoint{point=PointSample{avg=0.0527492586371456}, derivative=1.302024086787179E-10}, evalInputDelta = -0.0718793121951258
Right bracket at 2.1140250748253006E8
New Minimum: 0.0527492586371456 > 0.050608533503959885
F(1.9486114798212644E8) = LineSearchPoint{point=PointSample{avg=0.050608533503959885}, derivative=1.27980201211177E-10}, evalInputDelta = -0.07402003732831153
Right bracket at 1.9486114798212644E8
New Minimum: 0.050608533503959885 > 0.04872860931345892
F(1.79854266318096E8) = LineSearchPoint{point=PointSample{avg=0.04872860931345892}, derivative=1.217357888566019E-10}, evalInputDelta = -0.0758999615188125
Right bracket at 1.79854266318096E8
New Minimum: 0.04872860931345892 > 0.04718179609515965
F(1.6662924372315997E8) = LineSearchPoint{point=PointSample{avg=0.04718179609515965}, derivative=1.1125252128243864E-10}, evalInputDelta = -0.07744677473711176
Loops = 12
Fitness changed from 0.12462857083227141 to 0.04718179609515965
Iteration 1 complete. Error: 0.04718179609515965 Total: 54.6750; Orientation: 0.7041; Line Search: 52.3150
Final threshold in iteration 1: 0.04718179609515965 (> 0.0) after 54.676s (< 30.000s)

Returns

    0.04718179609515965

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.01 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ 0.6498681009605671, 1.0967053541067147, -0.1505734550672817 ], [ -1.3056582948012871, 1.1354046973833367, -3.1097464025820494 ], [ -0.3172780213616342, -1.7477390387146123, -0.29098293992375335 ], [ -0.3474548198474553, -0.7367622571226726, -2.455782923029872 ], [ 0.9049416441413253, -0.1095416510703009, -0.6953999930710248 ], [ 1.822282988508578, 1.4388058414677047, -1.0290888299762826 ], [ -3.663789052915835, 0.1269275188020056, -0.8071384658861709 ], [ 1.8737467826300258, 0.3321851174896824, -1.313931900119709 ], ... ],
    	[ [ 2.0070454349579787, -2.9353749918505545, -1.4276704431074247 ], [ 0.6905066433130022, 0.5754069869501602, 0.3580863697368377 ], [ -1.5486259405945626, -0.30352468856515125, -1.8558493708402861 ], [ 0.027031628215753556, 0.6892901097506268, 1.3636782620336199 ], [ 0.8001722968974692, -1.9078097789094812, 1.467637482012012 ], [ 0.04852441010272934, -1.1693270851702842, -0.7831973249324444 ], [ 0.30309418018085754, 0.964746345992191, -1.5718405261730484 ], [ -1.3865625920349287, -0.6950233698784174, 3.4015859619133453 ], ... ],
    	[ [ -1.4777900519043206, -2.1125477952650833, 1.4823378471694038 ], [ -1.9418615351644268, 0.6647855825267338, -1.758924047362307 ], [ -2.2559798515732954, 0.35714795806069577, -0.1771681064874 ], [ -0.7188206674789893, 2.0913897659627243, -0.7125690984837348 ], [ -1.437624123826981, -1.2446037375063137, 4.658227861333295 ], [ -0.28151311248514055, -0.612138538778495, 1.5416516512636353 ], [ 1.504509399013279, -0.5884628091632909, 0.7839534101500114 ], [ -1.8044056342367192, -0.1153901198440419, -0.5202042459192397 ], ... ],
    	[ [ 0.5308041635424328, -2.82618998502133, -1.4366141785211028 ], [ 2.775462834867568, -1.7491954616379384, 0.03773262677037101 ], [ -1.126501688000916, -1.9886653329204642, 1.4351670209213803 ], [ -1.5894277246783326, 0.7588445991118923, -1.0534168744335595 ], [ 3.0885077986849394, -1.0826653483598705, 0.17015754967493124 ], [ 2.432066754293313, 0.9058864207308781, 1.278046824975809 ], [ -0.5386195425099016, -2.859122926706147, 1.0737424692160493 ], [ 0.23785637817474048, 1.3470366345709435, -0.8408930127456842 ], ... ],
    	[ [ 0.0847442309174129, -0.0025645620404557423, -1.9101796688769577 ], [ -0.6696968686975933, 0.1528433283947946, 3.000853540302799 ], [ -1.3301491950480726, -1.9916403905287527, 0.009789585576825077 ], [ 2.8717646794699414, -0.5324413267222355, -0.2713233527477059 ], [ 1.5403127270839607, 0.9236161834456489, -0.3679289105296097 ], [ -0.9585967469683989, 0.9906646118057703, 0.011932135162628588 ], [ 1.4188817405876883, -0.35352160130986854, -0.7373601392778198 ], [ 1.3199146665956096, 0.3659583165002636, -0.6178729830958731 ], ... ],
    	[ [ -0.010266605771382942, 0.7353835572560583, 1.3868830485153243 ], [ -2.0059127361731393, -1.9062352671304938, 1.2521480033036334 ], [ -1.4197597024229962, -1.1786231242867098, -0.9136171732902936 ], [ 0.1647863270211175, 3.277849184348815, -1.1186355113699324 ], [ 1.239255339871888, -0.977647919633968, 2.58639257976208 ], [ 2.0394416386766863, 0.11208724242035784, 0.16047111890295596 ], [ 0.650340337101646, -1.139548669229574, -0.894791667872072 ], [ -2.271160672766644, -1.1789432841428589, 0.5101039569095027 ], ... ],
    	[ [ -0.25598350132642456, -0.7478192845891334, -0.6441972140844422 ], [ -3.2533024673140334, 3.480116568942548, -1.4268141016285139 ], [ 0.7082133119706469, -1.2625309349965324, 1.9223176230258867 ], [ -0.46024177033126956, -0.5826613036448542, 0.22290307397612208 ], [ 0.6965886640647453, 0.44190568131530794, 2.245505654619947 ], [ -0.7001637541136283, -1.099126446458185, -1.9847097994281864 ], [ -0.7487350540854694, 2.572592913875506, -1.675857859790036 ], [ -0.5367583200229207, -1.8973385221498325, 0.34209684217275393 ], ... ],
    	[ [ 0.55148243915484, 0.0673104833349123, 0.33720707751024936 ], [ -0.7202012737233248, 0.18681971452386303, -1.174618440800538 ], [ -0.835050022595217, 4.36268838372106, -1.9076383611258418 ], [ 0.12570935390825194, -0.9850241230950567, 3.911314769186804 ], [ -2.210420748180766, 2.3041384061462704, -0.9057176579655047 ], [ -1.804133056038498, -2.8196071289974203, 0.9037401850359186 ], [ -1.866753473811798, 0.17137349277206848, 0.28737998103972867 ], [ 0.7446496319741434, -0.17635466554494353, -1.2082949664291993 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.3319512597200797, 0.5189592705028141, 0.14908946977710627 ], [ 0.07905221149337283, 0.9079338570864348, 0.013013931420192365 ], [ 0.4413369479529709, 0.10556718983436361, 0.45309586221266557 ], [ 0.5558746354387694, 0.37661948559950637, 0.06750587896172414 ], [ 0.6392156414157769, 0.23177302893538002, 0.12901132964884313 ], [ 0.574959587086088, 0.3918278589293398, 0.0332125539845722 ], [ 0.01595118594217098, 0.7064481093402161, 0.2776007047176128 ], [ 0.7966133569086331, 0.17051235762973022, 0.032874285461636565 ], ... ],
    	[ [ 0.9621195543567893, 0.006866937945776928, 0.03101350769743373 ], [ 0.3833674993306956, 0.34168675662144204, 0.27494574404786243 ], [ 0.19198380412920057, 0.6668147471977927, 0.14120144867300677 ], [ 0.14824862188238722, 0.28747883020782566, 0.5642725479097871 ], [ 0.33156925533321857, 0.02210612295707136, 0.6463246217097101 ], [ 0.5776459153121932, 0.17090529666506532, 0.25144878802274145 ], [ 0.3234825819535099, 0.6269065182453136, 0.0496108998011766 ], [ 0.008125085370977343, 0.016224062139751454, 0.9756508524892712 ], ... ],
    	[ [ 0.04800651969172907, 0.02544652229758949, 0.9265469580106815 ], [ 0.06347485260467516, 0.8603083944624872, 0.0762167529328378 ], [ 0.04417621452353779, 0.6026365754495148, 0.3531872100269474 ], [ 0.05370660131702739, 0.8922499951681337, 0.05404340351483898 ], [ 0.002241021487248998, 0.002718151814340616, 0.9950408266984103 ], [ 0.126423910857599, 0.09083232329693246, 0.7827437658454686 ], [ 0.6211945003583724, 0.07660574161940348, 0.302199758022224 ], [ 0.09974136740904158, 0.5400145178759584, 0.360244114715 ], ... ],
    	[ [ 0.8513122127674853, 0.02965956855496634, 0.11902821867754856 ], [ 0.9297521211159162, 0.010077041501019684, 0.060170837383064224 ], [ 0.06954263018976808, 0.02936420171331434, 0.9010931680969175 ], [ 0.07589185383203534, 0.7943956801119355, 0.12971246605602924 ], [ 0.9350540614474553, 0.014431758078152074, 0.050514180474392656 ], [ 0.6524302855742866, 0.14181508035219823, 0.20575463407351513 ], [ 0.16358938612753846, 0.016068424914233097, 0.8203421889582283 ], [ 0.22873389444758582, 0.6934918940946506, 0.07777421145776359 ], ... ],
    	[ [ 0.4872301648454079, 0.4464948366645411, 0.066274998490051 ], [ 0.02350187801758835, 0.05349673000817616, 0.9230013919742355 ], [ 0.18744525794036287, 0.09673697036884935, 0.7158177716907878 ], [ 0.9290377361474721, 0.030874893698447647, 0.04008737015408034 ], [ 0.5923940934667252, 0.3197295774943236, 0.08787632903895103 ], [ 0.09378366415198976, 0.6586893606387665, 0.24752697520924385 ], [ 0.7777964294709035, 0.1321663675550248, 0.0900372029740716 ], [ 0.653921223869198, 0.2518995863676614, 0.09417918976314067 ], ... ],
    	[ [ 0.13983140864847193, 0.294738238713257, 0.5654303526382709 ], [ 0.03558227068869133, 0.03931180944375268, 0.925105919867556 ], [ 0.2543510173784125, 0.3237112288233561, 0.4219377537982313 ], [ 0.042075306137011825, 0.9462661513101307, 0.011658542552857483 ], [ 0.2018023206365242, 0.021985548431464007, 0.7762121309320119 ], [ 0.7702498769765772, 0.11209655313756765, 0.1176535698858553 ], [ 0.7244999122140049, 0.12097605509803581, 0.15452403268795933 ], [ 0.049701060677937156, 0.1481527243683562, 0.8021462149537066 ], ... ],
    	[ [ 0.43672505732376277, 0.267058585154857, 0.29621635752138026 ], [ 0.001180321999254156, 0.9914874777047485, 0.0073322002959972515 ], [ 0.22189530706192076, 0.030921799909953876, 0.7471828930281254 ], [ 0.2587407717837177, 0.2289279089758312, 0.5123313192404512 ], [ 0.15428449801928348, 0.11959551072574075, 0.7261199912549758 ], [ 0.5134018591029269, 0.3445007257902681, 0.14209741510680488 ], [ 0.03437276864020277, 0.9520262515376867, 0.013600979822110624 ], [ 0.27287712431896843, 0.0699962309946073, 0.6571266446864243 ], ... ],
    	[ [ 0.4126547452064921, 0.25428083847283606, 0.33306441632067174 ], [ 0.24320541391637757, 0.6024034474095578, 0.15439113867406465 ], [ 0.005488327724317754, 0.9926339954782336, 0.0018776767974487203 ], [ 0.022030480753465264, 0.00725501900746274, 0.9707145002390719 ], [ 0.010414075531731787, 0.9511934525018875, 0.038392471966380716 ], [ 0.06112627671094816, 0.02214181188385923, 0.9167319114051926 ], [ 0.05781471409319112, 0.44379834394246215, 0.4983869419643468 ], [ 0.6493612929340407, 0.25852307527575924, 0.09211563179020014 ], ... ],
    	...
    ]

Limited-Memory BFGS

Next, we apply the same optimization using L-BFGS, which is nearly ideal for smooth, approximately quadratic objective functions.

TrainingTester.java:509 executed in 412.33 seconds (7.283 gc):

    IterativeTrainer iterativeTrainer = new IterativeTrainer(trainable.addRef());
    try {
      iterativeTrainer.setLineSearchFactory(label -> new ArmijoWolfeSearch());
      iterativeTrainer.setOrientation(new LBFGS());
      iterativeTrainer.setMonitor(TrainingTester.getMonitor(history));
      iterativeTrainer.setTimeout(30, TimeUnit.SECONDS);
      iterativeTrainer.setIterationsPerSample(100);
      iterativeTrainer.setMaxIterations(250);
      iterativeTrainer.setTerminateThreshold(0);
      return iterativeTrainer.run();
    } finally {
      iterativeTrainer.freeRef();
    }
Logging
Reset training subject: 8067492883437
Reset training subject: 8068021234447
Adding measurement 5726aab6 to history. Total: 0
LBFGS Accumulation History: 1 points
Constructing line search parameters: GD
Non-optimal measurement 0.12462857083227141 < 0.12462857083227141. Total: 1
th(0)=0.12462857083227141;dx=-1.5338153326849526E-9
Adding measurement 517cb82a to history. Total: 1
New Minimum: 0.12462857083227141 > 0.12462856752776645
WOLFE (weak): th(2.154434690031884)=0.12462856752776645; dx=-1.533815321639375E-9 evalInputDelta=3.3045049613322774E-9
Adding measurement 468dd370 to history. Total: 2
New Minimum: 0.12462856752776645 > 0.12462856422326155
WOLFE (weak): th(4.308869380063768)=0.12462856422326155; dx=-1.5338153105937936E-9 evalInputDelta=6.609009867153404E-9
Adding measurement 3178ae3d to history. Total: 3
New Minimum: 0.12462856422326155 > 0.12462855100524206
WOLFE (weak): th(12.926608140191302)=0.12462855100524206; dx=-1.5338152664114417E-9 evalInputDelta=1.982702935166003E-8
Adding measurement 74754f9e to history. Total: 4
New Minimum: 0.12462855100524206 > 0.12462849152415918
WOLFE (weak): th(51.70643256076521)=0.12462849152415918; dx=-1.5338150675902981E-9 evalInputDelta=7.930811223022527E-8
Adding measurement 331b519a to history. Total: 5
New Minimum: 0.12462849152415918 > 0.12462817429184744
WOLFE (weak): th(258.53216280382605)=0.12462817429184744; dx=-1.533814007195415E-9 evalInputDelta=3.9654042396919387E-7
Adding measurement 7381d893 to history. Total: 6
New Minimum: 0.12462817429184744 > 0.12462619159486817
WOLFE (weak): th(1551.1929768229563)=0.12462619159486817; dx=-1.5338073791378437E-9 evalInputDelta=2.379237403246637E-6
Adding measurement 712e36f0 to history. Total: 7
New Minimum: 0.12462619159486817 > 0.12461191642963917
WOLFE (weak): th(10858.350837760694)=0.12461191642963917; dx=-1.5337596271168604E-9 evalInputDelta=1.665440263223794E-5
Adding measurement 34c6dede to history. Total: 8
New Minimum: 0.12461191642963917 > 0.12449535260210769
WOLFE (weak): th(86866.80670208555)=0.12449535260210769; dx=-1.5333676798118431E-9 evalInputDelta=1.332182301637258E-4
Adding measurement 7d7299e1 to history. Total: 9
New Minimum: 0.12449535260210769 > 0.12343104734665233
WOLFE (weak): th(781801.26031877)=0.12343104734665233; dx=-1.529621149063688E-9 evalInputDelta=0.0011975234856190786
Adding measurement 7283bb81 to history. Total: 10
New Minimum: 0.12343104734665233 > 0.11284216570850833
WOLFE (weak): th(7818012.6031877)=0.11284216570850833; dx=-1.4752563644641226E-9 evalInputDelta=0.011786405123763083
Adding measurement 33e9073 to history. Total: 11
New Minimum: 0.11284216570850833 > 0.047459686247836516
END: th(8.599813863506469E7)=0.047459686247836516; dx=-2.3712508076797127E-10 evalInputDelta=0.0771688845844349
Fitness changed from 0.12462857083227141 to 0.047459686247836516
Iteration 1 complete. Error: 0.047459686247836516 Total: 23.7163; Orientation: 0.8582; Line Search: 21.2622
Non-optimal measurement 0.047459686247836516 < 0.047459686247836516. Total: 12
Rejected: LBFGS Orientation magnitude: 4.626e+03, gradient 2.180e-05, dot -0.448; [1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744, 0.12462849152415918, 0.12462855100524206, 0.12462856422326155, 0.12462856752776645, 0.12462857083227141
Rejected: LBFGS Orientation magnitude: 4.626e+03, gradient 2.180e-05, dot -0.448; [7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744, 0.12462849152415918, 0.12462855100524206, 0.12462856422326155, 0.12462856752776645
Rejected: LBFGS Orientation magnitude: 4.626e+03, gradient 2.180e-05, dot -0.448; [7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744, 0.12462849152415918, 0.12462855100524206, 0.12462856422326155
Rejected: LBFGS Orientation magnitude: 4.626e+03, gradient 2.180e-05, dot -0.448; [1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744, 0.12462849152415918, 0.12462855100524206
Rejected: LBFGS Orientation magnitude: 4.625e+03, gradient 2.180e-05, dot -0.448; [64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744, 0.12462849152415918
Rejected: LBFGS Orientation magnitude: 4.625e+03, gradient 2.180e-05, dot -0.448; [64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817, 0.12462817429184744
Rejected: LBFGS Orientation magnitude: 4.623e+03, gradient 2.180e-05, dot -0.448; [110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917, 0.12462619159486817
Rejected: LBFGS Orientation magnitude: 4.605e+03, gradient 2.180e-05, dot -0.449; [7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, 64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769, 0.12461191642963917
Rejected: LBFGS Orientation magnitude: 4.448e+03, gradient 2.180e-05, dot -0.459; [64f46736-7d1a-4400-907c-32dbde4f5a48 = 1.000/1.000e+00, 7c220d37-8863-47e9-acfc-8ac0c49b84f0 = 1.000/1.000e+00, eb2fc42f-cc29-41ef-877b-5f6dacc07ec7 = 1.000/1.000e+00, 110136c5-b36f-48ec-9533-ec0216545a32 = 1.000/1.000e+00, 1ce48abd-f1f1-42ae-bc96-f3b8cab04aa9 = 1.000/1.000e+00]
Orientation rejected. Popping history element from 0.047459686247836516, 0.11284216570850833, 0.12343104734665233, 0.12449535260210769
LBFGS Accumulation History: 3 points
Removed measurement 33e9073 to history. Total: 11
Removed measurement 7283bb81 to history. Total: 10
Removed measurement 7d7299e1 to history. Total: 9
Removed measurement 34c6dede to history. Total: 8
Removed measurement 712e36f0 to history. Total: 7
Removed measurement 7381d893 to history. Total: 6
Removed measurement 331b519a to history. Total: 5
Removed measurement 74754f9e to history. Total: 4
Removed measurement 3178ae3d to history. Total: 3
Adding measurement 25e2fca3 to history. Total: 3
th(0)=0.047459686247836516;dx=-4.753473270617498E-10
Adding measurement 17b79dce to history. Total: 4
New Minimum: 0.047459686247836516 > 0.020050210125649056
END: th(1.0E8)=0.020050210125649056; dx=-6.286043644420532E-11 evalInputDelta=0.02740947612218746
Fitness changed from 0.047459686247836516 to 0.020050210125649056
Iteration 2 complete. Error: 0.020050210125649056 Total: 388.6093; Orientation: 384.9867; Line Search: 3.0838
Final threshold in iteration 2: 0.020050210125649056 (> 0.0) after 412.326s (< 30.000s)

Returns

    0.020050210125649056

This training run resulted in the following configuration:

TrainingTester.java:610 executed in 0.00 seconds (0.000 gc):

    RefList<double[]> state = network.state();
    assert state != null;
    String description = state.stream().map(RefArrays::toString).reduce((a, b) -> a + "\n" + b)
        .orElse("");
    state.freeRef();
    return description;

Returns

    

And regressed input:

TrainingTester.java:622 executed in 0.00 seconds (0.000 gc):

    return RefArrays.stream(RefUtil.addRef(data)).flatMap(x -> {
      return RefArrays.stream(x);
    }).limit(1).map(x -> {
      String temp_18_0015 = x.prettyPrint();
      x.freeRef();
      return temp_18_0015;
    }).reduce((a, b) -> a + "\n" + b).orElse("");

Returns

    [
    	[ [ -0.40214609858568506, 1.9817190574534846, 0.01642704113220017 ], [ -1.7210326592992418, 0.18826753802931967, -1.7472348787300778 ], [ -0.10575659820300842, -0.9790318884472085, -1.2712115133497832 ], [ -0.8721978887997783, -0.9348386721176809, -1.7329634390825406 ], [ 0.3270586748328156, 0.5138929818550394, -0.7409516566878553 ], [ 1.6140782734611454, 1.6137377208413024, -0.9958159943024476 ], [ -2.4382270640437245, -0.8864161629979674, -1.019356772958308 ], [ 0.7892320734659004, 1.3552385507026916, -1.2524706241685926 ], ... ],
    	[ [ 0.7184834337514551, -1.7146325088200656, -1.3598509249313897 ], [ 1.0749353193133064, -0.08901748770297574, 0.6380821683896694 ], [ -1.5111941722316566, -0.762356264015499, -1.4344495637528445 ], [ 0.05240307471658395, 0.7744305234822884, 1.253166401801128 ], [ 0.6466695042968016, -1.8949644780442307, 1.608294973747429 ], [ 0.35523935264912176, -1.5672584740264865, -0.691980878622635 ], [ 0.866779629782017, 0.3931777415422526, -1.5639573713242696 ], [ 0.007098704619321772, -0.8734761406126814, 2.1863774359933594 ], ... ],
    	[ [ -1.4646736436376098, -1.0031405523934676, 0.359814196031077 ], [ -1.8811203949387583, 0.42098766581765645, -1.5758672708788983 ], [ -1.176852331272135, -0.9177492628832298, 0.018601594155364776 ], [ 0.3219733708230487, 1.0081312842792587, -0.6701046551023074 ], [ -1.4378267630727357, 0.2527020957234596, 3.161124667349276 ], [ 0.13794730950212353, -0.41562839558773274, 0.9256810860856091 ], [ 1.0647588269572699, 0.06337465796054087, 0.571866515082189 ], [ -1.3182373544789954, 0.2011474235027665, -1.3229100690237714 ], ... ],
    	[ [ -0.06928016865235392, -2.056347434040566, -1.6063723973070805 ], [ 1.6092326682624085, -0.3788829998470342, -0.1663496684153743 ], [ -0.9041468383570015, -1.9681130530348887, 1.1922598913918903 ], [ -1.5693273884535384, 0.7222453467438932, -1.0369179582903547 ], [ 2.191619897169643, -1.0593645721430922, 1.0437446749734496 ], [ 1.9920718372865776, 0.9465672569482422, 1.6773609057651802 ], [ -0.6684055022801035, -1.6574429116475817, 0.0018484139276856437 ], [ 0.7916255271988573, 0.5679100758640474, -0.615535603062904 ], ... ],
    	[ [ -0.7946234452321691, -0.001021771287762152, -1.032354783480069 ], [ 0.38843054963162527, 0.23193000110760947, 1.8636394492607653 ], [ -1.325973172804255, -1.651921143915438, -0.33410568328030743 ], [ 2.1928020204828598, -0.4767330552657071, 0.35193103478284776 ], [ 1.5201869179040082, 0.9212898089805438, -0.3454767268845521 ], [ -0.8808889169225677, 0.31149591299141344, 0.6133930039311541 ], [ 1.4194537986009295, -0.3841791549023659, -0.7072746436985636 ], [ 1.3773503905630962, 0.2777860418787519, -0.587136432441848 ], ... ],
    	[ [ 0.7401327423551891, 1.1304956910734862, 0.24137156657132453 ], [ -1.9000940810514342, -1.7012984835012686, 0.9413925645527026 ], [ -0.9467815656221461, -1.1459839443086688, -1.419234490069185 ], [ 0.2675787163518155, 2.1769283953822036, -0.12050711173401904 ], [ -0.15247890890997606, -8.336410255264659E-4, 3.001312549935503 ], [ 0.9982778338507791, 1.0206824993112362, 0.2930396668379847 ], [ 0.6107393383074813, -1.1754069188883343, -0.8193324194191469 ], [ -1.0846954877892054, -1.119517334185864, -0.7357871780249307 ], ... ],
    	[ [ -0.36651649562959526, -1.018014922323771, -0.2634685820466337 ], [ -1.3597028262642978, 1.6749337247569007, -1.5152308984926026 ], [ -0.42761286940167836, -0.4128867609918834, 2.208499630393562 ], [ -1.001044930759145, -0.01724072531308174, 0.19828565607222592 ], [ 0.6171456028693071, 0.9225394338964158, 1.844314963234277 ], [ -0.8287840478761432, -1.1947603859509914, -1.7604555661728654 ], [ -0.70158581269001, 1.196298905908628, -0.3467130932186179 ], [ -0.8872863083699473, -1.6895230544764324, 0.48480936284638 ], ... ],
    	[ [ 0.1327372625619152, 0.5226384200643757, 0.30062431737371 ], [ -0.7576697815911988, 0.27878416478396967, -1.2291143831927707 ], [ -0.8835908846104792, 2.6160337713075, -0.11244288669702071 ], [ 0.15228829382694636, 0.3784708322994546, 2.5212408738735985 ], [ -0.8880319261423517, 0.9403696349491271, -0.8643377088067753 ], [ -1.7796392557755762, -1.6752573075486439, -0.26510343667577974 ], [ -0.9564607729075855, 0.851463981448662, -1.3030032085410768 ], [ 0.7300765964794477, -0.47239946223745455, -0.8976771342419929 ], ... ],
    	...
    ]

To produce the following output:

TrainingTester.java:633 executed in 0.08 seconds (0.000 gc):

    Result[] array = ConstantResult.batchResultArray(pop(RefUtil.addRef(data)));
    @Nullable
    Result eval = layer.eval(array);
    assert eval != null;
    TensorList tensorList = Result.getData(eval);
    String temp_18_0016 = tensorList.stream().limit(1).map(x -> {
      String temp_18_0017 = x.prettyPrint();
      x.freeRef();
      return temp_18_0017;
    }).reduce((a, b) -> a + "\n" + b).orElse("");
    tensorList.freeRef();
    return temp_18_0016;

Returns

    [
    	[ [ 0.07481368890357205, 0.8114851042861386, 0.11370120681028914 ], [ 0.11464599486453556, 0.7736729758136509, 0.11168102932181358 ], [ 0.5782477974822618, 0.2414656439565873, 0.18028655856115094 ], [ 0.4233486835975121, 0.3976432980127374, 0.1790080183897505 ], [ 0.39229236396193445, 0.47288001831925675, 0.13482761771880894 ], [ 0.48234570709851715, 0.4821814709714037, 0.035472821930079175 ], [ 0.10149750168883687, 0.4790692147094357, 0.4194332836017273 ], [ 0.34589783968574145, 0.6092020265994781, 0.04490013371478041 ], ... ],
    	[ [ 0.8244693151008148, 0.072357852519914, 0.10317283237927125 ], [ 0.5106428762671166, 0.1594479707135849, 0.3299091530192984 ], [ 0.23841837587423292, 0.5041455017956279, 0.2574361223301391 ], [ 0.15670898790725626, 0.32260154467984936, 0.5206894674128945 ], [ 0.27065922974200773, 0.021311068645339937, 0.7080297016126524 ], [ 0.6679343755197599, 0.09767953816935036, 0.23438608631088986 ], [ 0.5845467687255239, 0.36402949857687, 0.051423732697606035 ], [ 0.09751842389475034, 0.04042573058662374, 0.8620558455186258 ], ... ],
    	[ [ 0.11381583848156783, 0.18056932597775116, 0.705614835540681 ], [ 0.08095724326199419, 0.809186332569292, 0.10985642416871379 ], [ 0.17854512408034448, 0.2313529423919539, 0.5901019335277016 ], [ 0.29789606003249985, 0.5916424881349313, 0.11046145183256884 ], [ 0.009451581287584275, 0.051249759141165876, 0.9392986595712498 ], [ 0.2650200663627723, 0.1523575163302334, 0.5826224173069943 ], [ 0.5055029982265039, 0.18570693273700575, 0.3087900690364903 ], [ 0.15232869494005294, 0.6960527382232755, 0.15161856683667155 ], ... ],
    	[ [ 0.7395890170475927, 0.10139536541641021, 0.15901561753599716 ], [ 0.7654986514335699, 0.10483752652344053, 0.12966382204298973 ], [ 0.10546338594092312, 0.036393770697333336, 0.8581428433617435 ], [ 0.07940597176901984, 0.7853632203240444, 0.13523080790693584 ], [ 0.7374379353876638, 0.028565436127379096, 0.23399662848495725 ], [ 0.4804194782090637, 0.16887436816874582, 0.35070615362219043 ], [ 0.3006010696671827, 0.11180392041002223, 0.5875950099227951 ], [ 0.48914586077289024, 0.39109295857773557, 0.11976118064937424 ], ... ],
    	[ [ 0.2500149302869248, 0.5528696785941282, 0.1971153911189471 ], [ 0.1605889110197125, 0.13732456388070238, 0.702086525099585 ], [ 0.22634124234654285, 0.1633827832590034, 0.6102759743944538 ], [ 0.8143563941145618, 0.05642221844798936, 0.1292213874374488 ], [ 0.5867819052769803, 0.3223881004252302, 0.09082999429778953 ], [ 0.11427180267196455, 0.37651755557156025, 0.5092106417564752 ], [ 0.778860985013928, 0.1282779794122964, 0.09286103557377565 ], [ 0.6787742061480461, 0.22604275996513015, 0.09518303388682374 ], ... ],
    	[ [ 0.3241701802134201, 0.47896693901755416, 0.1968628807690257 ], [ 0.05164979218388151, 0.063009264400878, 0.8853409434152405 ], [ 0.40935706506839376, 0.33542065015187383, 0.25522228477973236 ], [ 0.11866545032670146, 0.8008373526695027, 0.08049719700379562 ], [ 0.039080120255208524, 0.04547938617544219, 0.9154404935693493 ], [ 0.3973538707188332, 0.40635692992492906, 0.1962891993562377 ], [ 0.7107842978235067, 0.11913088325441425, 0.170084818922079 ], [ 0.2955697054445994, 0.28545455868746894, 0.4189757358679317 ], ... ],
    	[ [ 0.3802558048504787, 0.19821370566925275, 0.4215304894802685 ], [ 0.04415131459158348, 0.9180567916061594, 0.037791893802257136 ], [ 0.06260302854730443, 0.06353174896794066, 0.873865222484755 ], [ 0.14301022873258817, 0.3824968288891605, 0.4744929423782514 ], [ 0.1733487300212237, 0.23526185656227652, 0.5913894134164999 ], [ 0.47906165958489133, 0.3322384641829814, 0.18869987623212728 ], [ 0.10991709414883338, 0.7333412240350684, 0.15674168181609824 ], [ 0.18546229343488366, 0.08314739243586125, 0.731390314129255 ], ... ],
    	[ [ 0.2732510833423952, 0.4035467124680506, 0.3232022041895542 ], [ 0.22505783760054263, 0.6344835567612681, 0.14045860563818927 ], [ 0.027574590055053402, 0.9128023329877211, 0.05962307695722545 ], [ 0.07727976464982819, 0.09689371760049119, 0.8258265177496806 ], [ 0.12124287594957492, 0.754607188804839, 0.12414993524558615 ], [ 0.15021027467057987, 0.16673706187199208, 0.683052663457428 ], [ 0.12812449662305192, 0.7812749803953791, 0.09060052298156891 ], [ 0.6680832082715928, 0.20072517227681388, 0.1311916194515931 ], ... ],
    	...
    ]

TrainingTester.java:432 executed in 0.14 seconds (0.000 gc):

    return TestUtil.compare(title + " vs Iteration", runs);
Logging
Plotting range=[1.0, -2.2800988554779686], [4.0, -1.3236751371950324]; valueStats=DoubleSummaryStatistics{count=7, sum=0.197131, min=0.005247, average=0.028162, max=0.047460}
Plotting 4 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

TrainingTester.java:435 executed in 0.02 seconds (0.000 gc):

    return TestUtil.compareTime(title + " vs Time", runs);
Logging
Plotting range=[0.0, -2.2800988554779686], [388.609, -1.3236751371950324]; valueStats=DoubleSummaryStatistics{count=7, sum=0.197131, min=0.005247, average=0.028162, max=0.047460}
Plotting 4 points for GD
Only 1 points for CjGD
Plotting 2 points for LBFGS

Returns

Result

Results

TrainingTester.java:255 executed in 0.00 seconds (0.000 gc):

    return grid(inputLearning, modelLearning, completeLearning);

Returns

Result

TrainingTester.java:258 executed in 0.00 seconds (0.000 gc):

    return new ComponentResult(null == inputLearning ? null : inputLearning.value,
        null == modelLearning ? null : modelLearning.value, null == completeLearning ? null : completeLearning.value);

Returns

    {"input":{ "LBFGS": { "type": "NonConverged", "value": 0.020050210125649056 }, "CjGD": { "type": "NonConverged", "value": 0.04718179609515965 }, "GD": { "type": "NonConverged", "value": 0.005246880155172717 } }, "model":null, "complete":null}

LayerTests.java:425 executed in 0.00 seconds (0.000 gc):

    throwException(exceptions.addRef());

Results

details | result
{"input":{ "LBFGS": { "type": "NonConverged", "value": 0.020050210125649056 }, "CjGD": { "type": "NonConverged", "value": 0.04718179609515965 }, "GD": { "type": "NonConverged", "value": 0.005246880155172717 } }, "model":null, "complete":null}OK
  {
    "result": "OK",
    "performance": {
      "execution_time": "503.749",
      "gc_time": "11.964"
    },
    "created_on": 1586742608785,
    "file_name": "trainingTest",
    "report": {
      "simpleName": "Pixel",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SoftmaxLayerTest.Pixel",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/test/java/com/simiacryptus/mindseye/layers/cudnn/SoftmaxLayerTest.java",
      "javaDoc": ""
    },
    "training_analysis": {
      "input": {
        "LBFGS": {
          "type": "NonConverged",
          "value": 0.020050210125649056
        },
        "CjGD": {
          "type": "NonConverged",
          "value": 0.04718179609515965
        },
        "GD": {
          "type": "NonConverged",
          "value": 0.005246880155172717
        }
      }
    },
    "archive": "s3://code.simiacrypt.us/tests/com/simiacryptus/mindseye/layers/cudnn/SoftmaxActivationLayer/Pixel/trainingTest/202004135008",
    "id": "acf66b45-dfef-4f07-b189-80c132d8cdb6",
    "report_type": "Components",
    "display_name": "Comparative Training",
    "target": {
      "simpleName": "SoftmaxActivationLayer",
      "canonicalName": "com.simiacryptus.mindseye.layers.cudnn.SoftmaxActivationLayer",
      "link": "https://github.com/SimiaCryptus/mindseye-cudnn/tree/59d5b3318556370acb2d83ee6ec123ce0fc6974f/src/main/java/com/simiacryptus/mindseye/layers/cudnn/SoftmaxActivationLayer.java",
      "javaDoc": ""
    }
  }